

PHP scraperwiki::table_info Method Code Examples

This article collects typical usage examples of the PHP scraperwiki::table_info method. If you are wondering what scraperwiki::table_info does, how to call it, or where to find working examples, the curated snippets below should help. You can also explore further usage examples from the scraperwiki class.


Five code examples of the scraperwiki::table_info method are shown below, sorted by popularity by default.
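
For context: the method takes a table name and returns that table's column metadata, one row per column, with an empty result when the table does not exist. Here is a minimal sketch of both uses, assuming the standard ScraperWiki PHP library is loaded; the name and type keys are the same ones Example 4 below relies on:

// Sketch: probe a table before querying it. "swdata" is ScraperWiki's
// default table name; table_info returns an empty result if it is missing.
$columns = scraperwiki::table_info("swdata");
if (empty($columns)) {
    print "swdata does not exist yet\n";
} else {
    foreach ($columns as $column) {
        // Column metadata mirrors SQLite's PRAGMA table_info output.
        print $column['name'] . " (" . $column['type'] . ")\n";
    }
}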

Example 1: preg_replace

            }
        } else {
            // Search for a specific manufacturer
            $synthManufacturer = $manufacturerQuery;
            // Build a temp name for the array key
            $tManName = preg_replace("/\\s/", "_", $synthManufacturer);
            $synthManufacturer = urlencode($synthManufacturer);
            echo "Searching manufacturer: " . $synthManufacturer . "\n";
            $foundSynths[$state][$tManName] = findSynthsOnCraigslist($state, $jsonManufacturerScraper, $synthManufacturer, $ignoreWords, $cityDepth, $synthDepth, $depthOverride);
            if (!empty($foundSynths[$state][$tManName])) {
                //scraperwiki::save_var($state.'-'.$synthManufacturer.'-'.'synths', json_encode($foundSynths[$state][$synthManufacturer]));
                $saveMessage = scraperWiki::save_sqlite(array('manufacturer', 'synth_name', 'post_item_date', 'post_item_name', 'post_item_price', 'post_item_link', 'post_item_state', 'query', 'link', 'post_item_description', 'post_item_images'), $foundSynths[$state][$tManName]);
                print strval($saveMessage);
            } else {
                // No results: delete existing data
                $info = scraperwiki::table_info("swdata");
                if (!empty($info)) {
                    // Truncate the table before adding new results
                    scraperwiki::sqliteexecute("DELETE FROM swdata");
                }
            }
        }
    }
}
/**
* Parses craigslist and returns synths
* @param String state
* @param String synthManufacturer
* @param ignoreWords
* @param cityDepth
* @param synthDepth
Developer: flyeven | Project: scraperwiki-scraper-vault | Lines: 31 | Source: washington_digisound_craigslist_synth_scraper.php
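
Example 1 uses table_info as an existence check before issuing a destructive statement, since DELETE FROM fails on a missing table. The guard distilled into a minimal sketch (the sqlitecommit call is an addition here; Example 1 leaves the commit implicit):

// Only truncate swdata if it already exists.
if (scraperwiki::table_info("swdata")) {
    scraperwiki::sqliteexecute("DELETE FROM swdata");
    // Added for the sketch: persist the change explicitly, as Example 3 does after DDL.
    scraperwiki::sqlitecommit();
}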

Example 2: array_unique

print_r("start");
scraperwiki::attach("appcelerator_devlink");
// Bootstrap variables: seed the checkpoint on the first run only
if (!scraperwiki::table_info("swvariables")) {
    scraperwiki::save_var('last_page', 0);
}
$lastPage = scraperwiki::get_var('last_page');
if ($lastPage > 0) {
    $offset = " OFFSET " . $lastPage;
    $counter = $lastPage;
} else {
    $offset = "";
    $counter = 0;
}
print_r($offset);
$data = scraperwiki::select("* from appcelerator_devlink.swdata LIMIT 1500" . $offset);
foreach ($data as $row) {
    $OBJ = array('id' => $row['id'], 'name' => $row['name'], 'company' => $row['company'], 'location' => $row['location'], 'date' => $row['date'], 'url' => $row["url"], 'profile' => $row["profile"], 'twitter' => '', 'klout' => '', 'linkedIn' => '', 'certifications' => '');
    // Clean Links
    // ... (link- and profile-cleaning code elided in this excerpt) ...
            $OBJ['linkedIn'] = $LinkedIn;
        }
    }
    // Clean certifications
    $certifications = array_unique(json_decode($row['certifications']));
    $OBJ['certifications'] = json_encode($certifications);
    // Geo
    scraperwiki::save_sqlite(array('id', 'name', 'company', 'location', 'date', 'url', 'profile', 'twitter', 'klout', 'profile_url', 'linkedIn', 'certifications'), $OBJ);
    // Checkpoint progress so a rerun can resume at this row
    scraperwiki::save_var('last_page', $counter);
    $counter = $counter + 1;
    print_r($counter);
}
Developer: flyeven | Project: scraperwiki-scraper-vault | Lines: 31 | Source: clean_appcelerator_devlinks.php
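
Example 2 probes the swvariables table before calling get_var, because save_var creates that table lazily and get_var would otherwise query a missing table on the first run. The same idea as a reusable sketch (get_checkpoint is a hypothetical helper name, built only from calls shown in the examples):

// Return a saved checkpoint, or a default before the first save_var call.
function get_checkpoint($name, $default)
{
    if (!scraperwiki::table_info("swvariables")) {
        return $default; // first run: swvariables has not been created yet
    }
    return scraperwiki::get_var($name);
}

$lastPage = get_checkpoint('last_page', 0);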

Example 3: array

$page = array(
    "http://drupal.org/search/site/commerce?f[0]=drupal_core%3A103&f[1]=bs_project_sandbox%3A0&f[2]=ss_meta_type%3Amodule",
    "http://drupal.org/search/site/commerce?page=1&f[0]=drupal_core%3A103&f[1]=bs_project_sandbox%3A0&f[2]=ss_meta_type%3Amodule",
    "http://drupal.org/search/site/commerce?page=2&f[0]=drupal_core%3A103&f[1]=bs_project_sandbox%3A0&f[2]=ss_meta_type%3Amodule",
    "http://drupal.org/search/site/commerce?page=3&f[0]=drupal_core%3A103&f[1]=bs_project_sandbox%3A0&f[2]=ss_meta_type%3Amodule",
    "http://drupal.org/search/site/commerce?page=4&f[0]=drupal_core%3A103&f[1]=bs_project_sandbox%3A0&f[2]=ss_meta_type%3Amodule",
    "http://drupal.org/search/site/commerce?page=5&f[0]=drupal_core%3A103&f[1]=bs_project_sandbox%3A0&f[2]=ss_meta_type%3Amodule",
    "http://drupal.org/search/site/commerce?page=6&f[0]=drupal_core%3A103&f[1]=bs_project_sandbox%3A0&f[2]=ss_meta_type%3Amodule",
    "http://drupal.org/search/site/commerce?page=7&f[0]=drupal_core%3A103&f[1]=bs_project_sandbox%3A0&f[2]=ss_meta_type%3Amodule",
    "http://drupal.org/search/site/commerce?page=8&f[0]=drupal_core%3A103&f[1]=bs_project_sandbox%3A0&f[2]=ss_meta_type%3Amodule",
    "http://drupal.org/search/site/commerce?page=9&f[0]=drupal_core%3A103&f[1]=bs_project_sandbox%3A0&f[2]=ss_meta_type%3Amodule",
    "http://drupal.org/search/site/commerce?page=10&f[0]=drupal_core%3A103&f[1]=bs_project_sandbox%3A0&f[2]=ss_meta_type%3Amodule",
    "http://drupal.org/search/site/commerce?page=11&f[0]=drupal_core%3A103&f[1]=bs_project_sandbox%3A0&f[2]=ss_meta_type%3Amodule",
    "http://drupal.org/search/site/commerce?page=12&f[0]=drupal_core%3A103&f[1]=bs_project_sandbox%3A0&f[2]=ss_meta_type%3Amodule"
);
require 'scraperwiki/simple_html_dom.php';
// To change schema:
// - Clear data
// - Run
if (!scraperwiki::table_info('swdata')) {
    scraperwiki::sqliteexecute("create table swdata (`name` string, `url` string, `author` string, `maintenance` string, `dev_status` string, `rec_release_version` string, `rec_release_type` string, `rec_release_timestamp` datetime, `downloads` int, `installs` int, `bugs` int, `total_bugs` int, `age_years` float, `mbug_rate` float, `created` datetime, `modified` datetime, `last_commit` datetime)");
    scraperwiki::sqlitecommit();
}
$now = time();
$i = 0;
foreach ($page as $id => $url) {
    $html = scraperWiki::scrape($url);
    $dom = new simple_html_dom();
    $dom->load($html);
    // Grab urls for each module
    foreach ($dom->find("dt[@class='title'] a") as $data) {
        $i++;
        if (stristr($data->href, "commerce_")) {
            // Load Module Data
            $html2 = scraperWiki::scrape($data->href);
            // ... (module metric parsing elided in this excerpt) ...
            if (!empty($total_opp_days)) {
                $bug_rate = $total_bugs / $total_opp_days;
            }
            // Build data record (note: reuses the loop variable $data for the record)
            $uniquekeys = array("name", "url");
            $data = array("name" => $data->plaintext, "url" => $data->href, "author" => $author, "maintenance" => $maintenance_status, "dev_status" => $dev_status, "rec_release_version" => $rec_release['version'], "rec_release_type" => $rec_release['type'], "rec_release_timestamp" => $rec_release['timestamp'], "downloads" => $downloads, "installs" => $installs, "bugs" => $open_bugs, "total_bugs" => $total_bugs, "age_years" => round($age / 86400 / 365.25, 2), "mbug_rate" => round($bug_rate * 1000, 2), "created" => $created, "modified" => $lastmodified, "last_commit" => $last_commit);
            scraperwiki::save($uniquekeys, $data);
        }
    }
}
Developer: flyeven | Project: scraperwiki-scraper-vault | Lines: 31 | Source: drupal_commerce_modules_1.php
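
Example 3 shows the other common guard: create swdata with an explicit, fully typed schema on the first run, so the columns are not left to the library's type inference. A minimal restatement, with a trimmed column list for illustration:

// First run only: create the table with explicit column types.
if (!scraperwiki::table_info('swdata')) {
    // Trimmed column list; Example 3 declares the full 17-column schema.
    scraperwiki::sqliteexecute("create table swdata (`name` string, `url` string, `downloads` int)");
    scraperwiki::sqlitecommit();
}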

Example 4: debug_table

function debug_table($schema, $tname, $showSchema = FALSE)
{
    global $DEBUG_TABLES;
    if (!$DEBUG_TABLES) {
        return;
    }
    $tablename = $schema . "." . $tname;
    $num = scraperwiki::select("count(*) AS n FROM " . $tablename);
    print "{$tablename} size: " . $num[0]['n'] . " rows.\n";
    if ($showSchema) {
        print "{$tablename} schema: ";
        $info = scraperwiki::table_info($tablename);
        #debug_array($info, "Table_info($tablename)");
        foreach ($info as $column) {
            print $column['name'] . "(" . $column['type'] . "); ";
        }
        print "\n";
    }
}
Developer: flyeven | Project: scraperwiki-scraper-vault | Lines: 19 | Source: hydro_planning_applications_3.php
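
A hypothetical call to Example 4's helper, assuming an attached schema named hydro with a populated swdata table:

$DEBUG_TABLES = TRUE; // debug_table() is a no-op unless this global is set
debug_table("hydro", "swdata", TRUE);
// Prints the row count, then each column as "name(type); " on one line.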

Example 5: getData

function getData($query)
{
    if (!count(scraperwiki::table_info("swdata"))) {
        return false;
    }
    return scraperWiki::select($query);
}
Developer: flyeven | Project: scraperwiki-scraper-vault | Lines: 7 | Source: whoint_gap.php
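
Example 5 wraps select() so callers get false instead of an SQLite error while the scraper has not yet written any rows. A hypothetical call (the query string omits the SELECT keyword because the library's select() prepends it, as Example 2's query shows):

$rows = getData("* FROM swdata LIMIT 10");
if ($rows === false) {
    print "no data scraped yet\n";
}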


Note: the scraperwiki::table_info examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their original authors, and copyright remains with those authors; consult each project's license before redistributing or reusing the code. Do not reproduce without permission.