本文整理汇总了PHP中scraperWiki::save_sqlite方法的典型用法代码示例。如果您正苦于以下问题:PHP scraperWiki::save_sqlite方法的具体用法?PHP scraperWiki::save_sqlite怎么用?PHP scraperWiki::save_sqlite使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类scraperWiki
的用法示例。
在下文中一共展示了scraperWiki::save_sqlite方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的PHP代码示例。
示例1: scrapeTeams
function scrapeTeams($url)
{
    // Fetch the page and parse it into a DOM tree.
    $html = scraperWiki::scrape($url);
    $dom = new simple_html_dom();
    $dom->load($html);

    // Each club name sits inside an anchor within a td.cw cell;
    // persist one row per club, keyed on the 'club' column.
    foreach ($dom->find('td.cw a') as $anchor) {
        scraperWiki::save_sqlite(array('club'), array('club' => $anchor->plaintext));
    }
}
示例2: scrapePage
function scrapePage($url)
{
    // Download the target page and parse it into a DOM tree.
    $dom = new simple_html_dom();
    $dom->load(scraperWiki::scrape($url));

    // Every td.nom cell holds the entity name in its first anchor; the
    // sibling td.compte cell (reached via the shared parent row) holds
    // the associated count.
    foreach ($dom->find('td.nom') as $nameCell) {
        $label = $nameCell->find('a', 0)->plaintext;
        $count = $nameCell->parent()->find('td.compte', 0)->plaintext;
        // Only persist rows that carry a truthy count value.
        if ($count) {
            scraperWiki::save_sqlite(array('name'), array('name' => $label, 'count' => $count));
        }
    }
}
示例3: scrapeSmellsCategory
function scrapeSmellsCategory($url)
{
    // Fetch the category listing page and parse it.
    $html = scraperWiki::scrape($url);
    $dom = new simple_html_dom();
    $dom->load($html);

    foreach ($dom->find('div.product') as $product) {
        $brand = $product->find('h3 > a', 0);
        $productName = $product->find('p.productName > a', 0);
        $prices = $product->find('p.productPrice', 0);
        $productDesc = $product->find('p.productDesc a', 0);

        // FIX: the original called methods/properties on these results
        // unconditionally, fataling on a malformed product tile where any
        // find() returns null. Skip incomplete tiles instead.
        if (!$brand || !$productName || !$prices || !$productDesc) {
            continue;
        }
        $rrp = $prices->find('a', 0);
        $ourPrice = $prices->find('a.ourPrice', 0);
        if (!$rrp || !$ourPrice) {
            continue;
        }

        // Strip the fixed label prefixes baked into the price anchors.
        // NOTE(review): offsets 10 and 16 are magic numbers tied to the
        // site's markup ("RRP: ..." / "Our Price: ...") — confirm against
        // the live page.
        $fixedRrp = substr($rrp->innertext, 10);
        $fixedPrice = substr($ourPrice->innertext, 16);

        // (Stray debug `echo $productName->innertext;` removed.)
        $data = array(
            'brand' => $brand->innertext,
            'product' => $productName->innertext,
            'rrp' => $fixedRrp,
            'price' => $fixedPrice,
            'desc' => $productDesc->innertext,
            'url' => $url,
        );
        scraperWiki::save_sqlite(array('brand', 'product', 'rrp', 'price', 'desc', 'url'), $data);
    }
}
示例4: saveData
// Generic persistence helper: upsert one flight row into the ScraperWiki
// datastore, keyed on the caller-supplied unique column name(s).
function saveData($unique, $flight_data)
{
scraperWiki::save_sqlite($unique, $flight_data);
}
示例5: file_get_contents
// NOTE(review): truncated fragment — this closing brace ends a construct
// whose opening is not visible in this excerpt.
}
//Current Synths
// Fetch the distinct (manufacturer, url, name) rows already stored in the
// "current_synths" ScraperWiki datastore via its SQLite JSON API.
$synthList3 = file_get_contents("https://api.scraperwiki.com/api/1.0/datastore/sqlite?format=jsondict&name=current_synths&query=select%20DISTINCT%20manufacturer%2C%20url%2C%20name%20from%20%60swdata%60");
if (!empty($synthList3)) {
// Decode the JSON payload into an array of row objects.
$synthList3 = json_decode($synthList3);
}
// Merge the three source lists ($synthList1 and $synthList2 are built in an
// earlier, unseen part of the script) and drop structurally identical rows
// via a serialize/array_unique/unserialize round trip.
$synths = array();
$synths = traverseList($synthList1);
$synths = array_merge(traverseList($synthList2), $synths);
$synths = array_merge(traverseList($synthList3), $synths);
$synths = array_map('unserialize', array_unique(array_map('serialize', $synths)));
echo "Total synths: " . count($synths) . "\n";
//var_dump($synths);
if (!empty($synths)) {
//$dbName = "vintagesynth-scrape-".$today = date("m-d-Y");
// Upsert all rows keyed on (manufacturer, name, url), then record the total
// in a ScraperWiki variable.
$saveMessage = scraperWiki::save_sqlite(array('manufacturer', 'name', 'url'), $synths);
//print strval($saveMessage);
scraperwiki::save_var('total_results', count($synths));
print scraperWiki::get_var('total_results');
}
// Strip tag-like fragments from every field of every row object.
// NOTE(review): the pattern "/<*.>/" looks wrong — it matches zero-or-more
// '<' characters followed by any char and '>', not an HTML tag; the author
// probably meant "/<.*?>/". Confirm before changing stored data.
function traverseList($list)
{
$dataList = array();
foreach ($list as $item) {
//Clean up the data
foreach ($item as $key => $value) {
$item->{$key} = preg_replace("/<*.>/", "", $value);
//echo $item->$key."\n";
}
$dataList[] = $item;
}
// NOTE(review): truncated — the function's "return $dataList;" and closing
// brace fall outside this excerpt.
示例6: simple_html_dom
// NOTE(review): truncated fragment — $html is produced by code outside this
// excerpt, and the final "}" closes an enclosing construct whose opening is
// not visible here.
$dom = new simple_html_dom();
$dom->load($html);
$position = 0;
$result = array();
// Each topsites-label span's text is treated as a site address: fetch the
// site and record whether its HTML mentions 'ga.js' (Google Analytics).
foreach ($dom->find("span[@class='topsites-label']") as $data) {
$result = $data->plaintext;
$target = scraperWiki::scrape($result);
$ga = FALSE;
if (strpos($target, 'ga.js') !== FALSE) {
$ga = TRUE;
}
scraperWiki::save_sqlite(array("data"), array('data' => $result, 'ga' => $ga));
}
}
require 'scraperwiki/simple_html_dom.php';

// Walk the first 20 pages of Alexa's top-sites listing for the Czech
// Republic; for each listed site, fetch it and record whether it embeds
// Google Analytics (ga.js).
for ($i = 0; $i < 20; $i++) {
    $listingHtml = scraperWiki::scrape("http://www.alexa.com/topsites/countries;{$i}/CZ");
    $parser = new simple_html_dom();
    $parser->load($listingHtml);
    $position = 0;
    $result = array();
    foreach ($parser->find("span[@class='topsites-label']") as $labelNode) {
        $result = $labelNode->plaintext;
        $siteBody = scraperWiki::scrape($result);
        // Boolean flag: TRUE when the fetched page mentions 'ga.js'.
        $ga = strpos($siteBody, 'ga.js') !== FALSE;
        scraperWiki::save_sqlite(array("data"), array('data' => $result, 'ga' => $ga));
    }
}
示例7: saveData
// Upsert one flight record keyed on the hard-coded columns
// (date, airline, flight_num).
// NOTE(review): the $unique parameter is ignored — confirm callers do not
// expect their own key columns to be honoured.
function saveData($unique, $flight_data)
{
scraperWiki::save_sqlite(array("date", "airline", "flight_num"), $flight_data);
}
示例8: simple_html_dom
<?php
require 'scraperwiki/simple_html_dom.php';

// Start from a clean slate, then harvest every embedded movie URL from the
// rugbydump front page and store one row per clip, keyed on 'id'.
scraperWiki::sqliteexecute('drop table if exists ttt');

$url = 'http://www.rugbydump.blogspot.com/';
$page = new simple_html_dom();
$page->load(scraperWiki::scrape($url));

$counter = 0;
foreach ($page->find('object param[name="movie"]') as $param) {
    $counter++;
    // Wrap the raw movie URL in the player's resolver call and give the
    // entry a sequential display title.
    scraperWiki::save_sqlite(array('id'), array(
        'id' => "code:New_Script_Resolve('" . $param->value . "')",
        'title' => 'Video ' . $counter,
    ));
}
require 'scraperwiki/simple_html_dom.php';

// Reset the scratch table, then re-scrape the blog's embedded videos.
scraperWiki::sqliteexecute('drop table if exists ttt');

$url = 'http://www.rugbydump.blogspot.com/';
$html = scraperWiki::scrape($url);
$document = new simple_html_dom();
$document->load($html);

// <object><param name="movie" value="..."> holds each clip's URL.
$params = $document->find('object param[name="movie"]');
$index = 0;
foreach ($params as $node) {
    $index++;
    $clipUrl = $node->value;
    $row = array(
        'id' => "code:New_Script_Resolve('" . $clipUrl . "')",
        'title' => 'Video ' . $index,
    );
    scraperWiki::save_sqlite(array('id'), $row);
}
示例9: simple_html_dom
<?php
require 'scraperwiki/simple_html_dom.php';

// Collect every embedded movie URL from the rugbydump front page and store
// each one as its own row, keyed on the 'video' column.
$url = 'http://www.rugbydump.blogspot.com/';
$page = new simple_html_dom();
$page->load(scraperWiki::scrape($url));

foreach ($page->find('object param[name="movie"]') as $param) {
    scraperWiki::save_sqlite(array('video'), array('video' => $param->value));
}
require 'scraperwiki/simple_html_dom.php';

// Second pass: extract the <param name="movie"> values from the rugbydump
// front page and persist one datastore row per video URL.
$url = 'http://www.rugbydump.blogspot.com/';
$html = scraperWiki::scrape($url);
$document = new simple_html_dom();
$document->load($html);

$params = $document->find('object param[name="movie"]');
foreach ($params as $node) {
    $record = array('video' => $node->value);
    scraperWiki::save_sqlite(array('video'), $record);
}
示例10: saveData
// Upsert one railway record into the ScraperWiki datastore, keyed on the
// caller-supplied unique column name(s).
function saveData($unique, $railway)
{
scraperWiki::save_sqlite($unique, $railway);
}
示例11: foreach
// NOTE(review): truncated fragment — it starts mid-way through a crawl loop
// ($view_dom, $subDir, $synthImages, $cleanSynthName, $navLink, $subNavLinkURL
// and $depth are all defined in unseen earlier code) and the run of closing
// braces below closes constructs whose openings are not visible here.
// Accumulate a comma-separated list of absolute image URLs from the page.
foreach ($view_dom->find('div[id=left_col] img[class=imgcenter]') as $element) {
$synthImages .= "http://www.vintagesynth.com" . $subDir . $element->src . ",";
}
// Concatenate every description paragraph, separated by <br/> tags.
$synthDescription = "";
foreach ($view_dom->find("div[class=grid_11] div[id=left_col] p") as $view_data) {
$synthDescription .= $view_data->plaintext . "<br/>";
}
// One result row per synth page visited.
$synths[] = array('name' => $cleanSynthName, 'manufacturer' => $navLink[0]->plaintext, 'url' => "http://www.vintagesynth.com" . $subNavLinkURL, 'description' => $synthDescription, 'images' => $synthImages);
/*echo "<pre>";
print_r($synths);
echo "</pre>";*/
}
} else {
break;
}
}
} else {
print "The scrape has completed at a depth level of {$depth}.\n";
break;
}
}
}
/*echo "<pre>";
print_r($synths);
echo "</pre>";*/
//$dbName = "vintagesynth-scrape-".$today = date("m-d-Y");
//$saveMessage = scraperWiki::save_sqlite(array('manufacturer','name','url','description','images'), $synths,$table_name=$dbName);
// Bulk-upsert all collected rows keyed on (manufacturer, name, url, ...),
// then record the total count in a ScraperWiki variable.
$saveMessage = scraperWiki::save_sqlite(array('manufacturer', 'name', 'url', 'description', 'images'), $synths);
print strval($saveMessage);
scraperwiki::save_var('total_results', count($synths));
print scraperWiki::get_var('total_results');
示例12: saveData
// Upsert one record keyed on the three hard-coded Data_* columns.
// NOTE(review): the $unique parameter is ignored — confirm callers do not
// expect their own key columns to be honoured.
function saveData($unique, $record)
{
scraperWiki::save_sqlite(array("Data_autocount", "data_blank", "data_Feedback"), $record);
}
示例13: acount
# www.munich-airport.de
# Scrape the arrivals board from Munich airport's consumer page.
# (A 5-minute schedule was desired, but free accounts only allow daily runs.)
require "scraperwiki/simple_html_dom.php";

$pageHtml = scraperWiki::scrape("http://www.munich-airport.de/de/consumer/index.jsp");
$parser = new simple_html_dom();
$parser->load($pageHtml);

# The flight board lives in a single, id-addressable table on the page.
$board = $parser->getElementById('navigation_mainpage_flightinfo_table');
foreach ($board->find('tr') as $row) {
    print $row->plaintext . "\n";
    // Data rows use <td>; the header row uses <th>. Fall back accordingly,
    // and stop once we hit a row with no cells at all.
    $cells = count($row->find("td")) > 1 ? $row->find("td") : $row->find("th");
    if (count($cells) == 0) {
        break;
    }
    $flightnr = $cells[1]->plaintext;
    $from = $cells[2]->plaintext;
    $time = $cells[3]->plaintext;
    $expected_time = $cells[4]->plaintext;
    // Skip the header row ("Flug" is its flight-number label).
    if ($flightnr == "Flug") {
        continue;
    }
    // One row per flight per day, keyed on (date, flightnr).
    $flight_data = array(
        "date" => date("m.d.y"),
        "flightnr" => $flightnr,
        "from" => $from,
        "time" => $time,
        "expected_time" => $expected_time,
    );
    scraperWiki::save_sqlite(array("date", "flightnr"), $flight_data);
}
示例14: elseif
// NOTE(review): truncated fragment — it starts mid-way through a table-cell
// loop ($td and every saved variable below are defined in unseen earlier
// code) and the trailing braces close constructs whose openings are not
// visible here.
// Track which Hungarian spec label ("Évjárat:" = year, "Kivitel:" = body
// type, etc.) was seen last, so the following cell's value can be assigned
// to the right field.
// NOTE(review): every branch just assigns $td back to $prevtd — the chain
// could collapse to an in_array() whitelist check; left as-is here.
if ($td == "Évjárat:") {
$prevtd = "Évjárat:";
} elseif ($td == "Kivitel:") {
$prevtd = "Kivitel:";
} elseif ($td == "Állapot:") {
$prevtd = "Állapot:";
} elseif ($td == "Üzemanyag:") {
$prevtd = "Üzemanyag:";
} elseif ($td == "Sebességváltó fajtája:") {
$prevtd = "Sebességváltó fajtája:";
} elseif ($td == "Hengerűrtartalom:") {
$prevtd = "Hengerűrtartalom:";
} elseif ($td == "Henger-elrendezés:") {
$prevtd = "Henger-elrendezés:";
} elseif ($td == "Hajtás:") {
$prevtd = "Hajtás:";
} elseif ($td == "Teljesítmény:") {
$prevtd = "Teljesítmény:";
} elseif ($td == "Ajtók száma:") {
$prevtd = "Ajtók száma:";
} elseif ($td == "Klíma fajtája:") {
$prevtd = "Klíma fajtája:";
} elseif ($td == "Szín:") {
$prevtd = "Szín:";
} else {
$prevtd = "...";
}
}
}
// Upsert the assembled car listing as one row keyed on its listing id.
scraperWiki::save_sqlite(array('id'), array('id' => $kod, 'url' => $newurl, 'ar' => $ar, 'evjarat' => $evjarat, 'kivitel' => $kivitel, 'allapot' => $allapot, 'uzemanyag' => $uzemanyag, 'sebvalto' => $sebvalto, 'urtartalom' => $urtartalom, 'hengerelrend' => $hengerelr, 'hajtas' => $hajtas, 'teljesitmeny' => $teljesitmeny, 'ajtok' => $ajtok, 'klima' => $klima, 'szin' => $szin));
}
示例15: saveMembersLinks
// Upsert member-link records into the dedicated "membersLinks" table,
// keyed on the hard-coded "data_MemberID" column.
// NOTE(review): the $unique parameter is unused — verify that is intentional.
function saveMembersLinks($unique, $recordLinks)
{
scraperWiki::save_sqlite(array("data_MemberID"), $recordLinks, "membersLinks");
}