当前位置: 首页>>代码示例>>PHP>>正文


PHP scraperwiki::sqliteexecute方法代码示例

本文整理汇总了PHP中scraperwiki::sqliteexecute方法的典型用法代码示例。如果您正苦于以下问题:PHP scraperwiki::sqliteexecute方法的具体用法?PHP scraperwiki::sqliteexecute怎么用?PHP scraperwiki::sqliteexecute使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在scraperwiki的用法示例。


在下文中一共展示了scraperwiki::sqliteexecute方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的PHP代码示例。

示例1: populateDOM

function populateDOM($htmlDOM, $src_link, $upd_flag = false)
{
    // Populate $htmlDOM from a locally cached copy of $src_link when one
    // exists; otherwise scrape the live site and store the dump in the
    // `sources` cache table. Pass $upd_flag = true to force a re-scrape.
    scraperwiki::sqliteexecute("CREATE TABLE IF NOT EXISTS sources (src_link TEXT PRIMARY KEY, timestamp DATETIME, src_dump TEXT)");
    echo "Checking local cache...<br>\n";
    $cached = scraperwiki::sqliteexecute("SELECT src_link, timestamp, src_dump FROM sources WHERE src_link = :slnk", array("slnk" => $src_link));
    if (!empty($cached->data[0][2]) && $upd_flag != true) {
        // Cached dump present and no forced refresh: load straight from it.
        echo "Using local cache, as cached data exists from '" . date(DATE_RFC822, $cached->data[0][1]) . ".'<br>\n";
        echo "Loading...<br>\n";
        $htmlDOM->load($cached->data[0][2]);
        echo "Populate DOM Complete.";
        return $htmlDOM;
    }
    echo "No Cache for this site (or force-update flag given), scraping live site for local cache...<br>\n";
    // Fetch once and persist so development runs don't hammer the remote site.
    $pageSource = scraperWiki::scrape($src_link);
    $htmlDOM->load($pageSource);
    $domDump = $htmlDOM->save();
    echo "Scrape complete, storing into cache...<br>\n";
    scraperwiki::sqliteexecute("INSERT OR REPLACE INTO sources VALUES (:slnk, :stime, :sdmp)", array("slnk" => $src_link, "stime" => time(), "sdmp" => $domDump));
    scraperwiki::sqlitecommit();
    echo "Cache saved.<br>\n";
    echo "Populate DOM Complete.";
    return $htmlDOM;
}
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:25,代码来源:local_cache_scraper.php

示例2: grep_munich

function grep_munich($url, $table_name)
{
    // Scrape the flight-information table at $url and store one row per
    // flight into $table_name, keyed by (date, count).
    $html = scraperWiki::scrape($url);
    # Use the PHP Simple HTML DOM Parser to extract <td> tags
    $dom = new simple_html_dom();
    $dom->load($html);
    // Drop all old information by dropping the table (clean re-scrape).
    scraperwiki::sqliteexecute("drop table if exists " . $table_name);
    scraperwiki::sqlitecommit();
    $rowIndex = 0;
    $table = $dom->getElementById('flight_info_area');
    foreach ($table->find('tr') as $row) {
        // Flight details. Read tds or ths
        $cells = $row->find("td");
        // Rows with fewer than 7 columns are headers/fillers — skip them.
        if (count($cells) < 7) {
            continue;
        }
        //print $row->plaintext . "\n";
        // Save one flight's details; today's date plus the running row
        // index forms the unique key.
        scraperwiki::save_sqlite(
            array("date", "count"),
            array(
                "date" => date("Y-m-d"),
                "count" => $rowIndex,
                "flightnr" => $cells[1]->plaintext,
                "from" => $cells[2]->plaintext,
                "time" => $cells[3]->plaintext,
                "expected_time" => $cells[4]->plaintext,
            ),
            $table_name
        );
        $rowIndex++;
    }
}
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:32,代码来源:munich_airport.php

示例3: alreadyKnown

function alreadyKnown($cat, $url)
{
    // Returns true (and logs) when a row for this (cat, url) pair already
    // exists in swdata; false otherwise.
    // Fix: the original concatenated $cat/$url directly into the SQL string,
    // which breaks on embedded quotes and is an SQL-injection vector. Use
    // bound parameters instead.
    $data = scraperwiki::sqliteexecute(
        "select distinct id from swdata where cat = :cat and url = :url",
        array("cat" => $cat, "url" => $url)
    );
    if (count($data->data) === 0) {
        return false;
    }
    echo "already known : " . $url . " in " . $cat . "\n";
    return true;
}
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:9,代码来源:openeduol.php

示例4: insertar

function insertar($nombreTabla, $idContrato, $fecha, $objeto, $importe, $adjudicatario, $NIF)
{
    // Insert one contract row into $nombreTabla and commit immediately.
    // NOTE(review): utf8_decode() converts UTF-8 to ISO-8859-1 and is
    // deprecated since PHP 8.2 — kept here for identical behavior.
    $valores = array(
        $idContrato,
        $fecha,
        utf8_decode($objeto),
        $importe,
        utf8_decode($adjudicatario),
        $NIF,
    );
    scraperwiki::sqliteexecute("insert into " . $nombreTabla . " values (?,?,?,?,?,?)", $valores);
    scraperwiki::sqlitecommit();
}
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:5,代码来源:contratos_menores_junta_q2_2013.php

示例5: array

            $locationid = "";
            $locationname = "No data";
            $locationlatitude = "";
            $locationlongitude = "";
            $error = $data['error']['message'];
            $categorytitle = "";
            $categoryid = "";
            $mediaid = "";
            $mediatype = "";
            $medialink = "";
            $mediathumb = "";
            $reports["{$id}"] = array("ID" => $id, "Error" => $error, "Title" => $title, "Category" => $categorytitle, "CategoryID" => $categoryid, "Incidentdescription" => $incidentdescription, "Incidentdate" => $incidentdate, "Incidentmode" => $incidentmode, "Incidentactive" => $incidentactive, "Incidentverified" => $incidentverified, "Locationid" => $locationid, "Locationname" => $locationname, "Locationlatitude" => $locationlatitude, "Locationlongitude" => $locationlongitude, "MediaID" => $mediatype, "MediaType" => $mediatype, "MediaLink" => $medialink, "MediaThumb" => $mediathumb);
        }
    }
    print "\n" . "end" . "\n";
}
#print_r($reports);
#scraperwiki::sqliteexecute("drop table reports");
// Create the destination table once; every column is stored as a string.
scraperwiki::sqliteexecute("create table if not exists reports ('id' string, 'title' string, 'incidentdescription' string, 'incidentdate' string, 'incidentmode' string, 'incidentactive' string, 'incidentverified' string, 'locationid' string, 'locationname' string, 'locationlatitude' string, 'locationlongitude' string, 'categorytitle' string, 'categoryid' string, 'error' string, 'mediaid' string, 'mediatype' string, 'medialink' string, 'mediathumb' string)");
// Upsert one row per report collected above.
foreach ($reports as $id => $values) {
    #foreach ($reports as $key => $values) {
    // NOTE(review): `$reports[] = array(...)` both appends this bound-values
    // array to $reports AND passes it as the parameter array. foreach
    // iterates over a copy of the array, so the loop itself is unaffected,
    // but the append looks accidental — the bare array() would suffice.
    scraperwiki::sqliteexecute("insert or replace into reports values (:id, :title, :incidentdescription, :incidentdate, :incidentmode, :incidentactive, :incidentverified, :locationid, :locationname, :locationlatitude, :locationlongitude, :categorytitle, :categoryid, :error, :mediaid, :mediatype, :medialink, :mediathumb)", $reports[] = array("title" => $values["Title"], "id" => $id, "incidentdescription" => $values["Incidentdescription"], "incidentdate" => $values["Incidentdate"], "incidentmode" => $values["Incidentmode"], "incidentactive" => $values["Incidentactive"], "incidentverified" => $values["Incidentverified"], "locationid" => $values["Locationid"], "locationname" => $values["Locationname"], "locationlatitude" => $values["Locationlatitude"], "locationlongitude" => $values["Locationlongitude"], "categorytitle" => $values["Category"], "categoryid" => $values["CategoryID"], "error" => $values["Error"], "mediaid" => $values["MediaID"], "mediatype" => $values["MediaType"], "medialink" => $values["MediaLink"], "mediathumb" => $values["MediaThumb"]));
    #}
}
#  $unique_keys = array("id","title");
#$table = "reports";
#if (isset($reports)){
#scraperwiki::save_sqlite($unique_keys, $reports, $table);
#}
// Persist all inserts in one commit.
scraperwiki::sqlitecommit();
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:31,代码来源:fysapisimplebecausescraperwikidoesntworkorasnwerse.php

示例6: count

//Convert to array
// Read the requested page offset from the query string, defaulting to 0
// for missing, empty, or non-numeric values.
if (!empty($_GET['start'])) {
    $start = $_GET['start'];
}
// To take care global variable if OFF
if (empty($start)) {
    $start = 0;
}
if (strlen($start) > 0 and !is_numeric($start)) {
    //echo "Data Error";
    //exit;
    $start = 0;
}
// Attach the source scraper's datastore and count its rows for paging.
$sourcescraper = 'california_craigslist_synth_collector';
scraperwiki::attach($sourcescraper);
$recordCount = scraperwiki::sqliteexecute("SELECT count(*) FROM {$sourcescraper}.swdata");
$recordCount = $recordCount->data[0][0];
//echo "Total Records: ".$recordCount."\n";
// NOTE(review): $limit is not defined in this excerpt — presumably set
// earlier in the file (page size); confirm before reuse.
$totalPages = ceil($recordCount / $limit);
//Make sure that the user doesn't pass go.
// Clamp $start when it points past the end of the result set.
// NOTE(review): the `$limit - $recordCount` branch looks suspect (it can
// yield an offset unrelated to the data) — verify intended behavior.
if ($start > $recordCount) {
    if ($recordCount > $limit) {
        $start = $recordCount - $limit;
    } else {
        $start = $limit - $recordCount;
    }
}
// Derive the current window position and the "previous page" offset.
$eu = $start - 0;
$current = $eu;
$back = $current - $limit;
if ($back < 0) {
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:31,代码来源:california_synths_from_craigslist.php

示例7: cleaningCampusTable

function cleaningCampusTable()
{
    // Rebuild the campus table from scratch: drop any previous copy, then
    // recreate the schema with `campus_cd` as the primary key.
    $campusTable = DEF_TABLE_CAMPUS;
    scraperwiki::sqliteexecute("drop table if exists {$campusTable}");
    $ddl = <<<_END_SQL_
CREATE TABLE `{$campusTable}` (
    `campus_cd` string, 
    `campus_name` string,
    `school_cd` string, 
    `school_name` string, 
    `school_category_cd` string, 
    `school_category_name` string, 
    `zip` string,
    `address` string,
    `latitude` blob,
    `longitude` blob,
    PRIMARY KEY(
        `campus_cd`
    )
)
_END_SQL_;
    scraperwiki::sqliteexecute($ddl);
}
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:23,代码来源:mynavi_landmark.php

示例8: ll

<?php

# Blank PHP
// NOTE(review): this demo script creates table `ll`, inserts three rows,
// and prints the first row of a select — then repeats the exact sequence
// verbatim below (apparently a paste duplication in the original scraper),
// so six rows are inserted in total per run.
//scraperwiki::sqliteexecute("DROP TABLE last_update");
scraperwiki::sqliteexecute("CREATE TABLE IF NOT EXISTS ll (\n        id INTEGER PRIMARY KEY ASC,\n        HITs_available int,\n        jobs int\n    )");
// Second identical CREATE is a no-op thanks to IF NOT EXISTS.
scraperwiki::sqliteexecute("CREATE TABLE IF NOT EXISTS ll (\n        id INTEGER PRIMARY KEY ASC,\n        HITs_available int,\n        jobs int\n    )");
//scraperwiki::sqliteexecute('update ll set jobs = 100 AND (set jobs = 200)');
scraperwiki::sqliteexecute('INSERT INTO ll (jobs) VALUES (100), (200), (300)');
scraperwiki::sqlitecommit();
//
$res = scraperwiki::select('* FROM ll');
print_r($res[0]);
//print_r(scraperwiki::show_tables());
//print_r(scraperwiki::table_info($name="last_update"));
# Blank PHP
//scraperwiki::sqliteexecute("DROP TABLE last_update");
scraperwiki::sqliteexecute("CREATE TABLE IF NOT EXISTS ll (\n        id INTEGER PRIMARY KEY ASC,\n        HITs_available int,\n        jobs int\n    )");
scraperwiki::sqliteexecute("CREATE TABLE IF NOT EXISTS ll (\n        id INTEGER PRIMARY KEY ASC,\n        HITs_available int,\n        jobs int\n    )");
//scraperwiki::sqliteexecute('update ll set jobs = 100 AND (set jobs = 200)');
scraperwiki::sqliteexecute('INSERT INTO ll (jobs) VALUES (100), (200), (300)');
scraperwiki::sqlitecommit();
//
$res = scraperwiki::select('* FROM ll');
print_r($res[0]);
//print_r(scraperwiki::show_tables());
//print_r(scraperwiki::table_info($name="last_update"));
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:26,代码来源:test_173.php

示例9: saveTable

function saveTable($data, $pkey, $name, $keys = null)
{
    // Replace table $name (default 'swdata') with the rows in $data.
    // $pkey may be a single column name or an array of names; when the
    // first key column is absent from $keys, a 1-based synthetic id is
    // injected into every row before saving.
    if (empty($name)) {
        $name = 'swdata';
    }
    if (is_string($pkey)) {
        $pkey = array($pkey);
    }
    if (empty($keys)) {
        $keys = getTableKeys($data);
    }
    if (!in_array($pkey[0], $keys)) {
        print "Table '{$name}' have no key '{$pkey['0']}', adding it";
        $rowTotal = count($data);
        for ($idx = 0; $idx < $rowTotal; $idx++) {
            $data[$idx][$pkey[0]] = $idx + 1;
        }
    }
    // Drop-and-recreate so stale columns or rows never survive a re-run.
    scraperwiki::sqliteexecute("drop table if exists {$name}");
    if ($keys) {
        scraperwiki::sqliteexecute('create table ' . $name . ' (' . implode(', ', $keys) . ')');
    }
    scraperwiki::save_sqlite($pkey, $data, $name, $keys);
}
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:23,代码来源:template.php

示例10: init

function init()
{
    // Reset the jsen_area table: drop any existing copy, then recreate it
    // with a composite (prefecture, larea) primary key.
    $statements = array(
        "drop table if exists jsen_area",
        "CREATE TABLE `jsen_area` (`prefecture` string, `larea` string, `count` number, PRIMARY KEY(`prefecture`, `larea`))",
    );
    foreach ($statements as $sql) {
        scraperwiki::sqliteexecute($sql);
    }
}
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:5,代码来源:first_scrape_1.php

示例11: simple_html_dom

// Walk every category link in the outer datatable, open each category page,
// and import the first CSV file linked from it into the swdata table.
$outerdom = new simple_html_dom();
$outerdom->load($outerhtml);
foreach ($outerdom->find('.datatable a') as $outerdata) {
    $outerdata->href = str_replace("default.aspx?catid=80&amp;pagetype=88&amp;sglid=3&amp;fld=", "", $outerdata->href);
    $html = scraperwiki::scrape("http://www.caa.co.uk/default.aspx?catid=80&pagetype=88&sglid=3&fld=" . $outerdata->href);
    $dom = new simple_html_dom();
    $dom->load($html);
    foreach ($dom->find('.datatable') as $datapage) {
        foreach ($datapage->find('a') as $page) {
            if (stripos($page, ".csv") !== false) {
                //print $page->href;
                $data = scraperWiki::scrape("http://www.caa.co.uk/" . $page->href);
                $rows = explode("\n", $data);
                // Extract CSV header
                $headers = str_getcsv(array_shift($rows));
                // Fix: $headers is an ARRAY; interpolating it directly into
                // the SQL string produced "create table ... (Array)" — a
                // broken statement. Build a proper comma-separated column
                // list, backtick-quoting each header name.
                $columnList = implode(', ', array_map(function ($h) {
                    return '`' . str_replace('`', '', trim($h)) . '`';
                }, $headers));
                $sql = "create table if not exists swdata ({$columnList})";
                print $sql;
                scraperwiki::sqliteexecute($sql);
                print_r($headers);
                foreach ($rows as $row) {
                    print_r($rows);
                    $row = str_getcsv($row);
                    //print_r(scraperwiki::show_tables());
                    //scraperwiki::save_sqlite(array("header"),array("header"=>1, "data"=>"Hi there"));
                    //scraperwiki::save(array($headers), $line);
                }
                // Only the first CSV is imported; stop after processing it.
                exit;
            }
        }
    }
}
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:31,代码来源:untitled1_1.php

示例12: getStopInfo

function getStopInfo($stop_id)
{
    // Scrape name, zone, description and lat/long for one transit stop and
    // cache the row in the local `stop` table. Skips work when the stop is
    // already cached (4th column of the cached row is non-empty).
    // Fix: bound parameter instead of string concatenation — avoids SQL
    // injection and broken queries when $stop_id contains a quote.
    $result = scraperwiki::sqliteexecute("select * from stop where stop_id = :sid", array("sid" => $stop_id));
    if (isset($result->data[0][3]) && $result->data[0][3] != '') {
        // Stop already in database, no need to parse.
        return;
    }
    global $stop_url;
    $full_stop_url = $stop_url . $stop_id;
    $html = scraperWiki::scrape($full_stop_url);
    $dom = new simple_html_dom();
    $dom->load($html);
    $title = $dom->find('title');
    if (stripos($title[0]->plaintext, 'not found')) {
        // 404 — log and carry on.
        return;
    }
    $name_dom = $dom->find('h1');
    $name = $name_dom[0]->plaintext;
    $zone_dom = $dom->find('table.header-column td');
    $zone = $zone_dom[0]->plaintext;
    $desc = $zone_dom[1]->plaintext;
    $map_dom = $dom->find('a#map-link');
    $map_link = $map_dom[0]->href;
    // OLD FORMAT
    // http://maps.google.com/maps/api/staticmap?size=320x250&amp;sensor=false&amp;markers=color:red|label:A|-27.925015,153.338751&amp;
    // New format, seems someone at Translink buggered the encoding:
    // http://maps.google.com/maps/api/staticmap?size=320x250&sensor=false&markers=color:red%7Clabel:A%7C-27.464013,153.029148&&client=gme-translinktransit&signature=5_hQHrTJhHBlK8Aur1LLoNBNkIk=
    /* For old format, uncomment if encoding gets fixed  
      $latlon =  substr( $map_link, strrpos ($map_link , '|')+1, -5); 
     
     */
    // New format: the "lat,long" pair sits between the last "%7C" and the
    // trailing "&amp;&amp;" in the static-map URL.
    // print 'mapLink: '. $map_link;
    $map_start = strrpos($map_link, '%7C') + 3;
    $map_end = strlen($map_link) - strrpos($map_link, '&amp;&amp;');
    //print 'map start: '.$map_start. ', map end: '.$map_end;
    $latlon = substr($map_link, $map_start, $map_end * -1);
    //print 'latlong:'. $latlon;
    $latlon_ar = preg_split("/,/", $latlon);
    $lat = $latlon_ar[0];
    $long = $latlon_ar[1];
    $stop = array('stop_id' => $stop_id, 'name' => $name, 'desc' => $desc, 'zone' => $zone, 'lat' => $lat, 'long' => $long);
    scraperwiki::save_sqlite(array('stop_id'), $stop, "stop", 0);
}
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:45,代码来源:translink_australia_timetable_scraper.php

示例13: strtotime

                    $tmp = strtotime($mm[1]);
                    $expire_date = date('Y-m-d', $tmp);
                    $row['expire_date'] = $expire_date;
                    $row['_executed'] = date('Y-m-d H:i:s');
                }
            }
        }
        scraperwiki::save_sqlite(array('guid'), $row, "plum", $verbose);
    }
}
require 'scraperwiki/simple_html_dom.php';
$verbose = 0;
$url = "http://www.plumdistrict.com/deals/rss.xml?affiliate_url=http://gan.doubleclick.net/gan_click?lid=41000000032549767&pubid=21000000000320750";
$feed_html = scraperWiki::scrape($url);
if ($feed_html) {
    scraperwiki::sqliteexecute("drop table if exists plum");
    $feed_dom = new simple_html_dom();
    $feed_dom->load($feed_html);
    $arr = array();
    foreach ($feed_dom->find('item') as $item) {
        $row = array();
        $deep_link = $row['guid'] = $item->find('guid', 0)->plaintext;
        $row['deal_content_id'] = $item->find('deal_content_id', 0)->plaintext;
        if ($deep_link) {
            print $deep_link . "\n";
            $deep_html = scraperWiki::scrape($deep_link);
            if ($deep_html) {
                if (preg_match('|Expires ([\\d]{2}/[\\d]{2}/[\\d]{2})|si', $deep_html, $mm)) {
                    $tmp = strtotime($mm[1]);
                    $expire_date = date('Y-m-d', $tmp);
                    $row['expire_date'] = $expire_date;
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:31,代码来源:devtest_plum.php

示例14: array

                        $delai = "sup_annee";
                }
            } else {
                $delai = "no_answer";
            }
        }
    }
    /*
     *  Stores results
     */
    $result = array("id" => $q_num, "q_texte" => utf8_encode($q_texte), "q_date" => $date_q, "a_date" => $date_a, "delai" => utf8_encode($delai));
    scraperwiki::save_sqlite(array("id"), $result);
}
require_once 'scraperwiki/simple_html_dom.php';
$last_id = 0;
$last_id_array = scraperwiki::sqliteexecute("select max(id) from swdata");
if ($last_id_array) {
    $last_id = $last_id_array->data[0][0];
}
$last_id++;
for ($q_num = $last_id; $q_num <= $last_id + 500; $q_num++) {
    $html = scraperWiki::scrape("http://questions.assemblee-nationale.fr/q13/13-" . $q_num . "QE.htm");
    $dom = new simple_html_dom();
    $dom->load($html);
    foreach ($dom->find(".tdstyle") as $data) {
        /*
         *  Scrapes the content of the question
         */
        foreach ($data->find('h2') as $title) {
            if ($title->plaintext == " Texte de la question") {
                foreach ($data->find('.contenutexte') as $contenutexte) {
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:31,代码来源:questions_a_lassemblee_nationale.php

示例15: array

                $daily['Asar'] = $tr->find('td', 6)->plaintext;
                $daily['Maghrib'] = $tr->find('td', 7)->plaintext;
                $daily['Isyak'] = $tr->find('td', 8)->plaintext;
                $rows[] = $daily;
            }
            $day++;
        }
        $html->clear();
    }
    scraperwiki::save_sqlite(array('Zone', 'Tarikh'), $rows, $table_name = "solat");
}
require 'scraperwiki/simple_html_dom.php';
$zones = array(array('Zone' => 'JHR02', 'Negeri' => 'JOHOR', 'Lokasi' => 'Kota Tinggi, Mersing, Johor Bahru'), array('Zone' => 'JHR04', 'Negeri' => 'JOHOR', 'Lokasi' => 'Batu Pahat, Muar, Segamat, Gemas'), array('Zone' => 'JHR03', 'Negeri' => 'JOHOR', 'Lokasi' => 'Kluang dan Pontian'), array('Zone' => 'JHR01', 'Negeri' => 'JOHOR', 'Lokasi' => 'Pulau Aur dan Pemanggil'), array('Zone' => 'KDH06', 'Negeri' => 'KEDAH', 'Lokasi' => 'Puncak Gunung Jerai'), array('Zone' => 'KDH01', 'Negeri' => 'KEDAH', 'Lokasi' => 'Kota Setar, Kubang Pasu, Pokok Sena'), array('Zone' => 'KDH05', 'Negeri' => 'KEDAH', 'Lokasi' => 'Langkawi'), array('Zone' => 'KDH02', 'Negeri' => 'KEDAH', 'Lokasi' => 'Pendang, Kuala Muda, Yan'), array('Zone' => 'KDH03', 'Negeri' => 'KEDAH', 'Lokasi' => 'Padang Terap, Sik, Baling'), array('Zone' => 'KDH04', 'Negeri' => 'KEDAH', 'Lokasi' => 'Kulim, Bandar Baharu'), array('Zone' => 'KTN03', 'Negeri' => 'KELANTAN', 'Lokasi' => 'Jeli, Gua Musang (Mukim Galas, Bertam)'), array('Zone' => 'KTN01', 'Negeri' => 'KELANTAN', 'Lokasi' => 'K.Bharu,Bachok,Pasir Puteh,Tumpat,Pasir Mas,Tnh. 
Merah,Machang,Kuala Krai,Mukim Chiku'), array('Zone' => 'MLK01', 'Negeri' => 'MELAKA', 'Lokasi' => 'Bandar Melaka, Alor Gajah, Jasin, Masjid Tanah, Merlimau, Nyalas'), array('Zone' => 'NGS02', 'Negeri' => 'NEGERI SEMBILAN', 'Lokasi' => 'Port Dickson, Seremban, Kuala Pilah, Jelebu, Rembau'), array('Zone' => 'NGS01', 'Negeri' => 'NEGERI SEMBILAN', 'Lokasi' => 'Jempol, Tampin'), array('Zone' => 'PHG05', 'Negeri' => 'PAHANG', 'Lokasi' => 'Genting Sempah, Janda Baik, Bukit Tinggi'), array('Zone' => 'PHG04', 'Negeri' => 'PAHANG', 'Lokasi' => 'Bentong, Raub, Kuala Lipis'), array('Zone' => 'PHG03', 'Negeri' => 'PAHANG', 'Lokasi' => 'Maran, Chenor, Temerloh, Bera, Jerantut'), array('Zone' => 'PHG06', 'Negeri' => 'PAHANG', 'Lokasi' => 'Bukit Fraser, Genting Higlands, Cameron Higlands'), array('Zone' => 'PHG02', 'Negeri' => 'PAHANG', 'Lokasi' => 'Kuantan, Pekan, Rompin, Muadzam Shah'), array('Zone' => 'PHG01', 'Negeri' => 'PAHANG', 'Lokasi' => 'Pulau Tioman'), array('Zone' => 'PRK07', 'Negeri' => 'PERAK', 'Lokasi' => 'Bukit Larut'), array('Zone' => 'PRK02', 'Negeri' => 'PERAK', 'Lokasi' => 'Ipoh, Batu Gajah, Kampar, Sg. 
Siput dan Kuala Kangsar'), array('Zone' => 'PRK01', 'Negeri' => 'PERAK', 'Lokasi' => 'Tapah,Slim River dan Tanjung Malim'), array('Zone' => 'PRK03', 'Negeri' => 'PERAK', 'Lokasi' => 'Pengkalan Hulu, Grik dan Lenggong '), array('Zone' => 'PRK04', 'Negeri' => 'PERAK', 'Lokasi' => 'Temengor dan Belum'), array('Zone' => 'PRK05', 'Negeri' => 'PERAK', 'Lokasi' => 'Teluk Intan, Bagan Datoh, Kg.Gajah,Sri Iskandar, Beruas,Parit,Lumut,Setiawan dan Pulau Pangkor'), array('Zone' => 'PRK06', 'Negeri' => 'PERAK', 'Lokasi' => 'Selama, Taiping, Bagan Serai dan Parit Buntar'), array('Zone' => 'PLS01', 'Negeri' => 'PERLIS', 'Lokasi' => 'Kangar, Padang Besar, Arau'), array('Zone' => 'PNG01', 'Negeri' => 'PULAU PINANG', 'Lokasi' => 'Seluruh Negeri Pulau Pinang'), array('Zone' => 'SBH09', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 9 - Sipitang, Membakut, Beaufort, Kuala Penyu, Weston, Tenom, Long Pa Sia'), array('Zone' => 'SBH08', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 8 - Pensiangan, Keningau, Tambunan, Nabawan'), array('Zone' => 'SBH07', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 7 - Papar, Ranau, Kota Belud, Tuaran, Penampang, Kota Kinabalu'), array('Zone' => 'SBH06', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 6 - Gunung Kinabalu'), array('Zone' => 'SBH05', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 5 - Kudat, Kota Marudu, Pitas, Pulau Banggi'), array('Zone' => 'SBH03', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 3 - Lahad Datu, Kunak, Silabukan, Tungku, Sahabat, Semporna'), array('Zone' => 'SBH02', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 2 - Pinangah, Terusan, Beluran, Kuamut, Telupit'), array('Zone' => 'SBH01', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 1 - Sandakan, Bdr. Bkt. 
Garam, Semawang, Temanggong, Tambisan'), array('Zone' => 'SBH04', 'Negeri' => 'SABAH', 'Lokasi' => 'Zon 4 - Tawau, Balong, Merotai, Kalabakan'), array('Zone' => 'SWK01', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 1 - Limbang, Sundar, Terusan, Lawas'), array('Zone' => 'SWK08', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 8 - Kuching, Bau, Lundu,Sematan'), array('Zone' => 'SWK07', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 7 - Samarahan, Simunjan, Serian, Sebuyau, Meludam'), array('Zone' => 'SWK06', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 6 - Kabong, Lingga, Sri Aman, Engkelili, Betong, Spaoh, Pusa, Saratok, Roban, Debak'), array('Zone' => 'SWK05', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 5 - Belawai, Matu, Daro, Sarikei, Julau, Bitangor, Rajang'), array('Zone' => 'SWK04', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 4 - Igan, Kanowit, Sibu, Dalat, Oya'), array('Zone' => 'SWK03', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 3 - Song, Belingan, Sebauh, Bintulu, Tatau, Kapit'), array('Zone' => 'SWK02', 'Negeri' => 'SARAWAK', 'Lokasi' => 'Zon 2 - Niah, Belaga, Sibuti, Miri, Bekenu, Marudi'), array('Zone' => 'SGR01', 'Negeri' => 'SELANGOR DAN WILAYAH PERSEKUTUAN', 'Lokasi' => 'Gombak,H.Selangor,Rawang,H.Langat,Sepang,Petaling,S.Alam'), array('Zone' => 'SGR02', 'Negeri' => 'SELANGOR DAN WILAYAH PERSEKUTUAN', 'Lokasi' => 'Sabak Bernam, Kuala Selangor, Klang, Kuala Langat'), array('Zone' => 'SGR03', 'Negeri' => 'SELANGOR DAN WILAYAH PERSEKUTUAN', 'Lokasi' => 'Kuala Lumpur'), array('Zone' => 'SGR04', 'Negeri' => 'SELANGOR DAN WILAYAH PERSEKUTUAN', 'Lokasi' => 'Putrajaya'), array('Zone' => 'TRG01', 'Negeri' => 'TERENGGANU', 'Lokasi' => 'Kuala Terengganu, Marang'), array('Zone' => 'TRG04', 'Negeri' => 'TERENGGANU', 'Lokasi' => 'Kemaman Dungun'), array('Zone' => 'TRG03', 'Negeri' => 'TERENGGANU', 'Lokasi' => 'Hulu Terengganu'), array('Zone' => 'TRG02', 'Negeri' => 'TERENGGANU', 'Lokasi' => 'Besut, Setiu'), array('Zone' => 'WLY02', 'Negeri' => 'WILAYAH PERSEKUTUAN LABUAN', 'Lokasi' => 'Labuan'));
// Zone lookup table; filled from the $zones array defined above and keyed
// by (Zone, Negeri).
scraperwiki::sqliteexecute('CREATE TABLE IF NOT EXISTS `zone` (`Zone` text,`Negeri` text,`Lokasi` text)');
scraperwiki::save_sqlite(array('Zone', 'Negeri'), $zones, $table_name = "zone");
// Prayer-time rows (one per zone per date) are written into `solat` by the
// scraping loop that follows.
scraperwiki::sqliteexecute('CREATE TABLE IF NOT EXISTS `solat` (`Zone` text, `Tarikh` text, `Hari` text,`Imsak` text,`Subuh` text,  `Syuruk` text, `Zohor` text, `Asar` text, `Maghrib` text, `Isyak` text )');
foreach ($zones as $zone) {
    $rows = array();
    for ($month = 1; $month <= 12; $month++) {
        $html = scraperWiki::scrape('http://www.e-solat.gov.my/web/waktusolat.php?zone=' . $zone['Zone'] . '&state=&year=' . date('Y') . '&jenis=year&bulan=' . $month . '&LG=BM');
        echo $html;
        $dom = new simple_html_dom();
        $dom->load($html);
        $trs = $dom->find('table', 9)->find('tr');
        echo $trs;
        $day = 0;
        foreach ($trs as $tr) {
            if ($day > 0 && $day < count($trs) - 2) {
                $daily = array();
                $daily['Zone'] = $zone['Zone'];
                $daily['Tarikh'] = date('Y') . '-' . str_pad($month, 2, '0', STR_PAD_LEFT) . '-' . str_pad($day, 2, '0', STR_PAD_LEFT);
开发者ID:flyeven,项目名称:scraperwiki-scraper-vault,代码行数:31,代码来源:esolat_10.php


注:本文中的scraperwiki::sqliteexecute方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。