Python util.get_dbconn Function Code Examples

This article collects typical usage examples of the Python function pyiem.util.get_dbconn. If you are wondering what get_dbconn does, how to call it, or what real-world uses of it look like, the selected code samples below may help.


A total of 15 code examples of the get_dbconn function are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps surface better Python code samples.
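Before the full examples, here is a minimal sketch of the call pattern that recurs throughout them: get_dbconn takes a database name (and optionally a user) and returns a psycopg2 connection that is then used through cursors, pandas' read_sql, commit, and close. The 'mesosite' database, the 'nobody' user, and the query against the stations table are illustrative placeholders borrowed from the examples below; adjust them to your own environment.

from pyiem.util import get_dbconn

# Open a connection; the database name and user here are placeholders
# taken from the examples below.
pgconn = get_dbconn('mesosite', user='nobody')
cursor = pgconn.cursor()
# Run a simple query against the stations metadata table, which several
# of the examples below also query.
cursor.execute("SELECT id, name FROM stations LIMIT 5")
for row in cursor:
    print(row)
cursor.close()
pgconn.commit()  # no-op for a read-only query, but required after INSERT/UPDATE
pgconn.close()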

Example 1: main

def main():
    """Go!"""
    nt = NetworkTable("IA_ASOS")
    pgconn = get_dbconn('iem')
    df = read_sql("""
    SELECT id, valid, tmpf::int as tmpf, dwpf::int as dwpf,
    sknt from current_log c JOIN stations t
    on (c.iemid = t.iemid) WHERE t.network = 'IA_ASOS' and
    c.valid > 'TODAY' and c.tmpf > 70 ORDER by id ASC
    """, pgconn, index_col=None)

    pgconn = get_dbconn('asos')
    for _, row in df.iterrows():
        df2 = read_sql("""
            SELECT valid, tmpf, dwpf, sknt from alldata WHERE station = %s
            and valid < '2018-10-03' and tmpf::int >= %s and dwpf::int >= %s
            and sknt >= %s ORDER by valid DESC
        """, pgconn, params=(row['id'], row['tmpf'], row['dwpf'], row['sknt']),
                       index_col=None)
        if len(df2.index) > 5:
            continue
        lastdate = None
        if not df2.empty:
            lastdate = df2.iloc[0]['valid'].date()
        print(
            ("%s,%s,%s,%s,%.0f,%s,%s,%s"
             ) % (row['id'], row['valid'], row['tmpf'], row['dwpf'],
                  row['sknt'], len(df2.index), lastdate,
                  nt.sts[row['id']]['archive_begin'].year)
        )
Developer: akrherz, Project: DEV, Lines: 30, Source file: 181003_extreme.py

Example 2: main

def main(argv):
    """Go Main Go."""
    table = argv[1]
    nt = NetworkTable(["WFO", "RFC", "NWS", "NCEP", "CWSU", "WSO"])
    pgconn = get_dbconn('afos', user='mesonet')
    mpgconn = get_dbconn('mesosite')
    cursor = pgconn.cursor()
    mcursor = mpgconn.cursor()
    df = read_sql("""
        SELECT source, count(*) from """ + table + """
        WHERE source is not null GROUP by source ORDER by source
    """, pgconn, index_col='source')
    for source, row in df.iterrows():
        if source[0] not in ['K', 'P']:
            continue
        if source in UNKNOWN:
            continue
        iemsource = source[1:] if source[0] == 'K' else source
        if iemsource in nt.sts:
            continue
        if source in XREF:
            cursor.execute("""
                UPDATE """ + table + """ SET source = %s WHERE source = %s
            """, (XREF[source], source))
            print(("Correcting %s -> %s, %s rows"
                   ) % (source, XREF[source], cursor.rowcount))
        else:
            if row['count'] < 10:
                print("skipping %s as row count is low" % (source, ))
                continue
            mcursor.execute("""
                WITH centers as (
                    select id, geom::geography from stations where network in
                    ('WFO', 'RFC', 'NWS', 'NCEP', 'CWSU', 'WSO')
                ), asos as (
                    SELECT geom::geography from stations where id = %s
                    and network ~* 'ASOS'
                )
                SELECT c.id as center, st_distance(c.geom, a.geom)
                from centers c, asos a ORDER by st_distance ASC
            """, (iemsource, ))
            if mcursor.rowcount < 5:
                print("Source: %s is double unknown" % (source, ))
                continue
            for i, row2 in enumerate(mcursor):
                print("%s %s %.2f" % (source, row2[0], row2[1]))
                if i > 4:
                    break
            newval = input(
                "What do you want to do with %s (count:%s)? " % (
                    source, row['count']))
            if len(newval) == 4:
                XREF[source] = newval
            else:
                UNKNOWN.append(source)

    print(json.dumps(XREF, indent=4))
    print(UNKNOWN)
    cursor.close()
    pgconn.commit()
Developer: akrherz, Project: DEV, Lines: 60, Source file: correct_wmo_source.py

Example 3: main

def main(argv):
    """Go Main Go."""
    afostable = argv[1]
    stations = load_stations()
    iem_pgconn = get_dbconn('iem')
    icursor = iem_pgconn.cursor()
    pgconn = get_dbconn('afos')
    cursor = pgconn.cursor()
    cursor.execute("""
        SELECT entered, data from """ + afostable + """
        WHERE substr(pil, 1, 3) = 'DSM'
        ORDER by entered
    """)
    updates = 0
    for row in cursor:
        if row[1].startswith("\001"):
            try:
                dsm = parser(row[1], utcnow=row[0])
                dsm.tzlocalize(stations)
            except Exception as exp:
                print(exp)
                print(row[1])
                continue
            dsm.sql(icursor)
        else:
            try:
                dsm = process(row[1])
                if dsm is None:
                    continue
                dsm.compute_times(row[0])
                dsm.tzlocalize(stations[dsm.station])
            except Exception as exp:
                print(exp)
                print(row[1])
                continue
            # print(row[1])
            dsm.sql(icursor)
            # print("%s %s %s/%s %s\n\n" % (
            #    dsm.station, dsm.date, dsm.groupdict['high'],
            #    dsm.groupdict['low'], dsm.groupdict['pday']))
        updates += 1
        if updates % 1000 == 0:
            icursor.close()
            iem_pgconn.commit()
            icursor = iem_pgconn.cursor()

    icursor.close()
    iem_pgconn.commit()
Developer: akrherz, Project: DEV, Lines: 48, Source file: fix_asos_dsm_gusts.py

Example 4: get_data

def get_data():
    """The data we want and the data we need"""
    pgconn = get_dbconn('coop', user='nobody')
    df = read_sql("""
        select year, week_ending, num_value, state_alpha from nass_quickstats
        where commodity_desc = 'CORN' and statisticcat_desc = 'PROGRESS'
        and unit_desc = 'PCT SILKING' and
        util_practice_desc = 'ALL UTILIZATION PRACTICES'
        and num_value is not null
        ORDER by state_alpha, week_ending
    """, pgconn, index_col=None)
    df['week_ending'] = pd.to_datetime(df['week_ending'])
    data = {}
    for state, gdf in df.groupby('state_alpha'):
        sdf = gdf.copy()
        sdf.set_index('week_ending', inplace=True)
        newdf = sdf.resample('D').interpolate(method='linear')
        y10 = newdf[newdf['year'] > 2007]
        doyavgs = y10.groupby(y10.index.strftime("%m%d")).mean()
        lastdate = pd.Timestamp(newdf.index.values[-1]).to_pydatetime()
        data[state] = {'date': lastdate,
                       'avg': doyavgs.at[lastdate.strftime("%m%d"),
                                         'num_value'],
                       'd2017': newdf.at[lastdate,
                                         'num_value']}
        print("%s %s" % (state, data[state]))
    return data
Developer: akrherz, Project: DEV, Lines: 27, Source file: corn_map.py

Example 5: main

def main():
    """Ingest things from Gio"""
    pgconn = get_dbconn('td')
    cursor = pgconn.cursor()

    df = pd.read_csv(sys.argv[1])
    uniqueid = sys.argv[2]

    cursor.execute("""
        DELETE from watertable_data where uniqueid = %s and
        valid between %s and %s
    """, (uniqueid, df['Date'].min(), df['Date'].max()))
    deleted = cursor.rowcount
    if deleted > 0:
        print("Removed %s" % (deleted,))

    inserts = 0
    for _idx, row in df.iterrows():
        if row['Date'] == ' ' or row['Date'] is None:
            continue
        cursor.execute("""
        INSERT into watertable_data
        (uniqueid, plotid, valid, depth_mm_qc, depth_mm)
        VALUES (%s, %s, %s, %s, %s)
        """, (uniqueid, row['plot'], row['Date'], row.get('WAT4'),
              row.get('WAT4')))
        inserts += 1
    print("Inserted %s, Deleted %s entries for %s" % (inserts, deleted,
                                                      uniqueid))
    cursor.close()
    pgconn.commit()
    pgconn.close()
Developer: akrherz, Project: datateam, Lines: 32, Source file: harvest_watertable_csv.py

Example 6: main

def main():
    """Go"""
    pgconn = get_dbconn("afos")

    acursor = pgconn.cursor()

    payload = getattr(sys.stdin, 'buffer', sys.stdin).read()
    payload = payload.decode('ascii', errors='ignore')
    data = payload.replace("\r\r\n", "z")

    tokens = re.findall(r"(\.A [A-Z0-9]{3} .*?=)", data)

    utcnow = datetime.datetime.utcnow()
    gmt = utcnow.replace(tzinfo=pytz.utc)
    gmt = gmt.replace(second=0)

    table = "products_%s_0106" % (gmt.year,)
    if gmt.month > 6:
        table = "products_%s_0712" % (gmt.year,)

    for token in tokens:
        # print(tokens)
        sql = """
        INSERT into """ + table + """
        (pil, data, entered) values(%s,%s,%s)
        """
        sqlargs = ("%s%s" % ('RR7', token[3:6]), token.replace("z", "\n"), gmt)
        acursor.execute(sql, sqlargs)

    acursor.close()
    pgconn.commit()
    pgconn.close()
Developer: akrherz, Project: pyWWA, Lines: 32, Source file: rr7.py

Example 7: main

def main():
    """Go Main Go."""
    nt = NetworkTable(["WFO", "CWSU"])
    df = read_sql("""
    SELECT screen_name, access_token, access_token_secret
    from iembot_twitter_oauth
    WHERE access_token is not null
    """, get_dbconn('mesosite'), index_col='screen_name')

    wfos = list(nt.sts.keys())
    wfos.sort()
    for wfo in wfos:
        username = "iembot_%s" % (wfo.lower()[-3:], )
        if username not in df.index:
            print("%s is unknown?" % (username, ))
            continue
        api = twitter.Api(
            consumer_key=PROPS['bot.twitter.consumerkey'],
            consumer_secret=PROPS['bot.twitter.consumersecret'],
            access_token_key=df.at[username, 'access_token'],
            access_token_secret=df.at[username, 'access_token_secret'])

        location = "%s, %s" % (nt.sts[wfo]['name'], nt.sts[wfo]['state'])
        desc = (
            'Syndication of National Weather Service Office %s. '
            'Unmonitored, contact @akrherz who developed this.'
        ) % (location, )
        print("len(desc) = %s" % (len(desc), ))
        profileURL = "https://mesonet.agron.iastate.edu/projects/iembot/"
        twuser = api.UpdateProfile(
            description=desc, profileURL=profileURL,
            location=location)
        # twuser.AsDict()['followers_count']
        print("%s %s" % (username, location))
Developer: akrherz, Project: DEV, Lines: 34, Source file: update_iembot_profiles.py

Example 8: __init__

    def __init__(self, network, cursor=None):
        """A class representing a network(s) of IEM metadata

        Args:
          network (str or list): A network identifier used by the IEM, this can
            be either a string or a list of strings.
          cursor (dbcursor,optional): A database cursor to use for the query
        """
        self.sts = OrderedDict()
        if network is None:
            return

        if cursor is None:
            dbconn = get_dbconn('mesosite', user='nobody')
            cursor = dbconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
        if isinstance(network, str):
            network = [network, ]
        cursor.execute("""
            WITH myattrs as (
                SELECT a.iemid, array_agg(attr) as attrs,
                array_agg(value) as attr_values from stations s JOIN
                station_attributes a on (s.iemid = a.iemid) WHERE
                s.network in %s GROUP by a.iemid
            )
            SELECT s.*, ST_x(geom) as lon, ST_y(geom) as lat,
            a.attrs, a.attr_values
            from stations s LEFT JOIN myattrs a
            on (s.iemid = a.iemid)
            WHERE network in %s ORDER by name ASC
            """, (tuple(network), tuple(network)))
        for row in cursor:
            self.sts[row['id']] = dict(row)
            self.sts[row['id']]['attributes'] = dict(
                zip(row['attrs'] or [], row['attr_values'] or []))
Developer: akrherz, Project: pyIEM, Lines: 34, Source file: network.py

Example 9: main

def main():
    """Go Main Go"""
    pgconn = get_dbconn('asos')
    dfin = read_sql("""
    with mob as (
        select date_trunc('hour', valid) as ts, avg(dwpf) from alldata
        where station = 'MOB' and dwpf is not null GROUP by ts),
    cmi as (
        select date_trunc('hour', valid) as ts, avg(dwpf) from alldata
        where station = 'CMI' and dwpf is not null GROUP by ts),
    agg as (
        select m.ts, m.avg as dwpf, c.avg as tmpf
        from mob m JOIN cmi c on (m.ts = c.ts))
    select extract(month from ts) as month, extract(hour from ts) as hour,
    sum(case when dwpf >= tmpf then 1 else 0 end) / count(*)::float * 100.
    as freq from agg GROUP by month, hour ORDER by month, hour
    """, pgconn, index_col=None)

    df = dfin.pivot("month", "hour", "freq")

    fig, ax = plt.subplots(figsize=(9, 6))
    ax.set_title(("Hourly Frequency of Mobile (MOB) Dew Point\n"
                  "greater than or equal to Champaign (CMI) Dew Point"))
    sns.heatmap(df, annot=True, fmt=".0f", linewidths=.5, ax=ax, vmin=5, vmax=100)
    print(ax.get_yticks())
    ax.set_xlabel("Hour of Day (CDT or CST)")
    ax.set_xticklabels(["Mid", "1AM", "2", "3", "4", "5", "6", "7", "8", "9", "10",
                   "11", "Noon", "1PM", "2", "3", "4", "5", "6", "7", "8", "9", "10",
                   "11"])
    ax.set_yticklabels(calendar.month_abbr[1:])
    fig.savefig('test.png')
Developer: akrherz, Project: DEV, Lines: 31, Source file: heatmap.py

Example 10: loadqc

def loadqc(cursor=None, date=None):
    """ Load the current IEM Tracker QC'd variables

    Args:
      cursor (cursor,optional): Optionally provided database cursor
      date (date,optional): Defaults to today, which tickets are valid for now
    """
    if date is None:
        date = datetime.date.today()
    qdict = {}
    if cursor is None:
        portfolio = get_dbconn('portfolio', user='nobody')
        cursor = portfolio.cursor()

    cursor.execute("""
        select s_mid, sensor, status from tt_base
        WHERE sensor is not null
        and date(entered) <= %s and
        (status != 'CLOSED' or closed > %s)
        and s_mid is not null
    """, (date, date))
    for row in cursor:
        sid = row[0]
        if row[0] not in qdict:
            qdict[sid] = {}
        for vname in row[1].split(","):
            qdict[sid][vname.strip()] = True
    return qdict
Developer: akrherz, Project: pyIEM, Lines: 28, Source file: tracker.py

Example 11: main

def main(argv):
    """Go Main Go"""
    basedir = "/mesonet/data/prism"
    outdir = "swatfiles_prism_arealaverage"
    if os.path.isdir(outdir):
        print("ABORT: as %s exists" % (outdir, ))
        return
    os.mkdir(outdir)
    for dirname in ['precipitation', 'temperature']:
        os.mkdir("%s/%s" % (outdir, dirname))
    pgconn = get_dbconn('idep')
    huc8df = gpd.GeoDataFrame.from_postgis("""
    SELECT huc8, ST_Transform(simple_geom, %s) as geo from wbd_huc8
    WHERE swat_use ORDER by huc8
    """, pgconn, params=(PROJSTR,), index_col='huc8', geom_col='geo')
    hucs = huc8df.index.values
    years = range(1981, 2018)
    nc = netCDF4.Dataset("%s/%s_daily.nc" % (basedir, years[0]))

    # compute the affine
    ncaffine = Affine(nc.variables['lon'][1] - nc.variables['lon'][0],
                      0.,
                      nc.variables['lon'][0],
                      0.,
                      nc.variables['lat'][0] - nc.variables['lat'][1],
                      nc.variables['lat'][-1])
    czs = CachingZonalStats(ncaffine)
    nc.close()

    fps = []
    for year in years:
        nc = netCDF4.Dataset("%s/%s_daily.nc" % (basedir, year))
        basedate, timesz = get_basedate(nc)
        for i in tqdm(range(timesz), desc=str(year)):
            # date = basedate + datetime.timedelta(days=i)

            # keep array logic in top-down order
            tasmax = np.flipud(nc.variables['tmax'][i, :, :])
            tasmin = np.flipud(nc.variables['tmin'][i, :, :])
            pr = np.flipud(nc.variables['ppt'][i, :, :])
            mytasmax = czs.gen_stats(tasmax, huc8df['geo'])
            mytasmin = czs.gen_stats(tasmin, huc8df['geo'])
            mypr = czs.gen_stats(pr, huc8df['geo'])
            for j, huc12 in enumerate(hucs):
                if i == 0 and year == years[0]:
                    fps.append([open(('%s/precipitation/P%s.txt'
                                      ) % (outdir, huc12), 'w'),
                                open(('%s/temperature/T%s.txt'
                                      ) % (outdir, huc12), 'w')])
                    fps[j][0].write("%s\n" % (basedate.strftime("%Y%m%d"), ))
                    fps[j][1].write("%s\n" % (basedate.strftime("%Y%m%d"), ))

                fps[j][0].write(("%.1f\n"
                                 ) % (mypr[j], ))
                fps[j][1].write(("%.2f,%.2f\n"
                                 ) % (mytasmax[j], mytasmin[j]))

    for fp in fps:
        fp[0].close()
        fp[1].close()
Developer: akrherz, Project: DEV, Lines: 60, Source file: prism2swat.py

Example 12: main

def main():
    """Go"""
    pgconn = get_dbconn('postgis')
    df = read_sql("""
    with data as (
        select distinct date(issue), wfo, eventid from warnings
        where phenomena = 'TO' and significance = 'W' and issue > '1996-01-01')
    select wfo, date, count(*) from data GROUP by wfo, date
    ORDER by wfo, date
    """, pgconn, index_col=None)
    rows = []
    for wfo, df2 in df.groupby(by='wfo'):
        maxdate = df2['date'].max()
        mindate = df2['date'].min()
        data = [0] * ((maxdate - mindate).days + 4)
        for _, row in df2.iterrows():
            data[(row['date'] - mindate).days] = row['count']
        for i in range(0, len(data) - 2):
            if sum(data[i:i+3]) > 50 or wfo in COMPAREWFOS:
                date = mindate + datetime.timedelta(days=i)
                rows.append(dict(wfo=wfo, date=date, count=sum(data[i:i+3]),
                                 one=data[i], two=data[i+1], three=data[i+2]))

    df = pd.DataFrame(rows)
    df.sort_values('count', ascending=False, inplace=True)
    for _, row in df.head(15).iterrows():
        printr(row)
    for wfo in COMPAREWFOS:
        df2 = df[df['wfo'] == wfo]
        printr(df2.iloc[0])
Developer: akrherz, Project: DEV, Lines: 30, Source file: threeday.py

Example 13: main

def main():
    """Go"""
    pgconn = get_dbconn('iem')
    cursor = pgconn.cursor()

    cursor.execute("""
    with data as (
        select distinct t.iemid,
        date_trunc('hour', valid + '10 minutes'::interval) as v from
        current_log c JOIN stations t on (c.iemid = t.iemid)
        where raw ~* ' FU ' and t.country = 'US')
    SELECT v, count(*) from data GROUP by v ORDER by v ASC
    """)
    xs = []
    ys = []
    for row in cursor:
        xs.append(row[0])
        ys.append(row[1])

    (fig, ax) = plt.subplots(1, 1)
    ax.bar(xs, ys, width=(1./24.))
    ax.grid(True)
    ax.set_ylabel("Number of ASOS/METAR Sites")
    ax.set_xlabel("3-5 July 2017 Valid Time (Central Daylight Time)")
    ax.set_title(("Number of ASOS/METAR Sites Reporting Smoke (FU)\n"
                  "based on METAR reports for the United States processed by IEM"))
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%-I %p\n%-d %b',
                                                      tz=pytz.timezone("America/Chicago")))
    ax.set_position([0.1, 0.15, 0.8, 0.75])
    fig.savefig('test.png')
Developer: akrherz, Project: DEV, Lines: 30, Source file: smoke.py

Example 14: main

def main():
    """Go Main"""
    pgconn = get_dbconn('postgis')
    df = read_postgis("""
    select geom, issue from sbw where wfo = 'PUB' and phenomena = 'TO'
    and significance = 'W' and status = 'NEW' and issue > '2007-10-01'
    and issue < '2019-01-01'
    """, pgconn, geom_col='geom', crs={'init': 'epsg:4326', 'no_defs': True})

    bounds = df['geom'].total_bounds
    # bounds = [-102.90293903,   40.08745967,  -97.75622311,   43.35172981]
    bbuf = 0.25
    mp = MapPlot(
        sector='custom', west=bounds[0] - bbuf,
        south=bounds[1] - bbuf,
        east=bounds[2] + bbuf, north=bounds[3] + bbuf,
        continentalcolor='white',  # '#b3242c',
        title='NWS Pueblo Issued Tornado Warnings [2008-2018]',
        subtitle='%s warnings plotted' % (len(df.index), ))
    crs_new = ccrs.Mercator()
    crs = ccrs.PlateCarree()
    new_geometries = [crs_new.project_geometry(ii, src_crs=crs)
                      for ii in df['geom'].values]
    # mp.draw_cwas()
    mp.ax.add_geometries(new_geometries, crs=crs_new, lw=0.5,
                         edgecolor='red', facecolor='None', alpha=1,
                         zorder=5)
    mp.drawcounties()
    mp.postprocess(filename='test.png')
Developer: akrherz, Project: DEV, Lines: 29, Source file: nws_warning_plot.py

Example 15: main

def main():
    """Go!"""
    pgconn = get_dbconn('afos')
    cursor = pgconn.cursor()

    payload = getattr(sys.stdin, 'buffer', sys.stdin).read()
    prod = product.TextProduct(payload.decode('ascii', errors='ignore'))
    prod.valid = prod.valid.replace(second=0, minute=0, microsecond=0)
    offset = prod.unixtext.find(prod.afos[:3]) + 7
    sections = re.split("\n\n", prod.unixtext[offset:])

    table = "products_%s_0106" % (prod.valid.year,)
    if prod.valid.month > 6:
        table = "products_%s_0712" % (prod.valid.year,)

    for sect in sections:
        if sect[1:4].strip() == "":
            continue
        # print("%s%s %s %s %s" % (prod.afos[:3], sect[1:4], prod.source,
        #                          prod.valid, prod.wmo))
        cursor.execute("""
            INSERT into """+table+"""
            (pil, data, source, entered, wmo) values (%s, %s, %s, %s, %s)
        """, (prod.afos[:3] + sect[1:4], prod.text[:offset] + sect,
              prod.source, prod.valid, prod.wmo))

    cursor.close()
    pgconn.commit()
    pgconn.close()
Developer: akrherz, Project: pyWWA, Lines: 29, Source file: split_mav.py


Note: The pyiem.util.get_dbconn examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. Please consult each project's license before using or redistributing the code, and do not reproduce this article without permission.