本文整理汇总了Python中sentry.utils.db.is_postgres函数的典型用法代码示例。如果您正苦于以下问题:Python is_postgres函数的具体用法?Python is_postgres怎么用?Python is_postgres使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了is_postgres函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: get_group_tag_value_count
def get_group_tag_value_count(self, project_id, group_id, environment_id, key):
    """Return the total ``times_seen`` across all values of ``key`` for a group.

    On Postgres the scan is capped at the 10,000 most recently seen rows so
    the query has a bounded cost (the total may therefore be approximate on
    very large groups).  On other backends, falls back to an ORM aggregate
    over the last 7 days.

    NOTE(review): ``project_id`` and ``environment_id`` are accepted but not
    used by either path — presumably for interface compatibility; confirm
    against the tagstore base class.
    """
    if db.is_postgres():
        # This doesnt guarantee percentage is accurate, but it does ensure
        # that the query has a maximum cost
        using = router.db_for_read(models.GroupTagValue)
        cursor = connections[using].cursor()
        cursor.execute(
            """
            SELECT SUM(t)
            FROM (
                SELECT times_seen as t
                FROM sentry_messagefiltervalue
                WHERE group_id = %s
                AND key = %s
                ORDER BY last_seen DESC
                LIMIT 10000
            ) as a
            """, [group_id, key]
        )
        # SUM over zero rows yields NULL/None; normalize to 0.
        return cursor.fetchone()[0] or 0

    cutoff = timezone.now() - timedelta(days=7)
    return models.GroupTagValue.objects.filter(
        group_id=group_id,
        key=key,
        last_seen__gte=cutoff,
    ).aggregate(t=Sum('times_seen'))['t']
示例2: create_counter_function
def create_counter_function(db, created_models, **kwargs):
    """Signal handler: install the Postgres project-counter function.

    Creates (or replaces) ``sentry_increment_project_counter``, a plpgsql
    upsert loop that atomically increments a per-project counter.  No-op on
    non-Postgres databases or when the ``Counter`` model was not just
    created.
    """
    if not is_postgres(db):
        return

    if Counter not in created_models:
        return

    cursor = connections[db].cursor()
    cursor.execute(
        '''
        create or replace function sentry_increment_project_counter(
        project bigint, delta int) returns int as $$
        declare
        new_val int;
        begin
        loop
        update sentry_projectcounter set value = value + delta
        where project_id = project
        returning value into new_val;
        if found then
        return new_val;
        end if;
        begin
        insert into sentry_projectcounter(project_id, value)
        values (project, delta)
        returning value into new_val;
        return new_val;
        exception when unique_violation then
        end;
        end loop;
        end
        $$ language plpgsql;
        '''
    )
示例3: get_top_values
def get_top_values(cls, group_id, key, limit=3):
    """Return up to ``limit`` tag values for a group, ordered by times_seen.

    On Postgres, restricts the candidate set to the 10,000 most recently
    seen rows before ranking, so the query cost is bounded (results may be
    approximate for very large groups).  Elsewhere, ranks rows seen in the
    last 7 days via the ORM.
    """
    if db.is_postgres():
        # This doesnt guarantee percentage is accurate, but it does ensure
        # that the query has a maximum cost
        return list(
            cls.objects.raw(
                # %%s survives the % limit formatting below as the DB-API
                # placeholder; only LIMIT %d is interpolated here.
                """
                SELECT *
                FROM (
                    SELECT *
                    FROM sentry_messagefiltervalue
                    WHERE group_id = %%s
                    AND key = %%s
                    ORDER BY last_seen DESC
                    LIMIT 10000
                ) as a
                ORDER BY times_seen DESC
                LIMIT %d
                """
                % limit,
                [group_id, key],
            )
        )

    cutoff = timezone.now() - timedelta(days=7)
    return list(cls.objects.filter(group=group_id, key=key, last_seen__gte=cutoff).order_by("-times_seen")[:limit])
示例4: create_default_project
def create_default_project(id, name, slug, verbosity=2, **kwargs):
    """Create the internal Sentry project with a fixed primary key.

    Idempotent: returns None if a project with ``id`` already exists.
    Ensures a superuser (creating a fallback ``sentry`` user if none),
    a ``sentry`` organization and team, then inserts the project with an
    explicit id.  Returns the created Project.
    """
    if Project.objects.filter(id=id).exists():
        return

    try:
        user = User.objects.filter(is_superuser=True)[0]
    except IndexError:
        user, _ = User.objects.get_or_create(username="sentry", defaults={"email": "[email protected]"})

    org, _ = Organization.objects.get_or_create(slug="sentry", defaults={"owner": user, "name": "Sentry"})
    team, _ = Team.objects.get_or_create(organization=org, slug="sentry", defaults={"name": "Sentry"})

    project = Project.objects.create(
        id=id, public=False, name=name, slug=slug, team=team, organization=team.organization, **kwargs
    )

    # HACK: inserting with an explicit id leaves the Postgres sequence
    # behind; bump it manually so the next implicit insert doesn't collide.
    if db.is_postgres(project._state.db):
        connection = connections[project._state.db]
        cursor = connection.cursor()
        cursor.execute(PROJECT_SEQUENCE_FIX)

    project.update_option("sentry:origins", ["*"])

    if verbosity > 0:
        print("Created internal Sentry project (slug=%s, id=%s)" % (project.slug, project.id))

    return project
示例5: bulk_delete_objects
def bulk_delete_objects(model, limit=10000, transaction_id=None, logger=None, **filters):
    """Delete up to ``limit`` rows of ``model`` matching ``filters``.

    Uses a single bounded DELETE on Postgres/MySQL; on unknown backends
    falls back to slow per-row deletion.  Returns True when rows were
    deleted (i.e. more may remain), False when nothing matched.
    """
    connection = connections[router.db_for_write(model)]
    quote_name = connection.ops.quote_name

    query = []
    params = []
    for column, value in filters.items():
        query.append('%s = %%s' % (quote_name(column), ))
        params.append(value)

    if db.is_postgres():
        # Postgres can't LIMIT a DELETE directly; select bounded ids first.
        query = """
            delete from %(table)s
            where id = any(array(
                select id
                from %(table)s
                where (%(query)s)
                limit %(limit)d
            ))
        """ % dict(
            query=' AND '.join(query),
            table=model._meta.db_table,
            limit=limit,
        )
    elif db.is_mysql():
        query = """
            delete from %(table)s
            where (%(query)s)
            limit %(limit)d
        """ % dict(
            query=' AND '.join(query),
            table=model._meta.db_table,
            limit=limit,
        )
    else:
        if logger is not None:
            logger.warning('Using slow deletion strategy due to unknown database')
        has_more = False
        for obj in model.objects.filter(**filters)[:limit]:
            obj.delete()
            has_more = True
        return has_more

    cursor = connection.cursor()
    cursor.execute(query, params)

    has_more = cursor.rowcount > 0
    if has_more and logger is not None and _leaf_re.search(model.__name__) is None:
        logger.info(
            'object.delete.bulk_executed',
            extra=dict(
                # list(...) so dict views concatenate on Python 3 as well
                # (dict.items() + list worked only on Python 2).
                list(filters.items()) + [
                    ('model', model.__name__),
                    ('transaction_id', transaction_id),
                ]
            )
        )
    return has_more
示例6: iterator
def iterator(self, chunk_size=100):
    """Yield result chunks of up to ``chunk_size`` rows.

    Dispatches to the Postgres-specific iterator when available, otherwise
    to the generic one.
    """
    if db.is_postgres():
        chunks = self.iterator_postgres(chunk_size)
    else:
        chunks = self.iterator_generic(chunk_size)
    for chunk in chunks:
        yield chunk
示例7: forwards
def forwards(self, orm):
    """Migration: add index on Group (project_id, first_release_id).

    On Postgres the index is built CONCURRENTLY, which cannot run inside a
    transaction, so the surrounding transaction is committed first and
    restarted afterwards.
    """
    if is_postgres():
        db.commit_transaction()
        db.execute("CREATE INDEX CONCURRENTLY sentry_groupedmessage_project_id_31335ae34c8ef983 ON sentry_groupedmessage (project_id, first_release_id)")
        db.start_transaction()
    else:
        db.create_index('sentry_groupedmessage', ['project_id', 'first_release_id'])
示例8: forwards
def forwards(self, orm):
    """Migration: drop the index on GroupHash ['hash'], best-effort.

    Postgres-only; wrapped in an atomic block and any failure (e.g. index
    already gone) is deliberately swallowed so the migration stays
    re-runnable.
    """
    if is_postgres():
        try:
            with transaction.atomic():
                db.delete_index(u'sentry_grouphash', ['hash'])
        except Exception:
            # best-effort: ignore a missing index so reruns succeed
            pass
示例9: increment_project_counter
def increment_project_counter(project, delta=1):
    """This method primarily exists so that south code can use it.

    Atomically increments the per-project counter by ``delta`` and returns
    the new value.  Implemented per backend: a plpgsql function on
    Postgres, insert-or-ignore + retry loop on SQLite, and an
    ``on duplicate key`` upsert on MySQL.

    Raises ValueError for non-positive deltas and AssertionError on an
    unsupported backend.
    """
    if delta <= 0:
        raise ValueError('There is only one way, and that\'s up.')

    cur = connection.cursor()
    try:
        if is_postgres():
            cur.execute(
                '''
                select sentry_increment_project_counter(%s, %s)
                ''', [project.id, delta]
            )
            return cur.fetchone()[0]
        elif is_sqlite():
            # Ensure the row exists, then read the current value.
            cur.execute(
                '''
                insert or ignore into sentry_projectcounter
                (project_id, value) values (%s, 0);
                ''', [project.id]
            )
            value = cur.execute(
                '''
                select value from sentry_projectcounter
                where project_id = %s
                ''', [project.id]
            ).fetchone()[0]
            # NOTE(review): ``value`` is read once before the loop, so a
            # concurrent increment between the select and the update could
            # make the returned value stale — confirm this is acceptable.
            while 1:
                cur.execute(
                    '''
                    update sentry_projectcounter
                    set value = value + %s
                    where project_id = %s;
                    ''', [delta, project.id]
                )
                changes = cur.execute(
                    '''
                    select changes();
                    '''
                ).fetchone()[0]
                if changes != 0:
                    return value + delta
        elif is_mysql():
            cur.execute(
                '''
                insert into sentry_projectcounter
                (project_id, value)
                values (%s, @new_val := %s)
                on duplicate key
                update value = @new_val := value + %s
                ''', [project.id, delta, delta]
            )
            cur.execute('select @new_val')
            return cur.fetchone()[0]
        else:
            raise AssertionError("Not implemented database engine path")
    finally:
        cur.close()
示例10: get
def get(self, request, project, key):
    """
    List a Tag's Values
    ```````````````````

    Return a list of values associated with this key. The `query`
    parameter can be used to perform a "starts with" match on
    values.

    :pparam string organization_slug: the slug of the organization.
    :pparam string project_slug: the slug of the project.
    :pparam string key: the tag key to look up.
    :auth: required
    """
    # Reserved keys are stored internally with a 'sentry:' prefix.
    if key in ('release', 'user', 'filename', 'function'):
        lookup_key = 'sentry:{0}'.format(key)
    else:
        lookup_key = key

    try:
        tagkey = TagKey.objects.get(
            project=project,
            key=lookup_key,
            status=TagKeyStatus.VISIBLE,
        )
    except TagKey.DoesNotExist:
        raise ResourceDoesNotExist

    base_queryset = TagValue.objects.filter(
        project=project,
        key=tagkey.key,
    )

    query = request.GET.get('query')
    if query:
        if is_postgres():
            # not quite optimal, but best we can do with ORM: rank by
            # times_seen first, bounded to 10k candidate ids
            queryset = TagValue.objects.filter(
                id__in=base_queryset.order_by('-times_seen')[:10000]
            )
        else:
            # MySQL can't handle an `IN` with a `LIMIT` clause
            queryset = base_queryset

        queryset = queryset.filter(value__istartswith=query)
    else:
        queryset = TagValue.objects.filter(
            project=project,
            key=tagkey.key,
        )

    return self.paginate(
        request=request,
        queryset=queryset,
        order_by='-times_seen',
        on_results=lambda x: serialize(x, request.user),
    )
示例11: bulk_delete_objects
def bulk_delete_objects(model, group_id=None, project_id=None, limit=10000,
                        logger=None):
    """Delete up to ``limit`` rows of ``model`` by group_id or project_id.

    Exactly one of ``group_id``/``project_id`` must be given.  Uses a
    bounded single-statement DELETE on Postgres/MySQL; on unknown backends
    falls back to slow per-row deletion (which filters by project only).
    Returns True when rows were deleted, i.e. more may remain.
    """
    assert group_id or project_id, 'Must pass either project_id or group_id'

    if group_id:
        column = 'group_id'
        value = group_id
    elif project_id:
        column = 'project_id'
        value = project_id

    connection = connections['default']
    quote_name = connection.ops.quote_name

    if logger is not None:
        logger.info('Removing %r objects where %s=%r', model, column, value)

    if db.is_postgres():
        # Postgres can't LIMIT a DELETE directly; select bounded ids first.
        query = """
            delete from %(table)s
            where id = any(array(
                select id
                from %(table)s
                where %(column)s = %%s
                limit %(limit)d
            ))
        """ % dict(
            table=model._meta.db_table,
            column=quote_name(column),
            limit=limit,
        )
        params = [value]
    elif db.is_mysql():
        query = """
            delete from %(table)s
            where %(column)s = %%s
            limit %(limit)d
        """ % dict(
            table=model._meta.db_table,
            column=quote_name(column),
            limit=limit,
        )
        params = [value]
    else:
        # Guard the logger like the other branches; callers may pass None.
        if logger is not None:
            logger.warning('Using slow deletion strategy due to unknown database')
        has_more = False
        for obj in model.objects.filter(project=project_id)[:limit]:
            obj.delete()
            has_more = True
        return has_more

    cursor = connection.cursor()
    cursor.execute(query, params)
    return cursor.rowcount > 0
示例12: create_default_project
def create_default_project(id, name, slug, verbosity=2, **kwargs):
    """Create the internal Sentry project with a fixed primary key.

    Idempotent: returns None if a project with ``id`` already exists.
    Ensures the ``sentry`` organization and team (granting the first
    superuser, if any, the owner role), then inserts the project with an
    explicit id.  Returns the created Project.
    """
    if Project.objects.filter(id=id).exists():
        return

    try:
        user = User.objects.filter(is_superuser=True)[0]
    except IndexError:
        user = None

    org, _ = Organization.objects.get_or_create(
        slug='sentry',
        defaults={
            'name': 'Sentry',
        }
    )

    if user:
        OrganizationMember.objects.get_or_create(
            user=user,
            organization=org,
            role='owner',
        )

    team, _ = Team.objects.get_or_create(
        organization=org,
        slug='sentry',
        defaults={
            'name': 'Sentry',
        }
    )

    project = Project.objects.create(
        id=id,
        public=False,
        name=name,
        slug=slug,
        team=team,
        organization=team.organization,
        **kwargs
    )

    # HACK: inserting with an explicit id leaves the Postgres sequence
    # behind; bump it manually so the next implicit insert doesn't collide.
    if db.is_postgres(project._state.db):
        connection = connections[project._state.db]
        cursor = connection.cursor()
        cursor.execute(PROJECT_SEQUENCE_FIX)

    project.update_option('sentry:origins', ['*'])

    if verbosity > 0:
        echo('Created internal Sentry project (slug=%s, id=%s)' % (project.slug, project.id))

    return project
示例13: create_citext_extension
def create_citext_extension(db, **kwargs):
    """Best-effort install of the Postgres ``citext`` extension on ``db``.

    No-op on non-Postgres backends; failures (e.g. insufficient privileges)
    are deliberately swallowed.
    """
    from sentry.utils.db import is_postgres

    # We always need the citext extension installed for Postgres,
    # and for tests, it's not always guaranteed that we will have
    # run full migrations which installed it.
    if is_postgres(db):
        cursor = connections[db].cursor()
        try:
            cursor.execute('CREATE EXTENSION IF NOT EXISTS citext')
        except Exception:
            # best-effort: the extension may exist or we may lack privileges
            pass
示例14: bulk_delete_objects
def bulk_delete_objects(model, limit=10000, logger=None, using='default',
                        **filters):
    """Delete up to ``limit`` rows of ``model`` matching ``filters``.

    Runs against the ``using`` connection.  Uses a bounded single-statement
    DELETE on Postgres/MySQL; on unknown backends falls back to slow
    per-row deletion.  Returns True when rows were deleted, i.e. more may
    remain.
    """
    connection = connections[using]
    quote_name = connection.ops.quote_name

    query = []
    params = []
    for column, value in filters.items():
        query.append('%s = %%s' % (quote_name(column),))
        params.append(value)

    if logger is not None:
        # NOTE: logs only the last filter pair bound by the loop above.
        logger.info('Removing %r objects where %s=%r', model, column, value)

    if db.is_postgres():
        # Postgres can't LIMIT a DELETE directly; select bounded ids first.
        query = """
            delete from %(table)s
            where id = any(array(
                select id
                from %(table)s
                where (%(query)s)
                limit %(limit)d
            ))
        """ % dict(
            query=' AND '.join(query),
            table=model._meta.db_table,
            limit=limit,
        )
    elif db.is_mysql():
        query = """
            delete from %(table)s
            where (%(query)s)
            limit %(limit)d
        """ % dict(
            query=' AND '.join(query),
            table=model._meta.db_table,
            limit=limit,
        )
    else:
        if logger is not None:
            logger.warning('Using slow deletion strategy due to unknown database')
        has_more = False
        for obj in model.objects.filter(**filters)[:limit]:
            obj.delete()
            has_more = True
        return has_more

    cursor = connection.cursor()
    cursor.execute(query, params)
    return cursor.rowcount > 0
示例15: get_top_group_tag_values
def get_top_group_tag_values(self, project_id, group_id,
                             environment_id, key, limit=TOP_VALUES_DEFAULT_LIMIT):
    """Return up to ``limit`` GroupTagValue rows, ordered by times_seen.

    On Postgres, the candidate set is capped at the 10,000 most recently
    seen rows before ranking, so the query has bounded cost (results may be
    approximate for very large groups).  Elsewhere, ranks rows seen in the
    last 7 days via the ORM.  ``environment_id=None`` maps to the aggregate
    environment on the Postgres path.
    """
    if db.is_postgres():
        environment_id = AGGREGATE_ENVIRONMENT_ID if environment_id is None else environment_id

        # This doesnt guarantee percentage is accurate, but it does ensure
        # that the query has a maximum cost
        return list(
            map(
                transformers[models.GroupTagValue],
                models.GroupTagValue.objects.raw(
                    # %%s survives the % limit formatting below as the
                    # DB-API placeholder; only LIMIT %d is interpolated.
                    """
                    SELECT *
                    FROM (
                        SELECT tagstore_grouptagvalue.*
                        FROM tagstore_grouptagvalue
                        INNER JOIN tagstore_tagkey
                        ON (tagstore_grouptagvalue.key_id = tagstore_tagkey.id)
                        WHERE tagstore_grouptagvalue.group_id = %%s
                        AND tagstore_tagkey.project_id = %%s
                        AND tagstore_grouptagvalue.project_id = %%s
                        AND tagstore_tagkey.environment_id = %%s
                        AND tagstore_tagkey.key = %%s
                        ORDER BY last_seen DESC
                        LIMIT 10000
                    ) as a
                    ORDER BY times_seen DESC
                    LIMIT %d
                    """ % limit,
                    [group_id, project_id, project_id, environment_id, key]
                ),
            )
        )

    cutoff = timezone.now() - timedelta(days=7)
    qs = models.GroupTagValue.objects.select_related('_key', '_value').filter(
        project_id=project_id,
        group_id=group_id,
        _key__project_id=project_id,
        _key__key=key,
        _value__project_id=project_id,
        last_seen__gte=cutoff,
    )
    qs = self._add_environment_filter(qs, environment_id)
    return list(
        map(
            transformers[models.GroupTagValue],
            qs.order_by('-times_seen')[:limit],
        )
    )