commit: stringlengths 40-40
old_file: stringlengths 4-118
new_file: stringlengths 4-118
old_contents: stringlengths 1-2.94k (⌀ = nullable)
new_contents: stringlengths 1-4.43k
subject: stringlengths 15-444
message: stringlengths 16-3.45k
lang: stringclasses, 1 value
license: stringclasses, 13 values
repos: stringlengths 5-43.2k

commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos |
---|---|---|---|---|---|---|---|---|---|
9c7d0895dca2909143d633a1e335a811f43481b2 | examples/fabfile.py | examples/fabfile.py | """Example of integration between Fabric and Datadog.
"""
from fabric.api import *
from fabric.colors import *
from dogapi.fab import setup, notify
setup(api_key = "YOUR API KEY HERE")
# Make sure @notify is just above @task
@notify
@task(default=True, alias="success")
def sweet_task(some_arg, other_arg):
"""Always succeeds"""
print(green("My sweet task always runs properly."))
@notify
@task(alias="failure")
def boring_task(some_arg):
"""Always fails"""
print(red("My boring task is designed to fail."))
raise Exception("failure!!!")
| """Example of integration between Fabric and Datadog.
"""
from fabric.api import *
from fabric.colors import *
from dogapi.fab import setup, notify
setup(api_key = "YOUR API KEY HERE")
# Make sure @notify is just below @task
@task(default=True, alias="success")
@notify
def sweet_task(some_arg, other_arg):
"""Always succeeds"""
print(green("My sweet task always runs properly."))
@task(alias="failure")
@notify
def boring_task(some_arg):
"""Always fails"""
print(red("My boring task is designed to fail."))
raise Exception("failure!!!")
env.roledefs.update({
'webserver': ['localhost']
})
@task(alias="has_roles")
@notify
@roles('webserver')
@hosts('localhost')
def roles_task(arg_1, arg_2):
    run('touch /tmp/fab_test')
| Update fabric examples to reflect changes. | Update fabric examples to reflect changes.
| Python | bsd-3-clause | DataDog/dogapi,DataDog/dogapi |
0fb7e8d901addc801fb9b99d744666f573f672d3 | billjobs/migrations/0003_auto_20160822_2341.py | billjobs/migrations/0003_auto_20160822_2341.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-22 23:41
from __future__ import unicode_literals
from django.db import migrations
def add_billing_address(apps, schema_editor):
    ''' Data migration add billing_address in Bill from user billing_address
    field
    '''
    Bill = apps.get_model('billjobs', 'Bill')
    for bill in Bill.objects.all():
        bill.billing_address = bill.user.billing_address
        bill.save()
class Migration(migrations.Migration):
    dependencies = [
        ('billjobs', '0002_service_is_available_squashed_0005_bill_issuer_address_default'),
    ]
    operations = [
        migrations.RunPython(add_billing_address),
    ]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-22 23:41
from __future__ import unicode_literals
from django.db import migrations, models
def add_billing_address(apps, schema_editor):
    ''' Data migration add billing_address in Bill from user billing_address
    field
    '''
    Bill = apps.get_model('billjobs', 'Bill')
    for bill in Bill.objects.all():
        bill.billing_address = bill.user.userprofile.billing_address
        bill.save()
class Migration(migrations.Migration):
    dependencies = [
        ('billjobs', '0002_service_is_available_squashed_0005_bill_issuer_address_default'),
    ]
    operations = [
        migrations.AddField(
            model_name='bill',
            name='billing_address',
            field=models.CharField(max_length=1024),
        ),
        migrations.RunPython(add_billing_address),
    ]
| Add billing_address and migrate data | Add billing_address and migrate data
| Python | mit | ioO/billjobs |
d9f035b6915a5290b7affe1853937d45214e07bc | dbaas_zabbix/__init__.py | dbaas_zabbix/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
import sys
from dbaas_zabbix.dbaas_api import DatabaseAsAServiceApi
from dbaas_zabbix.provider_factory import ProviderFactory
from pyzabbix import ZabbixAPI
LOG = logging.getLogger(__name__)
def set_integration_logger():
    stream = logging.StreamHandler(sys.stdout)
    stream.setLevel(logging.DEBUG)
    log = logging.getLogger('pyzabbix')
    log.addHandler(stream)
    log.setLevel(logging.DEBUG)
def factory_for(**kwargs):
    databaseinfra = kwargs['databaseinfra']
    credentials = kwargs['credentials']
    del kwargs['databaseinfra']
    del kwargs['credentials']
    zabbix_api = ZabbixAPI
    if kwargs.get('zabbix_api'):
        zabbix_api = kwargs.get('zabbix_api')
        del kwargs['zabbix_api']
    dbaas_api = DatabaseAsAServiceApi(databaseinfra, credentials)
    return ProviderFactory.factory(dbaas_api, zabbix_api=zabbix_api, **kwargs)
ZABBIX_INTEGRATION_LOG = os.getenv('ZABBIX_INTEGRATION_LOG') == "1"
LOG.info('ZABBIX_INTEGRATION_LOG = ' + str(ZABBIX_INTEGRATION_LOG))
if ZABBIX_INTEGRATION_LOG:
    LOG.info("Activating log stream for pyzabbix")
    set_integration_logger()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import sys
from dbaas_zabbix.dbaas_api import DatabaseAsAServiceApi
from dbaas_zabbix.provider_factory import ProviderFactory
from pyzabbix import ZabbixAPI
stream = logging.StreamHandler(sys.stdout)
stream.setLevel(logging.DEBUG)
log = logging.getLogger('pyzabbix')
log.addHandler(stream)
log.setLevel(logging.DEBUG)
def factory_for(**kwargs):
    databaseinfra = kwargs['databaseinfra']
    credentials = kwargs['credentials']
    del kwargs['databaseinfra']
    del kwargs['credentials']
    zabbix_api = ZabbixAPI
    if kwargs.get('zabbix_api'):
        zabbix_api = kwargs.get('zabbix_api')
        del kwargs['zabbix_api']
    dbaas_api = DatabaseAsAServiceApi(databaseinfra, credentials)
    return ProviderFactory.factory(dbaas_api, zabbix_api=zabbix_api, **kwargs)
| Revert "logs pyzabbix com variavel de ambiente" | Revert "logs pyzabbix com variavel de ambiente"
This reverts commit df796462b07f7470d86f4d0622a414c956a45ced.
| Python | bsd-3-clause | globocom/dbaas-zabbix,globocom/dbaas-zabbix |
bbb4496a99a5c65218b12c56de01c12ab83a1056 | demo/recent_questions.py | demo/recent_questions.py | #!/usr/bin/env python
from __future__ import print_function
# Same directory hack
import sys
sys.path.append('.')
sys.path.append('..')
try:
    get_input = raw_input
except NameError:
    get_input = input
user_api_key = get_input("Please enter an API key if you have one (Return for none):")
if not user_api_key: user_api_key = None
import stackexchange, thread
so = stackexchange.Site(stackexchange.StackOverflow, app_key=user_api_key, impose_throttling=True)
so.be_inclusive()
sys.stdout.write('Loading...')
sys.stdout.flush()
questions = so.recent_questions(pagesize=10, filter='_b')
print('\r # vote ans view')
cur = 1
for question in questions:
    print('%2d %3d %3d %3d \t%s' % (cur, question.score, len(question.answers), question.view_count, question.title))
    cur += 1
num = int(get_input('Question no.: '))
qu = questions[num - 1]
print('--- %s' % qu.title)
print('%d votes, %d answers, %d views.' % (qu.score, len(qu.answers), qu.view_count))
print('Tagged: ' + ', '.join(qu.tags))
print()
print(qu.body[:250] + ('...' if len(qu.body) > 250 else ''))
| #!/usr/bin/env python
from __future__ import print_function
from six.moves import input
# Same directory hack
import sys
sys.path.append('.')
sys.path.append('..')
user_api_key = input("Please enter an API key if you have one (Return for none):")
if not user_api_key: user_api_key = None
import stackexchange, thread
so = stackexchange.Site(stackexchange.StackOverflow, app_key=user_api_key, impose_throttling=True)
so.be_inclusive()
sys.stdout.write('Loading...')
sys.stdout.flush()
questions = so.recent_questions(pagesize=10, filter='_b')
print('\r # vote ans view')
cur = 1
for question in questions:
    print('%2d %3d %3d %3d \t%s' % (cur, question.score, len(question.answers), question.view_count, question.title))
    cur += 1
num = int(get_input('Question no.: '))
qu = questions[num - 1]
print('--- %s' % qu.title)
print('%d votes, %d answers, %d views.' % (qu.score, len(qu.answers), qu.view_count))
print('Tagged: ' + ', '.join(qu.tags))
print()
print(qu.body[:250] + ('...' if len(qu.body) > 250 else ''))
| Use six function for input() in recent questions demo | Use six function for input() in recent questions demo
| Python | bsd-3-clause | Khilo84/Py-StackExchange,lucjon/Py-StackExchange,damanjitsingh/StackExchange-python- |
d358a759d86ce2a377e4fef84f20075bd0481d3b | ditto/flickr/views.py | ditto/flickr/views.py | from ..ditto.views import PaginatedListView
from .models import Account, Photo, User
class Home(PaginatedListView):
    template_name = 'flickr/index.html'
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['account_list'] = Account.objects.all()
        return context
    def get_queryset(self):
        "Get Photos by all of the Accounts that have Users."
        # Use select_related to fetch user details too. Could be nasty...
        return Photo.public_photo_objects.all().select_related()
| from ..ditto.views import PaginatedListView
from .models import Account, Photo, User
class Home(PaginatedListView):
    template_name = 'flickr/index.html'
    paginate_by = 48
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['account_list'] = Account.objects.all()
        return context
    def get_queryset(self):
        "Get Photos by all of the Accounts that have Users."
        # Use select_related to fetch user details too. Could be nasty...
        return Photo.public_photo_objects.all().select_related()
| Change number of photos per page | Change number of photos per page
| Python | mit | philgyford/django-ditto,philgyford/django-ditto,philgyford/django-ditto |
7560bce01be5560395dd2373e979dbee086f3c21 | py2app/converters/nibfile.py | py2app/converters/nibfile.py | """
Automatic compilation of XIB files
"""
import subprocess, os
from py2app.decorators import converts
@converts(suffix=".xib")
def convert_xib(source, destination, dry_run=0):
    destination = destination[:-4] + ".nib"
    if dry_run:
        return
    p = subprocess.Popen(['ibtool', '--compile', destination, source])
    xit = p.wait()
    if xit != 0:
        raise RuntimeError("ibtool failed, code %d"%(xit,))
@converts(suffix=".nib")
def convert_nib(source, destination, dry_run=0):
    destination = destination[:-4] + ".nib"
    if dry_run:
        return
    p = subprocess.Popen(['ibtool', '--compile', destination, source])
    xit = p.wait()
    if xit != 0:
        raise RuntimeError("ibtool failed, code %d"%(xit,))
| """
Automatic compilation of XIB files
"""
from __future__ import print_function
import subprocess, os
from py2app.decorators import converts
gTool = None
def _get_ibtool():
    global gTool
    if gTool is None:
        if os.path.exists('/usr/bin/xcrun'):
            gTool = subprocess.check_output(['/usr/bin/xcrun', '-find', 'ibtool'])[:-1]
        else:
            gTool = 'ibtool'
        print (gTool)
    return gTool
@converts(suffix=".xib")
def convert_xib(source, destination, dry_run=0):
    destination = destination[:-4] + ".nib"
    print("compile %s -> %s"%(source, destination))
    if dry_run:
        return
    subprocess.check_call([_get_ibtool(), '--compile', destination, source])
@converts(suffix=".nib")
def convert_nib(source, destination, dry_run=0):
    destination = destination[:-4] + ".nib"
    print("compile %s -> %s"%(source, destination))
    if dry_run:
        return
    subprocess.check_call([_get_ibtool, '--compile', destination, source])
| Simplify nib compiler and support recent Xcode versions by using xcrun | Simplify nib compiler and support recent Xcode versions by using xcrun
| Python | mit | metachris/py2app,metachris/py2app,metachris/py2app,metachris/py2app |
b6d08abf7bc4aafaeec59944bdcdf8ae4a9352d5 | recipe_scrapers/consts.py | recipe_scrapers/consts.py | import re
TIME_REGEX = re.compile(
    r'\A(\s*(?P<hours>\d+)\s{1}(hours|hrs|hr|h))?((?P<minutes>\s*\d+)\s{1}(minutes|mins|min|m))?\Z'
)
HTML_SYMBOLS = '\xa0' #
| import re
TIME_REGEX = re.compile(
    r'\A(\s*(?P<hours>\d+)\s*(hours|hrs|hr|h))?(\s*(?P<minutes>\d+)\s*(minutes|mins|min|m))?\Z'
)
HTML_SYMBOLS = '\xa0' #
| Update time_regex captcher so to work with more sites | Update time_regex captcher so to work with more sites
| Python | mit | hhursev/recipe-scraper |
c0de2a081cfe9af7f6b9d39daae557d45f5d69ee | middleware/module_yaml.py | middleware/module_yaml.py | from __future__ import unicode_literals
import os
import yaml
def main(app, data):
    filepath = os.path.join(app.data_dir, data.get('filename'))
    with open(filepath, 'r') as f:
        contents = yaml.load(f)
    return contents
| from __future__ import unicode_literals
import os
import yaml
import requests
def local(app, data):
    filepath = os.path.join(app.data_dir, data.get('filename'))
    with open(filepath, 'r') as f:
        contents = yaml.load(f)
    return contents
def remote(app, data):
    r = requests.get(data.get('url'))
    contents = yaml.load(r.data)
    return contents
def main(app, data):
    if data.get('filename'):
        return local(app, data)
    if data.get('url'):
        return remote(app, data)
| Allow remote and local files. | Allow remote and local files.
| Python | mit | myles/me-api,myles/me-api |
cb0baa6abcb358c4f44135b3f17d02af2d1d4d06 | ibmcnx/doc/DataSources.py | ibmcnx/doc/DataSources.py | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: [email protected]
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
    dbname = db.split('(')
    n = 0
    for i in dbname:
        if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
            dblist.append(str(db).replace('"',''))
        n += 1
dblist.sort()
for db in dblist:
    #t1 = ibmcnx.functions.getDSId( db )
    print db
    # AdminConfig.show( t1 )
    # print '\n\n'
    # AdminConfig.showall( t1 )
    # AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' ) | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: [email protected]
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
# remove unwanted databases
for db in dbs:
    dbname = db.split('(')
    n = 0
    for i in dbname:
        # i is only the name of the DataSource, db is DataSource ID!
        if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
            dblist.append(str(db).replace('"',''))
        n += 1
dblist.sort()
for db in dblist:
    # print db
    print "AdminConfig.show( db ): "
    AdminConfig.show( db )
    print "AdminConfig.showall( db ): "
    AdminConfig.showall( db )
    # AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' ) | Create documentation of DataSource Settings | 8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8 | Python | apache-2.0 | stoeps13/ibmcnx2,stoeps13/ibmcnx2 |
3a924ebac8ecd1c8ff1dcbf60b9e5ea45fa58554 | src/database/__init__.py | src/database/__init__.py | from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.pool import StaticPool
session = None
def init_session(connection_string=None, drop=False):
    if connection_string is None:
        engine = create_engine('sqlite://',
                               echo=True,
                               connect_args={'check_same_thread':False},
                               poolclass=StaticPool)
    else:
        engine = create_engine(connection_string)
    from database.model import Base
    global session
    if drop:
        try:
            old_session = session
            Base.metadata.drop_all(bind=old_session.bind)
        except:
            pass
    db_session = scoped_session(sessionmaker(autocommit=False,
                                             autoflush=False,
                                             bind=engine))
    Base.metadata.create_all(bind=engine)
    session = db_session
| from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.pool import StaticPool
# The global database session.
session = None
def init_session(connection_string=None, drop=False):
"""Initialize the database session and create the schema if it
does not exist.
The connection string describes the database connection.
Documentation on this can be found on [1]. If its omitted
a temporary in-memory sqlite database will be used. This
is useful for unittesting where you need a fresh database
on every run.
The schema can also be dropped before initialization by
setting the drop parameter to true.
The session can be accessed by the session variable of the
database module.
[1] http://docs.sqlalchemy.org/en/rel_0_8/core/engines.html
:param connection_string: The connection description for the
engine. See above for details
:param drop: Drop the schema and recreate it in init. All
data will be lost!
"""
if connection_string is None:
engine = create_engine('sqlite://',
echo=True,
connect_args={'check_same_thread':False},
poolclass=StaticPool)
else:
engine = create_engine(connection_string)
from database.model import Base
global session
if drop:
try:
old_session = session
Base.metadata.drop_all(bind=old_session.bind)
except:
pass
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base.metadata.create_all(bind=engine)
session = db_session
| Add documentation on the database session stuff. | Add documentation on the database session stuff.
| Python | bsd-3-clause | eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system |
4f0fbe048967cab5eb13850cd5ae1e97560a7b27 | version.py | version.py | major = 0
minor=0
patch=8
branch="master"
timestamp=1376412892.53 | major = 0
minor=0
patch=9
branch="master"
timestamp=1376413554.96 | Tag commit for v0.0.9-master generated by gitmake.py | Tag commit for v0.0.9-master generated by gitmake.py
| Python | mit | ryansturmer/gitmake |
2c8351ff8691eb9ad3009d316d932528d6f5c57d | runtests.py | runtests.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import django
from django.conf import settings
from django.core.management import call_command
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
opts = {'INSTALLED_APPS': ['widget_tweaks']}
if django.VERSION[:2] < (1, 5):
opts['DATABASES'] = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':MEMORY:',
}
}
if django.VERSION[:2] >= (1, 10):
opts['TEMPLATES'] = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
},
]
settings.configure(**opts)
if django.VERSION[:2] >= (1, 7):
django.setup()
if __name__ == "__main__":
call_command('test', 'widget_tweaks')
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import django
from django.conf import settings
from django.core.management import call_command
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
opts = {'INSTALLED_APPS': ['widget_tweaks']}
if django.VERSION[:2] < (1, 5):
opts['DATABASES'] = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':MEMORY:',
}
}
if django.VERSION[:2] >= (1, 10):
opts['TEMPLATES'] = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
},
]
settings.configure(**opts)
if django.VERSION[:2] >= (1, 7):
django.setup()
if __name__ == "__main__":
call_command('test', 'widget_tweaks', verbosity=2)
| Add more verbosity on test running | :lipstick: Add more verbosity on test running
| Python | mit | kmike/django-widget-tweaks,daniboy/django-widget-tweaks |
4359a9947c1d86d9e4003c1e8fc358e9a66c6b1d | DisplayAdapter/display_adapter/scripts/init_db.py | DisplayAdapter/display_adapter/scripts/init_db.py | __author__ = 'richard'
| """
Script that is run from the command line in order to
"""
import sys
import sqlite3
from display_adapter import db_name
help_message = """
This initialises an sqlite3 db for the purposes of the DisplayAdapter programs.
Arguments: init_db.py database_name
"""
runs_table = """
CREATE TABLE runs (
id INTEGER NOT NULL,
input_pattern VARCHAR,
time_slot DATETIME,
user_name VARCHAR(50),
PRIMARY KEY (id)
)
"""
screensavers_table = """
CREATE TABLE screensavers (
pattern VARCHAR
)
"""
def init_db(db_name=db_name):
"""
This function takes a database name and creates the database required
for the DisplayAdapter programs
"""
con = sqlite3.connect(db_name)
cur = con.cursor()
cur.execute(runs_table)
cur.execute(screensavers_table)
con.commit()
con.close()
if __name__ == "__main__":
if len(sys.argv) < 2:
if sys.argv[1].lower() == "help":
print(help_message)
else:
init_db(sys.argv[1])
else:
init_db() | Create internal db initialisation script | Create internal db initialisation script
Paired by Michael and Richard
| Python | mit | CO600GOL/Game_of_life,CO600GOL/Game_of_life,CO600GOL/Game_of_life |
4e9dfbaff5a91af75e3b18e6b4e06379747c6083 | research_pyutils/__init__.py | research_pyutils/__init__.py | # expose the most frequently used functions in the top level.
from .path_related import (mkdir_p, rm_if_exists, remove_empty_paths,
copy_contents_of_folder, count_files,
copy_the_previous_if_missing,
folders_last_modification)
try:
from .menpo_related import (resize_all_images, from_ln_to_bb_path,
process_lns_path, compute_overlap,
rasterize_all_lns, flip_images)
except ImportError:
m1 = ('The menpo related utils are not imported. If '
'you intended to use them please check your '
'menpo installation.')
print(m1)
from .filenames_changes import (rename_files, change_suffix,
strip_filenames)
from .auxiliary import (execution_stats, compare_python_types,
whoami, populate_visual_options)
| # expose the most frequently used functions in the top level.
from .path_related import (mkdir_p, rm_if_exists, remove_empty_paths,
copy_contents_of_folder, count_files,
copy_the_previous_if_missing,
folders_last_modification)
try:
from .menpo_related import (resize_all_images, from_ln_to_bb_path,
process_lns_path, compute_overlap,
rasterize_all_lns, flip_images,
check_if_greyscale_values)
except ImportError:
m1 = ('The menpo related utils are not imported. If '
'you intended to use them please check your '
'menpo installation.')
print(m1)
from .filenames_changes import (rename_files, change_suffix,
strip_filenames)
from .auxiliary import (execution_stats, compare_python_types,
whoami, populate_visual_options)
| Add in the init the newly introduced function | Add in the init the newly introduced function
| Python | apache-2.0 | grigorisg9gr/pyutils,grigorisg9gr/pyutils |
ba4f692e00d87afdd65d3a1b88046089b709eaab | organizer/views.py | organizer/views.py | from django.http.response import HttpResponse
from django.template import Context, loader
from .models import Tag
def homepage(request):
tag_list = Tag.objects.all()
template = loader.get_template(
'organizer/tag_list.html')
context = Context({'tag_list': tag_list})
output = template.render(context)
return HttpResponse(output)
def tag_detail(request):
return HttpResponse()
| from django.http.response import HttpResponse
from django.template import Context, loader
from .models import Tag
def homepage(request):
tag_list = Tag.objects.all()
template = loader.get_template(
'organizer/tag_list.html')
context = Context({'tag_list': tag_list})
output = template.render(context)
return HttpResponse(output)
def tag_detail(request):
# slug = ?
tag = Tag.objects.get(slug__iexact=slug)
return HttpResponse()
| Tag Detail: get Tag from database. | Ch05: Tag Detail: get Tag from database.
| Python | bsd-2-clause | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 |
147a3d1c690b7e1c80fafd6eb4c834585e733564 | ibmcnx/test/loadFunction.py | ibmcnx/test/loadFunction.py |
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
globdict = globals()
def loadFilesService():
global globdict
execfile("filesAdmin.py", globdict)
|
import sys
from java.lang import String
from java.util import HashSet
from java.util import HashMap
import java
globdict = globals()
def loadFilesService():
global globdict
exec open("filesAdmin.py").read()
| Customize scripts to work with menu | Customize scripts to work with menu
| Python | apache-2.0 | stoeps13/ibmcnx2,stoeps13/ibmcnx2 |
76c87d06efaac19350d870cd1c95229ed0a66c29 | editdistance/__init__.py | editdistance/__init__.py | from .bycython import eval
__all__ = ('eval',)
| from .bycython import eval
def distance(*args, **kwargs):
""""An alias to eval"""
return eval(*args, **kwargs)
__all__ = ('eval', 'distance')
| Add alias method named "distance" | Add alias method named "distance"
| Python | mit | aflc/editdistance,aflc/editdistance,aflc/editdistance |
bea98b8228131d4228ba364c18ec89f4188c8a18 | rdtools/__init__.py | rdtools/__init__.py | from energy_normalization import normalize_with_sapm
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
| from energy_normalization import normalize_with_sapm
from degradation import rd_with_ols
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
| Add degradation module to init. | Add degradation module to init.
| Python | mit | kwhanalytics/rdtools,kwhanalytics/rdtools |
60c718564b8941d0d6fa684ee175f6bfe7c937cc | templates/test_scraper.py | templates/test_scraper.py | from tests import ScraperTest
from recipe_scrapers.template import Template
class TestTemplateScraper(ScraperTest):
scraper_class = Template
def test_host(self):
self.assertEqual("example.com", self.harvester_class.host())
def test_author(self):
self.assertEqual("", self.harvester_class.author())
def test_title(self):
self.assertEqual(self.harvester_class.title(), "")
def test_total_time(self):
self.assertEqual(0, self.harvester_class.total_time())
def test_yields(self):
self.assertEqual("", self.harvester_class.yields())
def test_image(self):
self.assertEqual("", self.harvester_class.image())
def test_ingredients(self):
self.assertCountEqual([], self.harvester_class.ingredients())
def test_instructions(self):
self.assertEqual("", self.harvester_class.instructions())
def test_ratings(self):
self.assertEqual(0, self.harvester_class.ratings())
| from tests import ScraperTest
from recipe_scrapers.template import Template
class TestTemplateScraper(ScraperTest):
scraper_class = Template
def test_host(self):
self.assertEqual("example.com", self.harvester_class.host())
def test_author(self):
self.assertEqual("", self.harvester_class.author())
def test_title(self):
self.assertEqual("", self.harvester_class.title())
def test_total_time(self):
self.assertEqual(0, self.harvester_class.total_time())
def test_yields(self):
self.assertEqual("", self.harvester_class.yields())
def test_image(self):
self.assertEqual("", self.harvester_class.image())
def test_ingredients(self):
self.assertCountEqual([], self.harvester_class.ingredients())
def test_instructions(self):
self.assertEqual("", self.harvester_class.instructions())
def test_ratings(self):
self.assertEqual(0, self.harvester_class.ratings())
| Change test template assertion order to be consistent | Change test template assertion order to be consistent
| Python | mit | hhursev/recipe-scraper |
aff77b144c1a1895c9e8c0ca2d4e79451525901c | terminus/models/trunk.py | terminus/models/trunk.py | """
Copyright (C) 2017 Open Source Robotics Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
self.add_lane(-2)
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self)
| """
Copyright (C) 2017 Open Source Robotics Foundation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
self.add_lane(-2, reversed=True)
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self)
| Make Trunks have opposite directions in the included lanes | Make Trunks have opposite directions in the included lanes
| Python | apache-2.0 | ekumenlabs/terminus,ekumenlabs/terminus |
72a248416971a5765f908bfb26b28ea546d8d9bb | myuw_mobile/dao/canvas.py | myuw_mobile/dao/canvas.py | from restclients.canvas import Canvas
from myuw_mobile.dao.pws import Person
from myuw_mobile.logger.timer import Timer
from myuw_mobile.logger.logback import log_resp_time, log_exception
import logging
import traceback
class Enrollments:
def get_enrollments(self):
"""
Returns calendar information for the current term.
"""
timer = Timer()
logger = logging.getLogger('myuw_mobile.dao.canvas.Enrollments')
try:
regid = Person().get_regid()
return Canvas().get_enrollments_for_regid(regid)
except Exception as ex:
log_exception(logger,
'canvas.get_enrollments',
traceback.format_exc())
finally:
log_resp_time(logger,
'canvas.get_enrollments',
timer)
return []
| from restclients.canvas import Canvas
from myuw_mobile.dao.pws import Person
from myuw_mobile.logger.timer import Timer
from myuw_mobile.logger.logback import log_resp_time, log_exception
import logging
import traceback
class Enrollments:
def get_enrollments(self):
"""
Returns calendar information for the current term.
"""
timer = Timer()
logger = logging.getLogger('myuw_mobile.dao.canvas.Enrollments')
try:
regid = Person().get_regid()
return Canvas().get_courses_for_regid(regid)
except Exception as ex:
log_exception(logger,
'canvas.get_enrollments',
traceback.format_exc())
finally:
log_resp_time(logger,
'canvas.get_enrollments',
timer)
return []
| Switch to courses instead of enrollments - MUWM-457 | Switch to courses instead of enrollments - MUWM-457
| Python | apache-2.0 | fanglinfang/myuw,uw-it-aca/myuw,fanglinfang/myuw,uw-it-aca/myuw,uw-it-aca/myuw,fanglinfang/myuw,uw-it-aca/myuw |
e8940b632737f75897c0ea7c108563a63f1a5dde | transducer/test/test_functional.py | transducer/test/test_functional.py | import unittest
from transducer.functional import compose
class TestComposition(unittest.TestCase):
def test_single(self):
"""
compose(f)(x) -> f(x)
"""
f = lambda x: x * 2
c = compose(f)
# We can't test the equivalence of functions completely, so...
self.assertSequenceEqual([f(x) for x in range(1000)],
[c(x) for x in range(1000)])
def test_double(self):
"""
compose(f, g)(x) -> f(g(x))
"""
f = lambda x: x * 2
g = lambda x: x + 1
c = compose(f, g)
self.assertSequenceEqual([f(g(x)) for x in range(100)],
[c(x) for x in range(100)])
def test_triple(self):
"""
compose(f, g, h)(x) -> f(g(h(x)))
"""
f = lambda x: x * 2
g = lambda x: x + 1
h = lambda x: x - 7
c = compose(f, g, h)
self.assertSequenceEqual([f(g(h(x))) for x in range(100)],
[c(x) for x in range(100)])
if __name__ == '__main__':
unittest.main()
| import unittest
from transducer.functional import compose, true, identity, false
class TestComposition(unittest.TestCase):
def test_single(self):
"""
compose(f)(x) -> f(x)
"""
f = lambda x: x * 2
c = compose(f)
# We can't test the equivalence of functions completely, so...
self.assertSequenceEqual([f(x) for x in range(1000)],
[c(x) for x in range(1000)])
def test_double(self):
"""
compose(f, g)(x) -> f(g(x))
"""
f = lambda x: x * 2
g = lambda x: x + 1
c = compose(f, g)
self.assertSequenceEqual([f(g(x)) for x in range(100)],
[c(x) for x in range(100)])
def test_triple(self):
"""
compose(f, g, h)(x) -> f(g(h(x)))
"""
f = lambda x: x * 2
g = lambda x: x + 1
h = lambda x: x - 7
c = compose(f, g, h)
self.assertSequenceEqual([f(g(h(x))) for x in range(100)],
[c(x) for x in range(100)])
class TestFunctions(unittest.TestCase):
def test_true(self):
self.assertTrue(true())
def test_false(self):
self.assertFalse(false())
def test_identity(self):
self.assertEqual(identity(42), 42)
if __name__ == '__main__':
unittest.main()
| Improve test coverage of functional.py. | Improve test coverage of functional.py.
| Python | mit | sixty-north/python-transducers |
3cf11b24cb09c1929d574f0488fac01d1032c205 | lib/python2.6/aquilon/client/depends.py | lib/python2.6/aquilon/client/depends.py | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Suggested versions of external libraries, and the defaults for the
binaries shipped.
"""
import ms.version
ms.version.addpkg('lxml', '2.3.2')
| # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Suggested versions of external libraries, and the defaults for the
binaries shipped.
"""
import sys
import ms.version
ms.version.addpkg('lxml', '2.3.2')
if sys.platform == "sunos5":
# ctypes is missing from the default Python build on Solaris, due to
# http://bugs.python.org/issue2552. It is available as a separate package
# though.
ms.version.addpkg("ctypes", "1.0.2")
# required to move the ctypes path before the core paths
sys.path[0] = sys.path.pop()
| Make the aq client work on Solaris/x86 | Make the aq client work on Solaris/x86
The ctypes module cannot be built with the Sun C compiler, therefore it
is missing from the default Python build on Solaris.
Change-Id: I4b81adc051ea38bf9bde126f71a09d4e78c1b9b3
Addresses-Issue: Jira/AQUILON-994
Reviewed-By: Nathan Dimmock <[email protected]>
| Python | apache-2.0 | guillaume-philippon/aquilon,quattor/aquilon,quattor/aquilon,quattor/aquilon,guillaume-philippon/aquilon,guillaume-philippon/aquilon,stdweird/aquilon,stdweird/aquilon,stdweird/aquilon |
2408c5260106e050557b4898d5826932eb758142 | normandy/selfrepair/views.py | normandy/selfrepair/views.py | from django.shortcuts import render
from normandy.base.decorators import api_cache_control
@api_cache_control()
def repair(request, locale):
return render(request, "selfrepair/repair.html")
| from django.shortcuts import render
from django.views.decorators.cache import cache_control
ONE_WEEK_IN_SECONDS = 60 * 60 * 24 * 7
@cache_control(public=True, max_age=ONE_WEEK_IN_SECONDS)
def repair(request, locale):
return render(request, "selfrepair/repair.html")
| Increase cache on deprecated self-repair to one week | Increase cache on deprecated self-repair to one week
This view serves a message that the system is no longer active. We keep
it around because it is still gets about 40 million hits per day,
primarily from Firefox ESR 52, which never got the Normandy client.
Notably, when we dropped support for Windows XP from Firefox, we put all
XP users onto ESR 52, so we are not likely to be able to remove this
endpoint any time soon.
Fixes #1563
| Python | mpl-2.0 | mozilla/normandy,mozilla/normandy,mozilla/normandy,mozilla/normandy |
dd260182bd8157fd6ac2a266b3ae5cf168400266 | tests/custom_keywords.py | tests/custom_keywords.py | import os
from raven import Client
def generate_event(msg, dsn):
client = Client(dsn)
client.captureMessage(msg)
def clear_inbox(maildir):
print('Clearing inbox at {}'.format(maildir))
for fname in os.listdir(maildir):
os.remove(os.path.join(maildir, fname))
def inbox_should_contain_num_mails(maildir, count):
print('Testing if inbox at {} has {} items.'.format(maildir, count))
count = int(count)
nmails = len(os.listdir(maildir))
if nmails != count:
raise AssertionError(
'Inbox should contain {} messages, but has {}.'.format(
count, nmails)
)
def mail_should_contain_text(maildir, num, text):
print('Testing if mail {} in {} contains text {}.'.format(
num, maildir, text))
mails = os.listdir(maildir)
num = int(num)
if len(mails) < num:
raise AssertionError('Not enough mails in inbox (found {}).'.format(len(mails)))
fname = mails[num - 1]
with open(os.path.join(maildir, fname)) as f:
content = f.read()
if not text in content:
raise AssertionError('Mail does not contain text.')
| import os
from raven import Client
def generate_event(msg, dsn):
client = Client(dsn)
client.captureMessage(msg)
def clear_inbox(maildir):
print('Clearing inbox at {}'.format(maildir))
if not os.path.isdir(maildir):
return
for fname in os.listdir(maildir):
os.remove(os.path.join(maildir, fname))
def inbox_should_contain_num_mails(maildir, count):
print('Testing if inbox at {} has {} items.'.format(maildir, count))
count = int(count)
nmails = len(os.listdir(maildir))
if nmails != count:
raise AssertionError(
'Inbox should contain {} messages, but has {}.'.format(
count, nmails)
)
def mail_should_contain_text(maildir, num, text):
print('Testing if mail {} in {} contains text {}.'.format(
num, maildir, text))
mails = os.listdir(maildir)
num = int(num)
if len(mails) < num:
raise AssertionError('Not enough mails in inbox (found {}).'.format(len(mails)))
fname = mails[num - 1]
with open(os.path.join(maildir, fname)) as f:
content = f.read()
if not text in content:
raise AssertionError('Mail does not contain text.')
| Make Clear Inbox keyword more robust. | Make Clear Inbox keyword more robust.
| Python | bsd-3-clause | andialbrecht/sentry-comments,andialbrecht/sentry-comments |
114f40dd282d1837db42ffb6625760d1483d3192 | jfu/templatetags/jfutags.py | jfu/templatetags/jfutags.py | from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.template import Library, Context, loader
register = Library()
@register.simple_tag( takes_context = True )
def jfu(
context,
template_name = 'jfu/upload_form.html',
upload_handler_name = 'jfu_upload'
):
"""
Displays a form for uploading files using jQuery File Upload.
A user may supply both a custom template or a custom upload-handling URL
name by supplying values for template_name and upload_handler_name
respectively.
"""
context.update( {
'JQ_OPEN' : '{%',
'JQ_CLOSE' : '%}',
'upload_handler_url': reverse( upload_handler_name ),
} )
# Use the request context variable, injected
# by django.core.context_processors.request
# to generate the CSRF token.
context.update( csrf( context.get('request') ) )
t = loader.get_template( template_name )
return t.render( Context( context ) )
| from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.template import Library, Context, loader
register = Library()
@register.simple_tag( takes_context = True )
def jfu(
context,
template_name = 'jfu/upload_form.html',
upload_handler_name = 'jfu_upload',
*args, **kwargs
):
"""
Displays a form for uploading files using jQuery File Upload.
A user may supply both a custom template or a custom upload-handling URL
name by supplying values for template_name and upload_handler_name
respectively.
"""
context.update( {
'JQ_OPEN' : '{%',
'JQ_CLOSE' : '%}',
'upload_handler_url': reverse( upload_handler_name, kwargs=kwargs, args=args ),
} )
# Use the request context variable, injected
# by django.core.context_processors.request
# to generate the CSRF token.
context.update( csrf( context.get('request') ) )
t = loader.get_template( template_name )
return t.render( Context( context ) )
| Allow args and kwargs to upload_handler_name | Allow args and kwargs to upload_handler_name
Now can use args and kwargs for reverse url. Example in template:
{% jfu 'core/core_fileuploader.html' 'core_upload' object_id=1 content_type_str='app.model' %} | Python | bsd-3-clause | Alem/django-jfu,dzhuang/django-jfu,Alem/django-jfu,dzhuang/django-jfu,Alem/django-jfu,dzhuang/django-jfu,dzhuang/django-jfu,Alem/django-jfu |
9f3289f45c727835c8f52b0c2489b06da2f03c25 | pyglab/__init__.py | pyglab/__init__.py | __title__ = 'pyglab'
__version__ = '0.0dev'
__author__ = 'Michael Schlottke'
__license__ = 'MIT License'
__copyright__ = '(c) 2014 Michael Schlottke'
from .pyglab import Pyglab
from .apirequest import ApiRequest, RequestType
| __title__ = 'pyglab'
__version__ = '0.0dev'
__author__ = 'Michael Schlottke'
__license__ = 'MIT License'
__copyright__ = '(c) 2014 Michael Schlottke'
from .pyglab import Pyglab
from .exceptions import RequestError
from .apirequest import ApiRequest, RequestType
| Make RequestError available in package root. | Make RequestError available in package root.
| Python | mit | sloede/pyglab,sloede/pyglab |
cd2bc29837d31d8999d9f72f7ddaecddb56e26a5 | tests/unit/test_views.py | tests/unit/test_views.py | from flask import json
from nose.tools import eq_
from server import app
client = app.test_client()
def test_hello_world():
# When: I access root path
resp = client.get('/')
# Then: Expected response is returned
eq_(resp.status_code, 200)
eq_(resp.headers['Content-Type'], 'application/json')
data = json.loads(resp.data.decode())
eq_(data['message'], 'Hello Worlb!')
| from flask import json
from nose.tools import eq_
from server import app
client = app.test_client()
def test_hello_world():
# When: I access root path
resp = client.get('/')
# Then: Expected response is returned
eq_(resp.status_code, 200)
eq_(resp.headers['Content-Type'], 'application/json')
data = json.loads(resp.data.decode())
eq_(data['message'].startswith('Hello'), True)
| Use startswith instead of exact string match | Use startswith instead of exact string match
| Python | mit | agarone-mm/scholastic-demo,totem/totem-demo,risingspiral/appnexus-demo |
0876264d9f344dae2006841913f6b2308129f8c1 | fabfile.py | fabfile.py |
import os
import logging
from decouple import config
FOLDER = 'public'
FOLDER = FOLDER.strip('/')
log = logging.getLogger('deploy')
def deploy():
import boto
from boto.s3.connection import S3Connection
AWS_ACCESS_KEY_ID = config('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = config('AWS_SECRET_ACCESS_KEY')
BUCKET_NAME = config('AWS_BUCKET_NAME')
conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
bucket = conn.get_bucket(BUCKET_NAME)
key = boto.s3.key.Key(bucket)
for dirpath, dirnames, filenames in os.walk(FOLDER):
# do not use the FOLDER prefix
destpath = dirpath[len(FOLDER):]
destpath = destpath.strip('/')
log.info("Uploading {0} files from {1} to {2} ...".format(len(filenames),
dirpath,
BUCKET_NAME))
for filename in filenames:
key.name = os.path.relpath(os.path.join(destpath, filename)
).replace('\\', '/')
key.set_contents_from_filename(os.path.join(dirpath, filename))
|
import os
import logging
from decouple import config
FOLDER = 'public'
FOLDER = FOLDER.strip('/')
logging.basicConfig(level=logging.INFO)
def deploy():
import boto
from boto.s3.connection import S3Connection
AWS_ACCESS_KEY_ID = config('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = config('AWS_SECRET_ACCESS_KEY')
BUCKET_NAME = config('AWS_BUCKET_NAME')
conn = S3Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
bucket = conn.get_bucket(BUCKET_NAME)
key = boto.s3.key.Key(bucket)
for dirpath, dirnames, filenames in os.walk(FOLDER):
# do not use the FOLDER prefix
destpath = dirpath[len(FOLDER):]
destpath = destpath.strip('/')
logging.info("Uploading %s files from %s to %s", len(filenames),
dirpath, BUCKET_NAME)
for filename in filenames:
key.name = os.path.relpath(os.path.join(destpath, filename)
).replace('\\', '/')
key.set_contents_from_filename(os.path.join(dirpath, filename))
logging.debug("Sending %s", key.name)
logging.info("done :)")
| Change to use logging and set log level to INFO | Change to use logging and set log level to INFO
| Python | mit | vintasoftware/cdrf.co,vintasoftware/cdrf.co,aericson/cdrf.co,aericson/cdrf.co,vintasoftware/cdrf.co,aericson/cdrf.co |
a59a856a52a175ae3cbe79fcd6ea49d481aaacf3 | fabfile.py | fabfile.py | from fabric.api import * # noqa
env.hosts = [
'104.131.30.135',
]
env.user = "root"
env.directory = "/home/django/api.freemusic.ninja"
env.deploy_path = "/home/django/django_project"
def deploy():
with cd(env.directory):
run("git fetch")
run("git reset --hard origin/master")
sudo("pip3 install -r requirements.txt")
sudo("python3 manage.py collectstatic --noinput")
sudo("python3 manage.py migrate --noinput", user='django')
run("rm -f {deploy_path}".format(deploy_path=env.deploy_path))
run("ln -s {project_path} {deploy_path}".format(
project_path=env.directory, deploy_path=env.deploy_path))
run("service gunicorn restart")
run("service celeryd restart")
def dbshell():
with cd(env.directory):
sudo("python3 manage.py dbshell", user='django')
def shell():
with cd(env.directory):
sudo("python3 manage.py shell", user='django')
def migrate():
with cd(env.directory):
sudo("python3 manage.py migrate", user='django')
def gunicorn_restart():
run("service gunicorn restart")
| from fabric.api import * # noqa
env.hosts = [
'104.131.30.135',
]
env.user = "root"
env.directory = "/home/django/api.freemusic.ninja"
env.deploy_path = "/home/django/django_project"
def deploy():
with cd(env.directory):
run("git fetch")
run("git reset --hard origin/master")
sudo("pip3 install -r requirements.txt")
sudo("python3 manage.py collectstatic --noinput")
sudo("python3 manage.py migrate --noinput", user='django')
run("rm -f {deploy_path}".format(deploy_path=env.deploy_path))
run("ln -s {project_path} {deploy_path}".format(
project_path=env.directory, deploy_path=env.deploy_path))
run("service gunicorn restart")
run("service celeryd stop")
run("service celeryd start")
def dbshell():
with cd(env.directory):
sudo("python3 manage.py dbshell", user='django')
def shell():
with cd(env.directory):
sudo("python3 manage.py shell", user='django')
def migrate():
with cd(env.directory):
sudo("python3 manage.py migrate", user='django')
def gunicorn_restart():
run("service gunicorn restart")
| Break celeryd restart into distict 'stop' and 'start' commands on deploy | Break celeryd restart into distict 'stop' and 'start' commands on deploy
| Python | bsd-3-clause | FreeMusicNinja/api.freemusic.ninja |
b30d4301d58766471f435536cf804f7a63448ac5 | qotr/tests/test_server.py | qotr/tests/test_server.py | from tornado import testing
from qotr.server import make_application
from qotr.config import config
class TestChannelHandler(testing.AsyncHTTPTestCase):
'''
Test the channel creation handler.
'''
port = None
application = None
def get_app(self):
return make_application()
def test_index(self):
response = self.fetch('/')
self.assertEqual(200, response.code)
def test_channel(self):
response = self.fetch('/c/foo')
self.assertEqual(200, response.code)
def test_arbitrary(self):
response = self.fetch('/arbitrary-page')
self.assertEqual(404, response.code)
def test_https_redirect(self):
_old_cfg = config.redirect_to_https
config.redirect_to_https = True
response = self.fetch('/c/foo', follow_redirects=False)
config.redirect_to_https = _old_cfg
self.assertEqual(301, response.code)
| from tornado import testing
from qotr.server import make_application
from qotr.config import config
class TestChannelHandler(testing.AsyncHTTPTestCase):
'''
Test the channel creation handler.
'''
port = None
application = None
def get_app(self):
return make_application()
# def test_index(self):
# response = self.fetch('/')
# self.assertEqual(200, response.code)
# def test_channel(self):
# response = self.fetch('/c/foo')
# self.assertEqual(200, response.code)
# def test_arbitrary(self):
# response = self.fetch('/arbitrary-page')
# self.assertEqual(404, response.code)
def test_https_redirect(self):
_old_cfg = config.redirect_to_https
config.redirect_to_https = True
response = self.fetch('/c/foo', follow_redirects=False)
config.redirect_to_https = _old_cfg
self.assertEqual(301, response.code)
| Disable testing for index.html, needs ember build | Disable testing for index.html, needs ember build
Signed-off-by: Rohan Jain <[email protected]>
| Python | agpl-3.0 | rmoorman/qotr,rmoorman/qotr,sbuss/qotr,rmoorman/qotr,crodjer/qotr,sbuss/qotr,crodjer/qotr,sbuss/qotr,curtiszimmerman/qotr,curtiszimmerman/qotr,rmoorman/qotr,crodjer/qotr,curtiszimmerman/qotr,curtiszimmerman/qotr,sbuss/qotr,crodjer/qotr |
cda81a4585d2b2be868e784566f3c804feb1e9bf | analyze.py | analyze.py | import sys
import re
def main(argv):
# Message to perform sentiment analysis on
message = argv[0] if len(argv) > 0 else ""
if message == "":
print("Usage: python analyze.py [message]")
sys.exit(1)
# Load the positive and negative words
words = {}
with open("words/positive.txt") as file:
for line in file:
words[line.rstrip()] = 1
with open("words/negative.txt") as file:
for line in file:
words[line.rstrip()] = -1
# Perform the sentiment analysis
score = 0
found = 0
for w in message.split():
# Only keep alphanumeric characters and some punctuation.
w = re.sub(r'[^\-\'+\w]', '', w).lower()
if w in words:
score += words[w]
found += 1
print(round(score / float(found) if found != 0 else 0, 2))
if __name__ == "__main__":
main(sys.argv[1:])
| import sys
import re
def main(argv):
# Load the positive and negative words
words = {}
with open("words/positive.txt") as file:
for line in file:
words[line.rstrip()] = 1
with open("words/negative.txt") as file:
for line in file:
words[line.rstrip()] = -1
# Perform the sentiment analysis
for message in sys.stdin:
score = 0
found = 0
for w in message.split():
# Only keep alphanumeric characters and some punctuation.
w = re.sub(r'[^\-\'+\w]', '', w).lower()
if w in words:
score += words[w]
found += 1
print(round(score / float(found) if found != 0 else 0, 2))
if __name__ == "__main__":
main(sys.argv[1:])
| Read from standard input and perform on each line | Read from standard input and perform on each line
The analyze script can now be run with, for example
- echo "Message" | python analyze.py
- cat | python analyze.py (enter messages and end with Ctrl-D)
- python analyze.py < filename
- MapReduce (at some point)
| Python | mit | timvandermeij/sentiment-analysis,timvandermeij/sentiment-analysis |
da72373b572d3ce76ccc82b9f4ada7a122e76eb2 | __init__.py | __init__.py | # The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 0, 1, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (release_tag, VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
| # The version of Review Board.
#
# This is in the format of:
#
# (Major, Minor, Micro, alpha/beta/rc/final, Release Number, Released)
#
VERSION = (1, 0, 1, 'alpha', 1, False)
def get_version_string():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
if VERSION[3] == 'rc':
version += ' RC%s' % VERSION[4]
else:
version += ' %s %s' % (VERSION[3], VERSION[4])
if not is_release():
version += " (dev)"
return version
def get_package_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version += ".%s" % VERSION[2]
if VERSION[3] != 'final':
version += '%s%s' % (VERSION[3], VERSION[4])
return version
def is_release():
return VERSION[5]
| Fix the version string generation when not a final or RC release. | Fix the version string generation when not a final or RC release.
| Python | mit | chipx86/reviewboard,sgallagher/reviewboard,chazy/reviewboard,davidt/reviewboard,custode/reviewboard,beol/reviewboard,atagar/ReviewBoard,Khan/reviewboard,1tush/reviewboard,reviewboard/reviewboard,custode/reviewboard,davidt/reviewboard,reviewboard/reviewboard,Khan/reviewboard,chazy/reviewboard,Khan/reviewboard,chazy/reviewboard,1tush/reviewboard,bkochendorfer/reviewboard,chazy/reviewboard,chipx86/reviewboard,chazy/reviewboard,davidt/reviewboard,KnowNo/reviewboard,davidt/reviewboard,Khan/reviewboard,1tush/reviewboard,chazy/reviewboard,Khan/reviewboard,atagar/ReviewBoard,bkochendorfer/reviewboard,KnowNo/reviewboard,beol/reviewboard,Khan/reviewboard,1tush/reviewboard,1tush/reviewboard,asutherland/opc-reviewboard,chipx86/reviewboard,1tush/reviewboard,reviewboard/reviewboard,asutherland/opc-reviewboard,Khan/reviewboard,custode/reviewboard,chipx86/reviewboard,atagar/ReviewBoard,brennie/reviewboard,Khan/reviewboard,1tush/reviewboard,atagar/ReviewBoard,chazy/reviewboard,brennie/reviewboard,chazy/reviewboard,asutherland/opc-reviewboard,bkochendorfer/reviewboard,reviewboard/reviewboard,atagar/ReviewBoard,atagar/ReviewBoard,Khan/reviewboard,atagar/ReviewBoard,brennie/reviewboard,KnowNo/reviewboard,KnowNo/reviewboard,beol/reviewboard,1tush/reviewboard,atagar/ReviewBoard,chazy/reviewboard,bkochendorfer/reviewboard,sgallagher/reviewboard,1tush/reviewboard,custode/reviewboard,sgallagher/reviewboard,sgallagher/reviewboard,asutherland/opc-reviewboard,beol/reviewboard,brennie/reviewboard,atagar/ReviewBoard |
ac3c0e93adf35015d7f6cfc8c6cf2e6ec45cdeae | server/canonicalization/relationship_mapper.py | server/canonicalization/relationship_mapper.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Contains functions to canonicalize relationships."""
from __future__ import absolute_import
from __future__ import print_function
from nltk.corpus import wordnet
from .utils import wordnet_helper
from .utils import common
def canonicalize_relationship(text):
words = common.clean_text(text).split()
freq = []
for word in words:
for pos in [wordnet.VERB, wordnet.ADV]:
freq.extend(wordnet_helper.lemma_counter(word,
pos=pos).most_common())
if freq:
return max(freq, key=lambda x: x[1])[0]
else:
return None
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Contains functions to canonicalize relationships."""
from __future__ import absolute_import
from __future__ import print_function
import repoze.lru
from nltk.corpus import wordnet
from .utils import wordnet_helper
from .utils import common
@repoze.lru.lru_cache(4096)
def canonicalize_relationship(text):
words = common.clean_text(text).split()
freq = []
for word in words:
for pos in [wordnet.VERB, wordnet.ADV]:
freq.extend(wordnet_helper.lemma_counter(word,
pos=pos).most_common())
if freq:
return max(freq, key=lambda x: x[1])[0]
else:
return None
| Add LRU for relationship mapper. | [master] Add LRU for relationship mapper.
| Python | mit | hotpxl/canonicalization-server,hotpxl/canonicalization-server |
452924faafcfb4dcb1eb960ea30ab000f1f93962 | migrations/versions/0245_archived_flag_jobs.py | migrations/versions/0245_archived_flag_jobs.py | """
Revision ID: 0245_archived_flag_jobs
Revises: 0244_another_letter_org
Create Date: 2018-11-22 16:32:01.105803
"""
from alembic import op
import sqlalchemy as sa
revision = '0245_archived_flag_jobs'
down_revision = '0244_another_letter_org'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('jobs', sa.Column('archived', sa.Boolean(), nullable=False, server_default=sa.false()))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('jobs', 'archived')
# ### end Alembic commands ###
| """
Revision ID: 0245_archived_flag_jobs
Revises: 0244_another_letter_org
Create Date: 2018-11-22 16:32:01.105803
"""
from alembic import op
import sqlalchemy as sa
revision = '0245_archived_flag_jobs'
down_revision = '0244_another_letter_org'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('jobs', sa.Column('archived', sa.Boolean(), nullable=True))
op.execute('update jobs set archived = false')
op.alter_column('jobs', 'archived', nullable=False, server_default=sa.false())
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('jobs', 'archived')
# ### end Alembic commands ###
| Update jobs archived flag before setting the default value | Update jobs archived flag before setting the default value
Running an update before setting the column default value reduces
the time the table is locked (since most rows don't have a NULL
value anymore), but the migration takes slightly longer to run
overall.
| Python | mit | alphagov/notifications-api,alphagov/notifications-api |
4da19da48cb9c78e1ec327a2cc886a03ca60d67c | rtropo/outgoing.py | rtropo/outgoing.py | from urllib import urlencode
from urllib2 import urlopen
from rapidsms.backends.base import BackendBase
class TropoBackend(BackendBase):
"""A RapidSMS threadless backend for Tropo"""
def configure(self, config=None, **kwargs):
self.config = config
super(TropoBackend, self).configure(**kwargs)
def send(self, message):
base_url = 'http://api.tropo.com/1.0/sessions'
token = self.config['auth_token']
action = 'create'
number = self.config['number']
params = urlencode([('action', action), ('token', token), ('numberToDial', message.connection.identity), ('msg', message.text)])
self.debug("%s?%s" % (base_url, params))
data = urlopen('%s?%s' % (base_url, params)).read()
self.debug(data)
| from urllib import urlencode
from urllib2 import urlopen
from rapidsms.backends.base import BackendBase
class TropoBackend(BackendBase):
"""A RapidSMS threadless backend for Tropo"""
def configure(self, config=None, **kwargs):
self.config = config
def start(self):
"""Override BackendBase.start(), which never returns"""
self._running = True
def send(self, message):
self.debug("send(%s)" % message)
base_url = 'http://api.tropo.com/1.0/sessions'
token = self.config['auth_token']
action = 'create'
number = self.config['number']
params = urlencode([('action', action), ('token', token), ('numberToDial', message.connection.identity), ('msg', message.text)])
self.debug("%s?%s" % (base_url, params))
data = urlopen('%s?%s' % (base_url, params)).read()
self.debug(data)
return True
| Fix indentation; override old-style start() from BackendBase | Fix indentation; override old-style start() from BackendBase
| Python | bsd-3-clause | caktus/rapidsms-tropo,dimagi/rapidsms-tropo |
e3a530d741529a7bbfeb274c232e2c6b8a5faddc | kokki/cookbooks/postgresql9/recipes/default.py | kokki/cookbooks/postgresql9/recipes/default.py | import os
from kokki import Execute, Package
apt_list_path = '/etc/apt/sources.list.d/pitti-postgresql-lucid.list'
Execute("apt-update-postgresql9",
command = "apt-get update",
action = "nothing")
apt = None
if env.system.platform == "ubuntu":
Package("python-software-properties")
Execute("add-apt-repository ppa:pitti/postgresql",
not_if = lambda:os.path.exists(apt_list_path),
notifies = [("run", env.resources["Execute"]["apt-update-postgresql9"], True)])
| import os
from kokki import Execute, Package
if not (env.system.platform == "ubuntu" and env.system.lsb['release'] in ["11.10"]):
apt_list_path = '/etc/apt/sources.list.d/pitti-postgresql-lucid.list'
Execute("apt-update-postgresql9",
command = "apt-get update",
action = "nothing")
apt = None
if env.system.platform == "ubuntu":
Package("python-software-properties")
Execute("add-apt-repository ppa:pitti/postgresql",
not_if = lambda:os.path.exists(apt_list_path),
notifies = [("run", env.resources["Execute"]["apt-update-postgresql9"], True)])
| Use standard repo for postgresql9 in ubuntu 11.10 | Use standard repo for postgresql9 in ubuntu 11.10
| Python | bsd-3-clause | samuel/kokki |
8e47696a805cce70989a79cc6e8324aaec870f6d | electionleaflets/apps/people/devs_dc_helpers.py | electionleaflets/apps/people/devs_dc_helpers.py | import requests
from django.conf import settings
class DevsDCAPIHelper:
def __init__(self):
self.AUTH_TOKEN = settings.DEVS_DC_AUTH_TOKEN
self.base_url = "https://developers.democracyclub.org.uk/api/v1"
def make_request(self, endpoint, **params):
default_params = {
"auth_token": self.AUTH_TOKEN
}
if params:
default_params.update(params)
url = "{}/{}/".format(self.base_url, endpoint)
return requests.get(url, default_params)
def postcode_request(self, postcode):
return self.make_request("postcode/{}".format(postcode))
| import requests
from django.conf import settings
class DevsDCAPIHelper:
def __init__(self):
self.AUTH_TOKEN = settings.DEVS_DC_AUTH_TOKEN
self.base_url = "https://developers.democracyclub.org.uk/api/v1"
self.ballot_cache = {}
def make_request(self, endpoint, **params):
default_params = {
"auth_token": self.AUTH_TOKEN
}
if params:
default_params.update(params)
url = "{}/{}/".format(self.base_url, endpoint)
return requests.get(url, default_params)
def postcode_request(self, postcode):
return self.make_request("postcode/{}".format(postcode))
def ballot_request(self, ballot_paper_id):
if ballot_paper_id not in self.ballot_cache:
r = self.make_request("elections/{}".format(ballot_paper_id))
if r.status_code == 200:
self.ballot_cache[ballot_paper_id] = r
else:
return r
return self.ballot_cache[ballot_paper_id]
| Add a cached ballot fetcher to the DevsDC helper | Add a cached ballot fetcher to the DevsDC helper
If we happen to run out of RAM in Lambda (we won't), Lambda will just
kill the function and invoke a new one next time.
| Python | mit | DemocracyClub/electionleaflets,DemocracyClub/electionleaflets,DemocracyClub/electionleaflets |
00c808efd2ab38bcf9d808dcb784c9360a19937f | api/radar_api/views/organisation_consultants.py | api/radar_api/views/organisation_consultants.py | from radar_api.serializers.organisation_consultants import OrganisationConsultantSerializer
from radar.models import OrganisationConsultant
from radar.views.core import RetrieveUpdateDestroyModelView, ListCreateModelView
class OrganisationConsultantListView(ListCreateModelView):
serializer_class = OrganisationConsultantSerializer
model_class = OrganisationConsultant
class OrganisationConsultantDetailView(RetrieveUpdateDestroyModelView):
serializer_class = OrganisationConsultantSerializer
model_class = OrganisationConsultant
def register_views(app):
app.add_url_rule('/organisation-consultants', view_func=OrganisationConsultantListView.as_view('organisation_consultant_list'))
app.add_url_rule('/organisation-consultants/<int:id>', view_func=OrganisationConsultantDetailView.as_view('organisation_consultant_detail'))
| from radar_api.serializers.organisation_consultants import OrganisationConsultantSerializer
from radar.models import OrganisationConsultant
from radar.views.core import RetrieveUpdateDestroyModelView, ListCreateModelView
from radar.permissions import AdminPermission
class OrganisationConsultantListView(ListCreateModelView):
serializer_class = OrganisationConsultantSerializer
model_class = OrganisationConsultant
permission_classes = [AdminPermission]
class OrganisationConsultantDetailView(RetrieveUpdateDestroyModelView):
serializer_class = OrganisationConsultantSerializer
model_class = OrganisationConsultant
permission_classes = [AdminPermission]
def register_views(app):
app.add_url_rule('/organisation-consultants', view_func=OrganisationConsultantListView.as_view('organisation_consultant_list'))
app.add_url_rule('/organisation-consultants/<int:id>', view_func=OrganisationConsultantDetailView.as_view('organisation_consultant_detail'))
| Add permissions to organisation consultants endpoint | Add permissions to organisation consultants endpoint
| Python | agpl-3.0 | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar |
c977e1c235ccb040f28bc03c63d2667924d5edd3 | pythonforandroid/recipes/xeddsa/__init__.py | pythonforandroid/recipes/xeddsa/__init__.py | from pythonforandroid.recipe import CythonRecipe
from pythonforandroid.toolchain import current_directory, shprint
from os.path import join
import sh
class XedDSARecipe(CythonRecipe):
name = 'xeddsa'
version = '0.4.4'
url = 'https://pypi.python.org/packages/source/X/XEdDSA/XEdDSA-{version}.tar.gz'
depends = [
'setuptools',
'cffi',
'pynacl',
]
patches = ['remove_dependencies.patch']
call_hostpython_via_targetpython = False
def build_arch(self, arch):
with current_directory(join(self.get_build_dir(arch.arch))):
env = self.get_recipe_env(arch)
hostpython = sh.Command(self.ctx.hostpython)
shprint(
hostpython, 'ref10/build.py',
_env=env
)
shprint(sh.cp, '_crypto_sign.so', self.ctx.get_site_packages_dir())
self.install_python_package(arch)
recipe = XedDSARecipe()
| from pythonforandroid.recipe import CythonRecipe
from pythonforandroid.toolchain import current_directory, shprint
from os.path import join
import sh
class XedDSARecipe(CythonRecipe):
name = 'xeddsa'
version = '0.4.4'
url = 'https://pypi.python.org/packages/source/X/XEdDSA/XEdDSA-{version}.tar.gz'
depends = [
'setuptools',
'cffi',
'pynacl',
]
patches = ['remove_dependencies.patch']
call_hostpython_via_targetpython = False
def build_arch(self, arch):
with current_directory(join(self.get_build_dir(arch.arch))):
env = self.get_recipe_env(arch)
hostpython = sh.Command(self.ctx.hostpython)
shprint(
hostpython, 'ref10/build.py',
_env=env
)
# the library could be `_crypto_sign.cpython-37m-x86_64-linux-gnu.so`
# or simply `_crypto_sign.so` depending on the platform/distribution
sh.cp('-a', sh.glob('_crypto_sign*.so'), self.ctx.get_site_packages_dir())
self.install_python_package(arch)
recipe = XedDSARecipe()
| Fix xeddsa crypto_sign shared lib copy | Fix xeddsa crypto_sign shared lib copy
Could be `_crypto_sign.cpython-37m-x86_64-linux-gnu.so` or simply `_crypto_sign.so` depending on the platform/distribution | Python | mit | germn/python-for-android,rnixx/python-for-android,rnixx/python-for-android,germn/python-for-android,rnixx/python-for-android,kivy/python-for-android,PKRoma/python-for-android,germn/python-for-android,germn/python-for-android,kronenpj/python-for-android,kivy/python-for-android,kronenpj/python-for-android,kivy/python-for-android,PKRoma/python-for-android,kronenpj/python-for-android,germn/python-for-android,kronenpj/python-for-android,kivy/python-for-android,PKRoma/python-for-android,kivy/python-for-android,rnixx/python-for-android,kronenpj/python-for-android,germn/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,PKRoma/python-for-android |
6deab74e41cabcb9a3fb4075f270a9cdd591a435 | pgallery/tests/test_utils.py | pgallery/tests/test_utils.py | from __future__ import unicode_literals
import unittest
from ..models import sanitize_exif_value
class SanitizeExifValueTestCase(unittest.TestCase):
def test_strip_null_bytes(self):
"""
Check that null bytes are stripped from the string.
"""
key = "not relevant"
value = "abc\x00d"
self.assertEqual(sanitize_exif_value(key, value), "abcd")
| from __future__ import unicode_literals
import unittest
from ..models import sanitize_exif_value
class SanitizeExifValueTestCase(unittest.TestCase):
def test_strip_null_bytes(self):
"""
Check that null bytes are stripped from the string.
"""
key = "not relevant"
value = "abc\x00d"
self.assertEqual(sanitize_exif_value(key, value), "abcd")
def test_coerce_to_text(self):
"""
Check that non-text types are coerced to text.
"""
key = "not relevant"
value = (20, 70)
self.assertEqual(sanitize_exif_value(key, value), "(20, 70)")
| Test type coercion in sanitize_exif_value | Test type coercion in sanitize_exif_value
| Python | mit | zsiciarz/django-pgallery,zsiciarz/django-pgallery |
42709afec9f2e2ed419365f61324ce0c8ff96423 | budget/forms.py | budget/forms.py | from django import forms
from django.template.defaultfilters import slugify
from budget.models import Budget, BudgetEstimate
class BudgetForm(forms.ModelForm):
class Meta:
model = Budget
fields = ('name', 'start_date')
def save(self):
if not self.instance.slug:
self.instance.slug = slugify(self.cleaned_data['name'])
super(BudgetForm, self).save()
class BudgetEstimateForm(forms.ModelForm):
class Meta:
model = BudgetEstimate
fields = ('category', 'amount')
def save(self, budget):
self.instance.budget = budget
super(BudgetEstimateForm, self).save()
| import datetime
from django import forms
from django.template.defaultfilters import slugify
from budget.models import Budget, BudgetEstimate
class BudgetForm(forms.ModelForm):
start_date = forms.DateTimeField(initial=datetime.datetime.now, required=False, widget=forms.SplitDateTimeWidget)
class Meta:
model = Budget
fields = ('name', 'start_date')
def save(self):
if not self.instance.slug:
self.instance.slug = slugify(self.cleaned_data['name'])
super(BudgetForm, self).save()
class BudgetEstimateForm(forms.ModelForm):
class Meta:
model = BudgetEstimate
fields = ('category', 'amount')
def save(self, budget):
self.instance.budget = budget
super(BudgetEstimateForm, self).save()
| Split the start_date for better data entry (and Javascript date pickers). | Split the start_date for better data entry (and Javascript date pickers).
| Python | bsd-3-clause | jokimies/django-pj-budget,jokimies/django-pj-budget,toastdriven/django-budget,toastdriven/django-budget,jokimies/django-pj-budget |
db6b9761d51d45b2708ba6bca997196fc73fbe94 | sheldon/__init__.py | sheldon/__init__.py | # -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: [email protected]
@license: The MIT license
Copyright (C) 2015
"""
# Bot module contains bot's main class - Sheldon
from sheldon.bot import *
# Hooks module contains hooks for plugins
from sheldon.hooks import *
# Utils folder contains scripts for more
# comfortable work with sending and parsing
# messages. For example, script for downloading
# files by url.
from sheldon.utils import *
__author__ = 'Seva Zhidkov'
__version__ = '0.0.1#dev'
__email__ = '[email protected]'
| # -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: [email protected]
@license: The MIT license
Copyright (C) 2015
"""
# Bot module contains bot's main class - Sheldon
from sheldon.bot import *
# Hooks module contains hooks for plugins
from sheldon.hooks import *
# Adapter module contains classes and tools
# for plugins sending messages
from sheldon.adapter import *
# Utils folder contains scripts for more
# comfortable work with sending and parsing
# messages. For example, script for downloading
# files by url.
from sheldon.utils import *
__author__ = 'Seva Zhidkov'
__version__ = '0.0.1#dev'
__email__ = '[email protected]'
| Add adapter module to init file | Add adapter module to init file
| Python | mit | lises/sheldon |
82d312826ee67b098d9e9a52277912d8e1829960 | geocode.py | geocode.py | import os
import sys
import json
import urllib
import urllib2
api_key = os.getenv("MAPS_API_KEY")
if api_key == "":
sys.exit("Please obtain an API key from https://developers.google.com/maps/documentation/geocoding/start#get-a-key and set the environment variable MAPS_API_KEY")
#print api_key
url = 'https://maps.googleapis.com/maps/api/geocode/json?'
values = {'address' : 'Green Bank Telescope',
'key' : api_key }
data = urllib.urlencode(values)
full_url = url + '?' + data
response = urllib2.urlopen(full_url)
json_response = response.read()
data_dict = json.loads(json_response)
#print data_dict
lat = data_dict['results'][0]['geometry']['location']['lat']
lng = data_dict['results'][0]['geometry']['location']['lng']
print lat, lng
| import os
import sys
import json
import urllib
import urllib2
api_key = os.getenv("MAPS_API_KEY")
if api_key == "":
sys.exit("Please obtain an API key from https://developers.google.com/maps/documentation/geocoding/start#get-a-key and set the environment variable MAPS_API_KEY")
#print api_key
url = 'https://maps.googleapis.com/maps/api/geocode/json?'
values = {'address' : 'Campbell Hall',
'key' : api_key }
data = urllib.urlencode(values)
full_url = url + data
print full_url
response = urllib2.urlopen(full_url)
json_response = response.read()
data_dict = json.loads(json_response)
#print data_dict
lat = data_dict['results'][0]['geometry']['location']['lat']
lng = data_dict['results'][0]['geometry']['location']['lng']
print lat, lng
| Remove second ? from URL | Remove second ? from URL
| Python | bsd-3-clause | caseyjlaw/flaskigm,caseyjlaw/flaskigm |
987fd7555eadfa15d10db7991f4a7e8a4a7dbbbf | custom/topo-2sw-2host.py | custom/topo-2sw-2host.py | """Custom topology example
author: Brandon Heller ([email protected])
Two directly connected switches plus a host for each switch:
host --- switch --- switch --- host
Adding the 'topos' dict with a key/value pair to generate our newly defined
topology enables one to pass in '--topo=mytopo' from the command line.
"""
from mininet.topo import Topo, Node
class MyTopo( Topo ):
"Simple topology example."
def __init__( self, enable_all = True ):
"Create custom topo."
# Add default members to class.
super( MyTopo, self ).__init__()
# Set Node IDs for hosts and switches
leftHost = 1
leftSwitch = 2
rightSwitch = 3
rightHost = 4
# Add nodes
self.addNode( leftSwitch, Node( isSwitch=True ) )
self.addNode( rightSwitch, Node( isSwitch=True ) )
self.addNode( leftHost, Node( isSwitch=False ) )
self.addNode( rightHost, Node( isSwitch=False ) )
# Add edges
self.add_edge( leftHost, leftSwitch )
self.add_edge( leftSwitch, rightSwitch )
self.add_edge( rightSwitch, rightHost )
# Consider all switches and hosts 'on'
self.enable_all()
topos = { 'mytopo': ( lambda: MyTopo() ) }
| """Custom topology example
author: Brandon Heller ([email protected])
Two directly connected switches plus a host for each switch:
host --- switch --- switch --- host
Adding the 'topos' dict with a key/value pair to generate our newly defined
topology enables one to pass in '--topo=mytopo' from the command line.
"""
from mininet.topo import Topo
from mininet.node import Node
class MyTopo( Topo ):
"Simple topology example."
def __init__( self, enable_all = True ):
"Create custom topo."
# Add default members to class.
super( MyTopo, self ).__init__()
# Set Node IDs for hosts and switches
leftHost = 1
leftSwitch = 2
rightSwitch = 3
rightHost = 4
# Add nodes
self.addNode( leftSwitch, Node( isSwitch=True ) )
self.addNode( rightSwitch, Node( isSwitch=True ) )
self.addNode( leftHost, Node( isSwitch=False ) )
self.addNode( rightHost, Node( isSwitch=False ) )
# Add edges
self.add_edge( leftHost, leftSwitch )
self.add_edge( leftSwitch, rightSwitch )
self.add_edge( rightSwitch, rightHost )
# Consider all switches and hosts 'on'
self.enable_all()
topos = { 'mytopo': ( lambda: MyTopo() ) }
| Fix custom topology example; outdated import | Fix custom topology example; outdated import
Reported-by: Julius Bachnick
| Python | bsd-3-clause | mininet/mininet,mininet/mininet,mininet/mininet |
15e3bd894190ee745fab4534f7bbe14772d17ac1 | newswall/urls.py | newswall/urls.py | from django.conf.urls import url, patterns
from newswall.feeds import StoryFeed
from newswall import views
urlpatterns = patterns(
'',
url(r'^feed/$', StoryFeed()),
url(r'^$',
views.ArchiveIndexView.as_view(),
name='newswall_entry_archive'),
url(r'^(?P<year>\d{4})/$',
views.YearArchiveView.as_view(),
name='newswall_entry_archive_year'),
url(r'^(?P<year>\d{4})/(?P<month>\d{2})/$',
views.MonthArchiveView.as_view(),
name='newswall_entry_archive_month'),
url(r'^(?P<year>\d{4})/(?P<month>\d{2})/(?P<day>\d{2})/$',
views.DayArchiveView.as_view(),
name='newswall_entry_archive_day'),
url(r'^(?P<year>\d{4})/(?P<month>\d{2})/(?P<day>\d{2})/(?P<slug>[-\w]+)/$',
views.DateDetailView.as_view(),
name='newswall_entry_detail'),
url(r'^source/(?P<slug>[-\w]+)/$',
views.SourceArchiveIndexView.as_view(),
name='newswall_source_detail'),
)
| from django.conf.urls import url, patterns
from newswall.feeds import StoryFeed
from newswall import views
urlpatterns = patterns(
'',
url(r'^feed/$', StoryFeed()),
url(r'^$',
views.ArchiveIndexView.as_view(),
name='newswall_entry_archive'),
url(r'^(?P<year>\d{4})/$',
views.YearArchiveView.as_view(),
name='newswall_entry_archive_year'),
url(r'^(?P<year>\d{4})/(?P<month>\d{2})/$',
views.MonthArchiveView.as_view(),
name='newswall_entry_archive_month'),
url(r'^(?P<year>\d{4})/(?P<month>\d{2})/(?P<day>\d{2})/$',
views.DayArchiveView.as_view(),
name='newswall_entry_archive_day'),
url(r'^(?P<year>\d{4})/(?P<month>\d{2})/(?P<day>\d{2})/(?P<slug>[-\w]+)/$',
views.DateDetailView.as_view(),
name='newswall_entry_detail'),
url(r'^source/(?P<slug>[-\w]+)/$',
views.SourceArchiveIndexView.as_view(),
name='newswall_source_detail'),
)
| Test to see if commit works. | Test to see if commit works.
| Python | bsd-3-clause | registerguard/django-newswall,registerguard/django-newswall |
66805c134125d957d7c7a0234e6b46e6aa1fa17f | walltime.py | walltime.py | #!/usr/bin/env python
"""
Created on Fri Mar 14 15:25:36 2014
@author: ibackus
"""
import time
t0 = time.time()
import matplotlib.pyplot as plt
import numpy as np
import datetime
import sys
t1 = time.time()
print 'Importing took {} s'.format(t1-t0)
if len(sys.argv) < 2:
print 'USAGE: walltime filename'
else:
fname = sys.argv[-1]
log_file = np.genfromtxt(fname, comments='#', delimiter=' ')
walltime_total = datetime.timedelta(seconds = log_file[:,-1].sum())
walltime_avg = datetime.timedelta(seconds = log_file[:,-1].mean())
print 'Total walltime: '
print str(walltime_total)
print 'Average walltime per step:'
print str(walltime_avg)
plt.plot(log_file[:,-1],'x')
t2 = time.time()
print 'Running took an extra {} s'.format(t2-t1)
print 'For a total of {} s'.format(t2 - t0)
plt.show() | #!/usr/bin/env python
"""
Created on Fri Mar 14 15:25:36 2014
@author: ibackus
"""
import matplotlib.pyplot as plt
import numpy as np
import datetime
import sys
if len(sys.argv) < 2:
print 'USAGE: walltime filename'
else:
fname = sys.argv[-1]
log_file = np.genfromtxt(fname, comments='#', delimiter=' ')
walltime_total = datetime.timedelta(seconds = log_file[:,-1].sum())
walltime_avg = datetime.timedelta(seconds = log_file[:,-1].mean())
print 'Total walltime: '
print str(walltime_total)
print 'Average walltime per step:'
print str(walltime_avg)
plt.plot(log_file[:,-1],'x')
plt.show() | Revert "Print statements added for profiling" | Revert "Print statements added for profiling"
This reverts commit a6ae05c13666b83a1f1a8707fe21972bd1f758d9.
| Python | mit | ibackus/custom_python_packages,trquinn/custom_python_packages |
4b819129557d5f0546d9edf206710fd2ec962881 | utsokt/restapi/models.py | utsokt/restapi/models.py | from django.db import models
from django.utils.translation import ugettext_lazy as _
class Story(models.Model):
url = models.URLField(_('URL'))
title = models.CharField(_('Title'), max_length=64)
excerpt = models.CharField(_('Excerpt'), max_length=64, null=True, blank=True)
created_at = models.TimeField(_('Created at'), auto_now_add=True)
is_unread = models.BooleanField(_('Is unread?'), default=True)
| from django.db import models
from django.utils.translation import ugettext_lazy as _
class Story(models.Model):
url = models.URLField(_('URL'))
title = models.CharField(_('Title'), max_length=64)
excerpt = models.CharField(_('Excerpt'), max_length=64, null=True, blank=True)
created_at = models.TimeField(_('Created at'), auto_now_add=True)
is_unread = models.BooleanField(_('Is unread?'), default=True)
class Meta:
ordering = ['-created_at']
| Order stories by descending creation time | Order stories by descending creation time
| Python | bsd-3-clause | madr/utsokt,madr/utsokt |
dd50858ee22c27076919614d1994e3ce9c8e2399 | soundem/handlers.py | soundem/handlers.py | from flask import jsonify
from soundem import app
def json_error_handler(e):
return jsonify({
'status_code': e.code,
'error': 'Bad Request',
'detail': e.description
}), e.code
@app.errorhandler(400)
def bad_request_handler(e):
return json_error_handler(e)
@app.errorhandler(401)
def unauthorized_handler(e):
return json_error_handler(e)
@app.errorhandler(404)
def not_found_handler(e):
return json_error_handler(e)
@app.errorhandler(405)
def method_not_allowed_handler(e):
return json_error_handler(e)
| from flask import jsonify
from soundem import app
def json_error_handler(e):
return jsonify({
'status_code': e.code,
'error': e.name,
'detail': e.description
}), e.code
@app.errorhandler(400)
def bad_request_handler(e):
return json_error_handler(e)
@app.errorhandler(401)
def unauthorized_handler(e):
return json_error_handler(e)
@app.errorhandler(404)
def not_found_handler(e):
return json_error_handler(e)
@app.errorhandler(405)
def method_not_allowed_handler(e):
return json_error_handler(e)
| Fix json error handler name | Fix json error handler name | Python | mit | building4theweb/soundem-api |
dfc7e8a46558d3cf0e7f63da347e2b34253e302c | soundmeter/utils.py | soundmeter/utils.py | from ctypes import *
from contextlib import contextmanager
import os
import stat
def get_file_path(f):
if f:
name = getattr(f, 'name')
if name:
path = os.path.abspath(name)
return path
def create_executable(path, content):
with open(path, 'w') as f:
f.write(content)
s = os.stat(path)
os.chmod(path, s.st_mode | stat.S_IEXEC)
# Work-around on error messages by alsa-lib
# http://stackoverflow.com/questions/7088672/
ERROR_HANDLER_FUNC = CFUNCTYPE(None, c_char_p, c_int,
c_char_p, c_int, c_char_p)
def py_error_handler(filename, line, function, err, fmt):
pass
c_error_handler = ERROR_HANDLER_FUNC(py_error_handler)
@contextmanager
def noalsaerr():
asound = cdll.LoadLibrary('libasound.so')
asound.snd_lib_error_set_handler(c_error_handler)
yield
asound.snd_lib_error_set_handler(None)
| from ctypes import * # NOQA
from contextlib import contextmanager
import os
import stat
def get_file_path(f):
if f:
name = getattr(f, 'name')
if name:
path = os.path.abspath(name)
return path
def create_executable(path, content):
with open(path, 'w') as f:
f.write(content)
s = os.stat(path)
os.chmod(path, s.st_mode | stat.S_IEXEC)
# Work-around on error messages by alsa-lib
# http://stackoverflow.com/questions/7088672/
ERROR_HANDLER_FUNC = CFUNCTYPE(None, c_char_p, c_int,
c_char_p, c_int, c_char_p)
def py_error_handler(filename, line, function, err, fmt):
pass
c_error_handler = ERROR_HANDLER_FUNC(py_error_handler)
@contextmanager
def noalsaerr():
asound = cdll.LoadLibrary('libasound.so')
asound.snd_lib_error_set_handler(c_error_handler)
yield
asound.snd_lib_error_set_handler(None)
| Enforce flake8 and NOQA cases | Enforce flake8 and NOQA cases
| Python | bsd-2-clause | shichao-an/soundmeter |
00eb518f9caeab4df20f08a08826aac57f23300e | notescli/core.py | notescli/core.py | import argparse
import yaml
import shutil
import os
from subprocess import call
from os.path import expanduser, isdir
from config import load_config
import cliparser
import indexer
import io
import commands
def main():
config = load_config("~/.notes-cli/config.yaml")
options = cliparser.parse_options()
index = indexer.create_or_load_index(config)
if options.command == "ls":
commands.command_ls(index)
elif options.command == "view":
commands.command_view(index, options.query)
elif options.command == "add":
commands.command_add(config, options.file)
commands.command_reindex(config)
elif options.command == "edit":
commands.command_edit(index, options.query)
commands.command_reindex(config)
elif options.command == "rm":
commands.command_rm(index, options.query)
commands.command_reindex(config)
elif options.command == "reindex":
commands.command_reindex(config)
else:
print "Not supported"
if __name__ == "__main__":
main()
| import argparse
import yaml
import shutil
import os
from subprocess import call
from os.path import expanduser, isdir
from config import load_config
import cliparser
import indexer
import io
import commands
def main():
config = load_config("~/.notes-cli/config.yaml")
options = cliparser.parse_options()
index = indexer.create_or_load_index(config)
if options.command == "ls":
commands.command_ls(index)
elif options.command == "view":
commands.command_view(index, options.query)
elif options.command == "add":
commands.command_add(config, options.file)
elif options.command == "edit":
commands.command_edit(index, options.query)
elif options.command == "rm":
commands.command_rm(index, options.query)
elif options.command == "reindex":
commands.command_reindex(config)
else:
print "Not supported"
if __name__ == "__main__":
main()
| Remove reindex calls after changes | Remove reindex calls after changes
Already reindex notes on initialization
| Python | mit | phss/notes-cli |
b8e1e5a926bc11d2d8ea975ad24496fca444f09e | betainvite/management/commands/send_waitlist_invites.py | betainvite/management/commands/send_waitlist_invites.py | from django.core.management.base import BaseCommand
from optparse import make_option
class Command(BaseCommand):
help = "Send invitations to people on the waiting list"
option_list = BaseCommand.option_list + (
make_option('-c', '--count',
type="int",
default = 10,
dest="count",
help='number of new invitations'),
)
def handle(self, *args, **options):
from betainvite.models import WaitingListEntry
count = options.get('count')
entries = WaitingListEntry.objects.filter(invited=False)[:count]
for entry in entries:
entry.send_invitation()
| from django.core.management.base import BaseCommand
from optparse import make_option
class Command(BaseCommand):
help = "Send invitations to people on the waiting list"
def handle(self, *args, **options):
try:
count = args[0]
except IndexError:
print u'usage :', __name__.split('.')[-1], 'number_of_invitations'
return
from betainvite.models import WaitingListEntry
entries = WaitingListEntry.objects.filter(invited=False)[:count]
print "Sending invitations to %d people" % (entries.count())
for entry in entries:
print "Sending invitation to %s" % (entry.email)
entry.send_invitation()
| Add console log send invitations command | Add console log send invitations command
| Python | bsd-3-clause | euanlau/django-betainvite |
569dbdc820d9ead02a8941d69b1c8143fe4d4cfa | pytest_pipeline/plugin.py | pytest_pipeline/plugin.py | # -*- coding: utf-8 -*-
"""
pytest_pipeline.plugin
~~~~~~~~~~~~~~~~~~~~~~
pytest plugin entry point.
:copyright: (c) 2014 Wibowo Arindrarto <[email protected]>
:license: BSD
"""
## credits to Holger Krekel himself for these xfail marking functions
## http://stackoverflow.com/a/12579625/243058
def pytest_runtest_makereport(item, call):
if "xfail_pipeline" in item.keywords:
if call.excinfo is not None:
parent = item.parent
parent._previousfailed = item
def pytest_addoption(parser):
group = parser.getgroup("general")
group.addoption("--base-pipeline-dir", dest="base_pipeline_dir",
default=None, metavar="dir",
help="Base directory to put all pipeline test directories")
group.addoption("--xfail-pipeline", dest="xfail_pipeline", action="store_true",
default=False,
help="Whether to fail a class immediately if any of its tests fail")
group.addoption("--skip-run", dest="skip_run", action="store_true",
default=False,
help="Whether to skip the pipeline run and all tests after it")
| # -*- coding: utf-8 -*-
"""
pytest_pipeline.plugin
~~~~~~~~~~~~~~~~~~~~~~
pytest plugin entry point.
:copyright: (c) 2014 Wibowo Arindrarto <[email protected]>
:license: BSD
"""
## credits to Holger Krekel himself for these xfail marking functions
## http://stackoverflow.com/a/12579625/243058
def pytest_runtest_makereport(item, call):
if "xfail_pipeline" in item.keywords:
if call.excinfo is not None:
parent = item.parent
parent._previousfailed = item
def pytest_addoption(parser):
group = parser.getgroup("general")
group.addoption("--base-pipeline-dir", dest="base_pipeline_dir",
default=None, metavar="dir",
help="Base directory to put all pipeline test directories")
group.addoption("--xfail-pipeline", dest="xfail_pipeline", action="store_true",
default=False,
help="Whether to fail a class immediately if any of its tests fail")
| Remove unused 'skip_run' option flag | Remove unused 'skip_run' option flag
| Python | bsd-3-clause | bow/pytest-pipeline |
4f2a3f26b8b0ec1f62e036f0bd9d15d71a628e0c | mamba/formatters.py | mamba/formatters.py | # -*- coding: utf-8 -*-
from clint.textui import indent, puts, colored
from mamba import spec
class DocumentationFormatter(object):
def __init__(self):
self.has_failed_tests = False
self.total_specs = 0
self.total_seconds = .0
def format(self, item):
puts(colored.white(item.name))
self._format_children(item)
def _format_children(self, item):
for spec_ in item.specs:
if isinstance(spec_, spec.Suite):
self.format_suite(spec_)
else:
self.format_spec(spec_)
def format_suite(self, suite):
with indent(1 + suite.depth):
puts(colored.white(suite.name))
self._format_children(suite)
def format_spec(self, spec_):
with indent(1 + spec_.depth):
symbol = colored.green('✓')
if spec_.failed:
symbol = colored.red('✗')
self.has_failed_tests = True
puts(symbol + ' ' + spec_.name.replace('_', ' '))
if spec_.failed:
with indent(spec_.depth + 2):
puts(colored.red(str(spec_.exception_caught())))
self.total_seconds += spec_.elapsed_time.total_seconds()
self.total_specs += 1
def format_summary(self):
puts()
color = colored.red if self.has_failed_tests else colored.green
puts(color("%d specs ran in %.4f seconds" % (self.total_specs, self.total_seconds)))
| # -*- coding: utf-8 -*-
from clint.textui import indent, puts, colored
from mamba import spec
class DocumentationFormatter(object):
def __init__(self):
self.has_failed_tests = False
self.total_specs = 0
self.total_seconds = .0
def format(self, item):
puts()
puts(colored.white(item.name))
self._format_children(item)
def _format_children(self, item):
for spec_ in item.specs:
if isinstance(spec_, spec.Suite):
self.format_suite(spec_)
else:
self.format_spec(spec_)
def format_suite(self, suite):
with indent(1 + suite.depth):
puts(colored.white(suite.name))
self._format_children(suite)
def format_spec(self, spec_):
with indent(1 + spec_.depth):
symbol = colored.green('✓')
if spec_.failed:
symbol = colored.red('✗')
self.has_failed_tests = True
puts(symbol + ' ' + spec_.name.replace('_', ' '))
if spec_.failed:
with indent(spec_.depth + 2):
puts(colored.red(str(spec_.exception_caught())))
self.total_seconds += spec_.elapsed_time.total_seconds()
self.total_specs += 1
def format_summary(self):
puts()
color = colored.red if self.has_failed_tests else colored.green
puts(color("%d specs ran in %.4f seconds" % (self.total_specs, self.total_seconds)))
| Put a blank line among main suites | Put a blank line among main suites
| Python | mit | alejandrodob/mamba,eferro/mamba,jaimegildesagredo/mamba,dex4er/mamba,angelsanz/mamba,nestorsalceda/mamba,markng/mamba |
f9f9111ddafb7dfd0554d541befd3cc660169689 | apps/redirects/urls.py | apps/redirects/urls.py | from django.conf.urls.defaults import *
from util import redirect
urlpatterns = patterns('',
redirect(r'^b2g', 'firefoxos'),
redirect(r'^b2g/faq', 'firefoxos'),
redirect(r'^b2g/about', 'firefoxos'),
)
| from django.conf.urls.defaults import *
from util import redirect
urlpatterns = patterns('',
redirect(r'^b2g', 'firefoxos.firefoxos'),
redirect(r'^b2g/faq', 'firefoxos.firefoxos'),
redirect(r'^b2g/about', 'firefoxos.firefoxos'),
)
| Fix view name for b2g redirects | Fix view name for b2g redirects
bug 792482
| Python | mpl-2.0 | dudepare/bedrock,rishiloyola/bedrock,mahinthjoe/bedrock,ckprice/bedrock,davehunt/bedrock,davidwboswell/documentation_autoresponse,jpetto/bedrock,dudepare/bedrock,glogiotatidis/bedrock,kyoshino/bedrock,mahinthjoe/bedrock,MichaelKohler/bedrock,ckprice/bedrock,analytics-pros/mozilla-bedrock,analytics-pros/mozilla-bedrock,MichaelKohler/bedrock,sylvestre/bedrock,CSCI-462-01-2017/bedrock,chirilo/bedrock,chirilo/bedrock,yglazko/bedrock,sgarrity/bedrock,SujaySKumar/bedrock,elin-moco/bedrock,kyoshino/bedrock,mmmavis/bedrock,jpetto/bedrock,andreadelrio/bedrock,davidwboswell/documentation_autoresponse,jpetto/bedrock,jacshfr/mozilla-bedrock,davehunt/bedrock,gauthierm/bedrock,Sancus/bedrock,ericawright/bedrock,gauthierm/bedrock,TheoChevalier/bedrock,mozilla/bedrock,gauthierm/bedrock,TheJJ100100/bedrock,ckprice/bedrock,TheoChevalier/bedrock,yglazko/bedrock,mmmavis/bedrock,flodolo/bedrock,bensternthal/bedrock,mmmavis/lightbeam-bedrock-website,glogiotatidis/bedrock,alexgibson/bedrock,glogiotatidis/bedrock,pascalchevrel/bedrock,MichaelKohler/bedrock,pmclanahan/bedrock,mmmavis/lightbeam-bedrock-website,jgmize/bedrock,sylvestre/bedrock,Sancus/bedrock,malena/bedrock,mozilla/mwc,kyoshino/bedrock,Jobava/bedrock,petabyte/bedrock,schalkneethling/bedrock,sgarrity/bedrock,gerv/bedrock,pascalchevrel/bedrock,CSCI-462-01-2017/bedrock,yglazko/bedrock,mozilla/mwc,jacshfr/mozilla-bedrock,jgmize/bedrock,mozilla/bedrock,Jobava/bedrock,mmmavis/bedrock,mkmelin/bedrock,glogiotatidis/bedrock,davidwboswell/documentation_autoresponse,petabyte/bedrock,marcoscaceres/bedrock,petabyte/bedrock,mermi/bedrock,jacshfr/mozilla-bedrock,davehunt/bedrock,CSCI-462-01-2017/bedrock,chirilo/bedrock,andreadelrio/bedrock,SujaySKumar/bedrock,ericawright/bedrock,sgarrity/bedrock,analytics-pros/mozilla-bedrock,TheJJ100100/bedrock,malena/bedrock,alexgibson/bedrock,craigcook/bedrock,malena/bedrock,mmmavis/bedrock,davehunt/bedrock,rishiloyola/bedrock,l-hedgehog/bedrock,mahinthjoe/bedrock,jacshfr/mozilla-bedrock,kyoshino/bedrock,flodolo/bedrock,pmclanahan/bedrock,gauthierm/bedrock,ericawright/bedrock,sgarrity/bedrock,andreadelrio/bedrock,bensternthal/bedrock,bensternthal/bedrock,mermi/bedrock,pmclanahan/bedrock,flodolo/bedrock,yglazko/bedrock,marcoscaceres/bedrock,mkmelin/bedrock,hoosteeno/bedrock,davidwboswell/documentation_autoresponse,amjadm61/bedrock,amjadm61/bedrock,TheoChevalier/bedrock,alexgibson/bedrock,Jobava/bedrock,jgmize/bedrock,amjadm61/bedrock,dudepare/bedrock,mozilla/bedrock,elin-moco/bedrock,schalkneethling/bedrock,petabyte/bedrock,mermi/bedrock,mmmavis/lightbeam-bedrock-website,craigcook/bedrock,pmclanahan/bedrock,rishiloyola/bedrock,analytics-pros/mozilla-bedrock,gerv/bedrock,alexgibson/bedrock,SujaySKumar/bedrock,dudepare/bedrock,andreadelrio/bedrock,amjadm61/bedrock,marcoscaceres/bedrock,elin-moco/bedrock,chirilo/bedrock,jacshfr/mozilla-bedrock,l-hedgehog/bedrock,gerv/bedrock,schalkneethling/bedrock,flodolo/bedrock,pascalchevrel/bedrock,bensternthal/bedrock,mozilla/bedrock,hoosteeno/bedrock,sylvestre/bedrock,Sancus/bedrock,jgmize/bedrock,hoosteeno/bedrock,sylvestre/bedrock,mahinthjoe/bedrock,TheJJ100100/bedrock,mozilla/mwc,ericawright/bedrock,Jobava/bedrock,rishiloyola/bedrock,craigcook/bedrock,amjadm61/bedrock,gerv/bedrock,mozilla/mwc,malena/bedrock,craigcook/bedrock,TheoChevalier/bedrock,schalkneethling/bedrock,TheJJ100100/bedrock,marcoscaceres/bedrock,mermi/bedrock,SujaySKumar/bedrock,ckprice/bedrock,pascalchevrel/bedrock,Sancus/bedrock,elin-moco/bedrock,l-hedgehog/bedrock,jpetto/bedrock,CSCI-462-01-2017/bedroc
k,l-hedgehog/bedrock,mkmelin/bedrock,hoosteeno/bedrock,mkmelin/bedrock,MichaelKohler/bedrock |
960eb0ce813988d8f90e76fbfd0485656cef541f | mff_rams_plugin/__init__.py | mff_rams_plugin/__init__.py | from uber.common import *
from ._version import __version__
from .config import *
from .models import *
from .model_checks import *
from .automated_emails import *
static_overrides(join(config['module_root'], 'static'))
template_overrides(join(config['module_root'], 'templates'))
mount_site_sections(config['module_root'])
c.MENU.append_menu_item(MenuItem(name='People', access=[c.PEOPLE, c.REG_AT_CON], submenu=[
MenuItem(name='Comped Badges', href='../mff_reports/comped_badges', access=c.PEOPLE),
])
)
| from uber.common import *
from ._version import __version__
from .config import *
from .models import *
from .model_checks import *
from .automated_emails import *
static_overrides(join(config['module_root'], 'static'))
template_overrides(join(config['module_root'], 'templates'))
mount_site_sections(config['module_root'])
c.MENU.append_menu_item(MenuItem(name='Midwest FurFest', access=c.PEOPLE, submenu=[
MenuItem(name='Comped Badges', href='../mff_reports/comped_badges'),
])
)
| Rename new admin dropdown menu | Rename new admin dropdown menu
| Python | agpl-3.0 | MidwestFurryFandom/mff-rams-plugin,MidwestFurryFandom/mff-rams-plugin |
0cd55ad979912112edb5e26381a2697f235c890a | teknologr/registration/mailutils.py | teknologr/registration/mailutils.py | from django.core.mail import send_mail
# TODO: check whether this should be sent from Phuxivator
def mailApplicantSubmission(context, sender='[email protected]'):
name = context['name']
receiver = context['email']
subject = 'Tack för din medlemsansökan till Teknologföreningen!'
message = '''Hej {name},
Tack för din medlemsansökan till Teknologföreningen!
För att bli en fullständig medlem så är nästa steg att delta i ett Nationsmöte (Namö).
Detta informeras mera senare.
Vid frågor eller ifall du inte ansökt om medlemskap, kontakt {sender}
Detta är ett automatiskt meddelande, du behöver inte svara på det.
'''.format(name=name, sender=sender)
return send_mail(
subject,
message,
sender,
[receiver],
fail_silently=False)
| from django.core.mail import send_mail
# TODO: check whether this should be sent from Phuxivator
def mailApplicantSubmission(context, sender='[email protected]'):
name = context['name']
receiver = context['email']
subject = 'Tack för din medlemsansökan till Teknologföreningen!'
message = '''Hej {name},
Tack för din medlemsansökan till Teknologföreningen!
För att bli en fullständig medlem så är nästa steg att delta i ett Nationsmöte (Namö).
Detta informeras mera senare.
Vid frågor eller ifall du inte ansökt om medlemskap, kontakta {sender}
Detta är ett automatiskt meddelande, du behöver inte svara på det.
'''.format(name=name, sender=sender)
return send_mail(
subject,
message,
sender,
[receiver],
fail_silently=False)
| Fix typo in automatic email | Fix typo in automatic email
| Python | mit | Teknologforeningen/teknologr.io,Teknologforeningen/teknologr.io,Teknologforeningen/teknologr.io,Teknologforeningen/teknologr.io |
d30c9f5d83c88890771a0046a59325450151eebd | lagesonum/__main__.py | lagesonum/__main__.py | # coding: utf-8
# Datei zum lokalen testen, PythonAnywhere verwendet bottle_app.py direkt
from bottle import run, debug
from bottle_app import application
#debug(True)
run(application, host='172.31.1.100', port=80, reloader=True)
| # coding: utf-8
# Datei zum lokalen testen, PythonAnywhere verwendet bottle_app.py direkt
from bottle import run, debug
from bottle_app import application
#debug(True)
run(application, host='127.0.0.1', port=8080, reloader=True)
| Enable development start at localhost | Enable development start at localhost
| Python | mit | christophmeissner/lagesonum,coders4help/lagesonum,fzesch/lagesonum,fzesch/lagesonum,coders4help/lagesonum,fzesch/lagesonum,coders4help/lagesonum,christophmeissner/lagesonum |
6196c1fe13df88c1d9f1fe706120c175ab890a1d | gen_tone.py | gen_tone.py | import math
import numpy
from demodulate.cfg import *
def gen_tone(pattern, WPM):
cycles_per_sample = MORSE_FREQ/SAMPLE_FREQ
radians_per_sample = cycles_per_sample * 2 * math.pi
elements_per_second = WPM * 50.0 / 60.0
samples_per_element = int(SAMPLE_FREQ/elements_per_second)
length = samples_per_element * len(pattern)
# Empty returns array containing random stuff, so we NEED to overwrite it
data = numpy.empty(length, dtype=numpy.float32)
for i in xrange(length):
keyed = pattern[int(i/samples_per_element)]
#keyed = 1
data[i] = 0 if not keyed else (radians_per_sample * i)
data = numpy.sin(data)
return data
| import math
import numpy
from demodulate.cfg import *
def gen_tone(pattern, WPM):
cycles_per_sample = MORSE_FREQ/SAMPLE_FREQ
radians_per_sample = cycles_per_sample * 2 * math.pi
elements_per_second = WPM * 50.0 / 60.0
samples_per_element = int(SAMPLE_FREQ/elements_per_second)
length = samples_per_element * len(pattern)
# Empty returns array containing random stuff, so we NEED to overwrite it
data = numpy.empty(length, dtype=numpy.float32)
for i in xrange(length):
keyed = pattern[int(i/samples_per_element)]
#keyed = 1
data[i] = 0 if not keyed else (radians_per_sample * i)
data = numpy.sin(data)
data *= 2**16-1
data = numpy.array(data, dtype=numpy.int16)
return data
| Use 16 bit samples instead of float | Use 16 bit samples instead of float
| Python | mit | nickodell/morse-code |
da28458dffc3529f16cb222fce1676ddb0d87e05 | oembed/resources.py | oembed/resources.py | from django.utils.simplejson import simplejson
from oembed.exceptions import OEmbedException
class OEmbedResource(object):
"""
OEmbed resource, as well as a factory for creating resource instances
from response json
"""
_data = {}
content_object = None
def __getattr__(self, name):
return self._data.get(name)
def get_data(self):
return self._data
def load_data(self, data):
self._data = data
@property
def json(self):
return simplejson.dumps(self._data)
@classmethod
def create(cls, data):
if not 'type' in data or not 'version' in data:
raise OEmbedException('Missing required fields on OEmbed response.')
data['width'] = data.get('width') and int(data['width']) or None
data['height'] = data.get('height') and int(data['height']) or None
filtered_data = dict([(k, v) for k, v in data.items() if v])
resource = cls()
resource.load_data(filtered_data)
return resource
@classmethod
def create_json(cls, raw):
data = simplejson.loads(raw)
return cls.create(data)
| from django.utils import simplejson
from oembed.exceptions import OEmbedException
class OEmbedResource(object):
"""
OEmbed resource, as well as a factory for creating resource instances
from response json
"""
_data = {}
content_object = None
def __getattr__(self, name):
return self._data.get(name)
def get_data(self):
return self._data
def load_data(self, data):
self._data = data
@property
def json(self):
return simplejson.dumps(self._data)
@classmethod
def create(cls, data):
if not 'type' in data or not 'version' in data:
raise OEmbedException('Missing required fields on OEmbed response.')
data['width'] = data.get('width') and int(data['width']) or None
data['height'] = data.get('height') and int(data['height']) or None
filtered_data = dict([(k, v) for k, v in data.items() if v])
resource = cls()
resource.load_data(filtered_data)
return resource
@classmethod
def create_json(cls, raw):
data = simplejson.loads(raw)
return cls.create(data)
| Use the simplejson bundled with django | Use the simplejson bundled with django
| Python | mit | 0101/djangoembed,worldcompany/djangoembed,akvo/djangoembed,akvo/djangoembed,worldcompany/djangoembed,d4nielcosta/djangoembed,0101/djangoembed,d4nielcosta/djangoembed |
1cb201c57c592ebd014910fe225fa594cd87c745 | opendebates/middleware.py | opendebates/middleware.py | from opendebates.utils import get_site_mode
class SiteModeMiddleware(object):
"""
Gets or creates a SiteMode for the request, based on the hostname.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
request.site_mode = get_site_mode(request)
| from opendebates.utils import get_site_mode
class SiteModeMiddleware(object):
"""
Gets or creates a SiteMode for the request, based on the hostname.
"""
def process_request(self, request):
request.site_mode = get_site_mode(request)
| Make sure that the site mode is populated on the request | Make sure that the site mode is populated on the request
even if the request winds up getting dispatched to a flatpage.
| Python | apache-2.0 | caktus/django-opendebates,caktus/django-opendebates,caktus/django-opendebates,caktus/django-opendebates |
9651c0278d93bf5c4620e198baac975f0c84e9a0 | src/unittest/stattestmain.py | src/unittest/stattestmain.py | def main():
from _m5.stattest import stattest_init, stattest_run
import m5.stats
stattest_init()
# Initialize the global statistics
m5.stats.initSimStats()
m5.stats.initText("cout")
# We're done registering statistics. Enable the stats package now.
m5.stats.enable()
# Reset to put the stats in a consistent state.
m5.stats.reset()
stattest_run()
m5.stats.dump()
| def main():
from _m5.stattest import stattest_init, stattest_run
import m5.stats
stattest_init()
# Initialize the global statistics
m5.stats.initSimStats()
m5.stats.addStatVisitor("cout")
# We're done registering statistics. Enable the stats package now.
m5.stats.enable()
# Reset to put the stats in a consistent state.
m5.stats.reset()
stattest_run()
m5.stats.dump()
| Fix the stats unit test. | tests: Fix the stats unit test.
This has been broken since February. The interface for opening
initializing where the stats output should go was changed, but the
test wasn't updated.
Change-Id: I54bd8be15bf870352d5fcfad95ded28d87c7cc5a
Reviewed-on: https://gem5-review.googlesource.com/6001
Reviewed-by: Andreas Sandberg <[email protected]>
Maintainer: Andreas Sandberg <[email protected]>
| Python | bsd-3-clause | TUD-OS/gem5-dtu,gem5/gem5,TUD-OS/gem5-dtu,gem5/gem5,gem5/gem5,TUD-OS/gem5-dtu,TUD-OS/gem5-dtu,gem5/gem5,TUD-OS/gem5-dtu,gem5/gem5,TUD-OS/gem5-dtu,gem5/gem5,gem5/gem5,TUD-OS/gem5-dtu |
c9f1335bff52e54f90eed151a273879b0f5144ea | test/test_commonsdowloader.py | test/test_commonsdowloader.py | #!/usr/bin/env python
# -*- coding: latin-1 -*-
"""Unit tests."""
import unittest
import commonsdownloader
class TestCommonsDownloader(unittest.TestCase):
"""Testing methods from commonsdownloader."""
def test_clean_up_filename(self):
"""Test clean_up_filename."""
values = [('Example.jpg', 'Example.jpg'),
('Example.jpg ', 'Example.jpg'),
(' Example.jpg', 'Example.jpg'),
('My Example.jpg', 'My_Example.jpg')]
for (input_value, expected_value) in values:
self.assertEqual(commonsdownloader.clean_up_filename(input_value),
expected_value)
def test_make_thumb_url(self):
"""Test make_thumb_url."""
input_value = ('My_Example.jpg', 100)
expected_value = "http://commons.wikimedia.org/w/thumb.php?f=My_Example.jpg&width=100"
output = commonsdownloader.make_thumb_url(*input_value)
self.assertEqual(output, expected_value)
if __name__ == "__main__":
unittest.main()
| #!/usr/bin/env python
# -*- coding: latin-1 -*-
"""Unit tests."""
import unittest
import commonsdownloader
class TestCommonsDownloader(unittest.TestCase):
"""Testing methods from commonsdownloader."""
def test_clean_up_filename(self):
"""Test clean_up_filename."""
values = [('Example.jpg', 'Example.jpg'),
('Example.jpg ', 'Example.jpg'),
(' Example.jpg', 'Example.jpg'),
('My Example.jpg', 'My_Example.jpg')]
for (input_value, expected_value) in values:
self.assertEqual(commonsdownloader.clean_up_filename(input_value),
expected_value)
def test_make_thumb_url(self):
"""Test make_thumb_url."""
input_value = ('My_Example.jpg', 100)
expected_value = "http://commons.wikimedia.org/w/thumb.php?f=My_Example.jpg&width=100"
output = commonsdownloader.make_thumb_url(*input_value)
self.assertEqual(output, expected_value)
def test_make_thumbnail_name(self):
"""Test make_thumbnail_name."""
input_value = ('Example.svg', 'png')
expected_value = "Example.png"
output = commonsdownloader.make_thumbnail_name(*input_value)
self.assertEqual(output, expected_value)
if __name__ == "__main__":
unittest.main()
| Add unit test for make_thumbnail_name() | Add unit test for make_thumbnail_name()
| Python | mit | Commonists/CommonsDownloader |
93f61fa8eb526763ddaf3de476cee6643f044908 | stringer/utils/file_utils.py | stringer/utils/file_utils.py | null | '''
Utilities to search files and retain meta data about files.
'''
import logging
import os
import mmap
def map_file(path=None):
logging.debug("map_file: " + path)
file_map = ""
if path is None or path is os.path.isfile(path):
logging.error('generate string is None')
logging.error('path is not a path.')
else:
with open(path) as infile:
file_map = mmap.mmap(infile, 0, access=mmap.ACCESS_READ)
return file_map
def mask_mmap(file_map=None, mask_model=None):
logging.debug("file_map: " + file_map)
masked_line = ""
if file_map is None:
logging.error("file_map is None")
else:
for line in iter(file_map.readline, ""):
masked_line = mask_line(line, mask_model)
def mask_line(line=None, mask_model=None):
logging.debug("mask_line processing.")
new_line = ""
if line is None or line is mask_model:
logging.error("line and or mask_model is None")
else:
for mask in mask_model:
print(mask)
return new_line
| Write three functions to begin the feature to mask values of some keyvalue pairs. Publish to begin writing the tests and making these work and process better. Write it through. | Write three functions to begin the feature to mask values of some keyvalue pairs. Publish to begin writing the tests and making these work and process better. Write it through.
| Python | apache-2.0 | kalaboster/stringer,kalaboster/stringer |
61accbe3fa6ebdeed3bbf48573d5ac5412d0f1db | app/status/views.py | app/status/views.py | import os
from flask import jsonify, current_app, request
from sqlalchemy.exc import SQLAlchemyError
from . import status
from . import utils
from dmutils.status import get_flags
@status.route('/_status')
def status_no_db():
if 'ignore-dependencies' in request.args:
return jsonify(
status="ok",
), 200
version = current_app.config['VERSION']
try:
return jsonify(
status="ok",
version=version,
db_version=utils.get_db_version(),
flags=get_flags(current_app)
)
except SQLAlchemyError:
current_app.logger.exception('Error connecting to database')
return jsonify(
status="error",
version=version,
message="Error connecting to database",
flags=get_flags(current_app)
), 500
| from flask import jsonify, current_app, request
from sqlalchemy.exc import SQLAlchemyError
from . import status
from . import utils
from ..models import Framework
from dmutils.status import get_flags
@status.route('/_status')
def status_no_db():
if 'ignore-dependencies' in request.args:
return jsonify(
status="ok",
), 200
version = current_app.config['VERSION']
try:
return jsonify(
status="ok",
frameworks={f.slug: f.status for f in Framework.query.all()},
version=version,
db_version=utils.get_db_version(),
flags=get_flags(current_app)
)
except SQLAlchemyError:
current_app.logger.exception('Error connecting to database')
return jsonify(
status="error",
version=version,
message="Error connecting to database",
flags=get_flags(current_app)
), 500
| Add framework status to API /_status | Add framework status to API /_status
To figure out current framework statuses for the given environment
you either need access to the API token or you'd have to look through
a number of frontend pages to infer the status from.
Framework status is a part of almost every request to the API, so
it should always be available for a working API instance and it makes
sense to add it to the /_status page.
Adding it to the /_status page creates an easier way to get the list
of all framework statuses.
| Python | mit | alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api |
43175d338f9a8e7eb779421bb4e1aa3bec4a94f1 | mediacrush/network.py | mediacrush/network.py | import json
from flask import request, current_app, redirect
from flaskext.bcrypt import generate_password_hash
def get_ip():
ip = request.remote_addr
if ip == '127.0.0.1' or ip == '127.0.0.2' and "X-Real-IP" in request.headers:
ip = request.headers.get("X-Real-IP")
return ip
def makeMask(n):
"return a mask of n bits as a long integer"
return (2 << n - 1) - 1
def dottedQuadToNum(ip):
"convert decimal dotted quad string to long integer"
parts = ip.split(".")
return int(parts[0]) | (int(parts[1]) << 8) | (int(parts[2]) << 16) | (int(parts[3]) << 24)
def networkMask(ip, bits):
"Convert a network address to a long integer"
return dottedQuadToNum(ip) & makeMask(bits)
def addressInNetwork(ip, net):
"Is an address in a network"
return ip & net == net
def secure_ip():
ip = get_ip()
if ip == '127.0.0.3' and not current_app.debug:
return 'anonymous_user'
return generate_password_hash(ip)
def is_tor():
return get_ip() == '127.0.0.3'
| import json
from flask import request, current_app, redirect
from flaskext.bcrypt import generate_password_hash
def get_ip():
ip = request.remote_addr
if ip == '127.0.0.1' or ip == '127.0.0.2' and "X-Real-IP" in request.headers:
ip = request.headers.get("X-Real-IP")
return ip
def makeMask(n):
"return a mask of n bits as a long integer"
return (2 << n - 1) - 1
def dottedQuadToNum(ip):
"convert decimal dotted quad string to long integer"
parts = ip.split(".")
return int(parts[0]) | (int(parts[1]) << 8) | (int(parts[2]) << 16) | (int(parts[3]) << 24)
def networkMask(ip, bits):
"Convert a network address to a long integer"
return dottedQuadToNum(ip) & makeMask(bits)
def addressInNetwork(ip, net):
"Is an address in a network"
return ip & net == net
def secure_ip():
ip = get_ip()
if ip == '127.0.0.3' and not current_app.debug:
return 'anonymous_user'
return generate_password_hash(ip)
def is_tor():
return get_ip() == '5.254.104.62'
| Update IP address Tor traffic comes from | Update IP address Tor traffic comes from
| Python | mit | nerdzeu/NERDZCrush,roderickm/MediaCrush,MediaCrush/MediaCrush,nerdzeu/NERDZCrush,roderickm/MediaCrush,MediaCrush/MediaCrush,roderickm/MediaCrush,nerdzeu/NERDZCrush |
bb6a4659527077413845e912e53bea5ee9327293 | content/test/gpu/gpu_tests/memory_expectations.py | content/test/gpu/gpu_tests/memory_expectations.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class MemoryExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('Memory.CSS3D',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
self.Fail('Memory.CSS3D', ['mac', ('nvidia', 0x0fd5)], bug=368037)
# TODO(vmpstr): Memory drops and increases again, and this
# particular bot happens to catch it when its low. Remove
# once the bug is fixed.
self.Fail('Memory.CSS3D', ['win'], bug=373098)
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class MemoryExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('Memory.CSS3D',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
self.Fail('Memory.CSS3D', ['mac', ('nvidia', 0x0fd5)], bug=368037)
# TODO(vmpstr): Memory drops and increases again, and this
# particular bot happens to catch it when its low. Remove
# once the bug is fixed.
self.Fail('Memory.CSS3D', ['win'], bug=373098)
# Test has turned flaky on Linux also. Remove once the bug is fixed.
self.Fail('Memory.CSS3D', ['linux'], bug=373098)
| Add a failure expectation to Linux memory.css3d test. | Add a failure expectation to Linux memory.css3d test.
BUG=373098
NOTRY=true
[email protected]
Review URL: https://codereview.chromium.org/303503009
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@273109 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,Just-D/chromium-1,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,dednal/chromium.src,ondra-novak/chromium.src,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,Just-D/chromium-1,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,ondra-novak/chromium.src,littlstar/chromium.src,bright-sparks/chromium-spacewalk,jaruba/chromium.src,Just-D/chromium-1,bright-sparks/chromium-spacewalk,jaruba/chromium.src,fujunwei/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,dednal/chromium.src,dushu1203/chromium.src,littlstar/chromium.src,hgl888/chromium-crosswalk,Just-D/chromium-1,Jonekee/chromium.src,fujunwei/chromium-crosswalk,littlstar/chromium.src,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,ltilve/chromium,markYoungH/chromium.src,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,dushu1203/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,dednal/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,M4sse/chromium.src,M4sse/chromium.src,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Jonekee/chromium.src,ltilve/chromium,ondra-novak/chromium.src,axinging/chromium-crosswalk,dednal/chromium.src,markYoungH/chromium.src,chuan9/chromium-crosswalk,Chilledheart/chromium,jaruba/chromium.src,fujunwei/chromium-crosswalk,ltilve/chromium,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,Pluto-tv/chromi
um-crosswalk,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,littlstar/chromium.src,littlstar/chromium.src,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,jaruba/chromium.src,jaruba/chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,Chilledheart/chromium,hgl888/chromium-crosswalk,jaruba/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,ltilve/chromium,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,littlstar/chromium.src,chuan9/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,littlstar/chromium.src,chuan9/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,Fireblend/chromium-crosswalk,M4sse/chromium.src,littlstar/chromium.src,dednal/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,M4sse/chromium.src,markYoungH/chromium.src,M4sse/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,ondra-novak/chromium.src,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,Just-D/chromium-1,Jonekee/chromium.src,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,ltilve/chromium,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,Just-D/chromium-1,krieger-od/nwjs_chromium.src,M4sse/chromium.src,axinging/chromium-crosswalk,ltilve/chromium,Fireblend/chromium-crosswalk,jaruba/chromium.src,dushu1203/chromium.src,Fireblend/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,Fireblend/chromium-crosswalk |
8d1a4869286735a55773ce0c074349bb0cafd3aa | ca_on_ottawa/people.py | ca_on_ottawa/people.py | # coding: utf-8
from utils import CSVScraper
class OttawaPersonScraper(CSVScraper):
csv_url = 'http://data.ottawa.ca/en/dataset/fd26ae83-fe1a-40d8-8951-72df40021c82/resource/33a437d3-a06d-4c56-a7fe-4fd622364ce6/download/elected-officials-282014-201829-v.2.csv'
| # coding: utf-8
from utils import CSVScraper
class OttawaPersonScraper(CSVScraper):
csv_url = 'http://data.ottawa.ca/en/dataset/fd26ae83-fe1a-40d8-8951-72df40021c82/resource/33a437d3-a06d-4c56-a7fe-4fd622364ce6/download/elected-officials-282014-201829-v.2.csv'
corrections = {
'district name': {
"Orl\u0082ans": 'Orléans',
},
} | Use corrections, as none of utf-8, iso-8859-1 or windows-1252 work | ca_on_ottawa: Use corrections, as none of utf-8, iso-8859-1 or windows-1252 work
| Python | mit | opencivicdata/scrapers-ca,opencivicdata/scrapers-ca |
9be4329b0586047f9184f04ca2e331dbd871ab56 | casepro/rules/views.py | casepro/rules/views.py | from dash.orgs.views import OrgPermsMixin
from smartmin.views import SmartCRUDL, SmartListView
from .models import Rule
class RuleCRUDL(SmartCRUDL):
"""
Simple CRUDL for debugging by superusers, i.e. not exposed to regular users for now
"""
model = Rule
actions = ("list",)
class List(OrgPermsMixin, SmartListView):
fields = ("tests", "actions")
def get_queryset(self, **kwargs):
return self.model.objects.filter(org=self.request.org).order_by("pk")
def get_tests(self, obj):
return obj.get_tests_description()
def get_actions(self, obj):
return obj.get_actions_description()
| from dash.orgs.views import OrgPermsMixin
from smartmin.views import SmartCRUDL, SmartListView
from .models import Rule
class RuleCRUDL(SmartCRUDL):
"""
Simple CRUDL for debugging by superusers, i.e. not exposed to regular users for now
"""
model = Rule
actions = ("list",)
class List(OrgPermsMixin, SmartListView):
fields = ("tests", "actions")
def get_queryset(self, **kwargs):
return self.model.objects.filter(org=self.request.org).order_by("id")
| Fix coverage by removing unused lines | Fix coverage by removing unused lines
| Python | bsd-3-clause | rapidpro/casepro,rapidpro/casepro,rapidpro/casepro |
081e5c36cfa8505f1c639bb1e34a5b929b2d4076 | app/main/forms.py | app/main/forms.py | from flask_wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
| from flask.ext.wtf import Form
from wtforms import validators
from dmutils.forms import StripWhitespaceStringField
class EmailAddressForm(Form):
email_address = StripWhitespaceStringField('Email address', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class MoveUserForm(Form):
user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[
validators.DataRequired(message="Email can not be empty"),
validators.Email(message="Please enter a valid email address")
])
class EmailDomainForm(Form):
new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[
validators.DataRequired(message="The domain field can not be empty.")
])
| Update import to use new style as per SO answer | Update import to use new style as per SO answer
https://stackoverflow.com/questions/20032922/no-module-named-flask-ext-wtf
| Python | mit | alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend |
e326cef4ae66d4d2dd500e933ff4f7c6fc619b28 | fix-perm.py | fix-perm.py | #!/usr/bin/env python
"""fix-perm.py - Fix file permissions
"""
from __future__ import print_function
import os
import stat
import sys
if __name__ == '__main__':
for line in sys.stdin:
path = line.rstrip('\n')
if path == '':
continue
if not os.path.isfile(path):
continue
st = os.stat(path)
mode = st.st_mode
os.chmod(path, mode | stat.S_IRUSR | stat.S_IWUSR)
| #!/usr/bin/env python
"""fix-perm.py - Fix file permissions
"""
from __future__ import print_function
import os
import stat
import sys
if __name__ == '__main__':
for line in sys.stdin:
path = line.rstrip('\n')
if path == '':
continue
if not os.path.isfile(path):
continue
st = os.stat(path)
mode = int('644', 8)
if st.st_mode & stat.S_IXUSR != 0:
mode = int('755', 8)
with open(path) as f:
x = f.read(2)
if x == '#!':
mode = int('755', 8)
os.chmod(path, mode)
| Change permissions to either 644 or 755. | Change permissions to either 644 or 755.
| Python | isc | eliteraspberries/minipkg,eliteraspberries/minipkg |
b7b6fdbc270359e82a2f13f5257a0c2a3875c28f | src/foremast/slacknotify/slack_notification.py | src/foremast/slacknotify/slack_notification.py | """Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.info = {'app': app,
'env': env,
'properties': prop_path,
'timestamp': timestamp}
self.settings = get_properties(self.info['properties'])
self.info['config_commit_short'] = self.settings['pipeline'][
'config_commit'][0:11]
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
| """Notify Slack channel."""
import time
from ..utils import get_properties, get_template, post_slack_message
class SlackNotification:
"""Post slack notification.
Inform users about infrastructure changes to prod* accounts.
"""
def __init__(self, app=None, env=None, prop_path=None):
timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime())
self.settings = get_properties(prop_path)
short_commit_sha = self.settings['pipeline']['config_commit'][0:11]
self.info = {
'app': app,
'env': env,
'config_commit_short': short_commit_sha,
'timestamp': timestamp,
}
def post_message(self):
"""Send templated message to **#deployments-{env}**."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
channel = '#deployments-{}'.format(self.info['env'].lower())
post_slack_message(message, channel)
def notify_slack_channel(self):
"""Post message to a defined Slack channel."""
message = get_template(
template_file='slack-templates/pipeline-prepare-ran.j2',
info=self.info)
if self.settings['pipeline']['notifications']['slack']:
post_slack_message(
message, self.settings['pipeline']['notifications']['slack'])
| Move properties fetching before dict | fix: Move properties fetching before dict
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast |
0078bb14b85df519744371df89e243822a86ed4c | generate.py | generate.py | import random
import sys
population = bytes([i for i in range(256)])
if sys.argv[1] == 'reflector':
popset = set(population)
buffer = [None for i in range(256)]
for i in range(128):
x, y = random.sample(popset, 2)
popset.remove(x)
popset.remove(y)
buffer[x] = y
buffer[y] = x
print(bytes(buffer))
elif sys.argv[1] == 'rotor':
print(bytes(random.sample(population, 256)))
| import random
import sys
population = bytes([i for i in range(256)])
if sys.argv[1] == 'reflector':
print('WIRING')
popset = set(population)
buffer = [None for i in range(256)]
for i in range(128):
x, y = random.sample(popset, 2)
popset.remove(x)
popset.remove(y)
buffer[x] = y
buffer[y] = x
print(bytes(buffer))
elif sys.argv[1] == 'rotor':
print('WIRING')
print(bytes(random.sample(population, 256)))
print('NOTCHES')
print(random.sample(population, 3))
| Add a little more detail to the generator | Add a little more detail to the generator
| Python | mit | spgill/bitnigma |
211f1fdfe1d969df7c9762ba8e914d3ea829e9b4 | manual/conf.py | manual/conf.py | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For
# a full list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
#
# To see the default sample conf.py, run sphinx-quickstart in an empty
# directory. Most of the original comments and options were removed.
import sphinx_rtd_theme # noQA F401
import os
import sys
sys.path.append(os.path.abspath("./_ext"))
project = 'QPDF'
copyright = '2005-2021, Jay Berkenbilt'
author = 'Jay Berkenbilt'
# make_dist and the CI build lexically find the release version from this file.
release = '10.5.0'
version = release
extensions = [
'sphinx_rtd_theme',
'qpdf',
]
html_theme = 'sphinx_rtd_theme'
html_theme_options = {
"body_max_width": None,
}
html_logo = '../logo/qpdf.svg'
html_static_path = ['_static']
html_css_files = [
'css/wraptable.css',
]
highlight_language = 'none'
| # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For
# a full list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
#
# To see the default sample conf.py, run sphinx-quickstart in an empty
# directory. Most of the original comments and options were removed.
import sphinx_rtd_theme # noQA F401
import os
import sys
sys.path.append(os.path.abspath("./_ext"))
project = 'QPDF'
copyright = '2005-2021, Jay Berkenbilt'
author = 'Jay Berkenbilt'
# make_dist and the CI build lexically find the release version from this file.
release = '10.5.0'
version = release
extensions = [
'sphinx_rtd_theme',
'qpdf',
]
html_theme = 'sphinx_rtd_theme'
html_theme_options = {
"body_max_width": None,
}
html_logo = '../logo/qpdf.svg'
html_static_path = ['_static']
html_css_files = [
'css/wraptable.css',
]
latex_elements = {
'preamble': r'''
\sphinxDUC{2264}{$\leq$}
\sphinxDUC{2265}{$\geq$}
''',
}
highlight_language = 'none'
| Allow real <= and >= in LateX | Allow real <= and >= in LateX
| Python | apache-2.0 | jberkenbilt/qpdf,jberkenbilt/qpdf,jberkenbilt/qpdf,qpdf/qpdf,jberkenbilt/qpdf,qpdf/qpdf,jberkenbilt/qpdf,qpdf/qpdf,qpdf/qpdf,qpdf/qpdf |
aa720a34c918e3d6454a4cfcb4fa0548f9fbd078 | hggithub.py | hggithub.py |
# Mimic the hggit extension.
try:
from hggit import *
hggit_reposetup = reposetup
except ImportError:
# Allow this module to be imported without
# hg-git installed, eg for setup.py
pass
__version__ = "0.1.4"
def reposetup(ui, repo, **kwargs):
"""
Automatically adds Bitbucket->GitHub mirror paths to the repo.
Also creates a `master` bookmark for the `default` branch.
"""
if len(getattr(repo, "changelog", [])) == 0:
return
hggit_reposetup(ui, repo, **kwargs)
bb = "ssh://[email protected]/"
for pathname, path in ui.configitems("paths"):
if path.startswith(bb):
user, project = path.replace(bb, "").split("/", 1)
# Strip slash and everything after it,
# such as mq patch queue path.
project = project.split("/")[0]
for k, v in ui.configitems("github"):
if k == "username":
user = v
gh_path = "git+ssh://[email protected]/%s/%s.git" % (user, project)
if pathname == "default":
if "master" not in repo._bookmarks:
from mercurial.commands import bookmark
bookmark(ui, repo, mark="master", rev="default")
gh_pathname = "github"
else:
gh_pathname = "github-" + pathname
ui.setconfig("paths", gh_pathname, gh_path)
|
# Mimic the hggit extension.
try:
from hggit import *
hggit_reposetup = reposetup
except ImportError:
# Allow this module to be imported without
# hg-git installed, eg for setup.py
pass
__version__ = "0.1.4"
def reposetup(ui, repo, **kwargs):
"""
Automatically adds Bitbucket->GitHub mirror paths to the repo.
Also creates a `master` bookmark for the `default` branch.
"""
if len(getattr(repo, "changelog", [])) == 0:
return
hggit_reposetup(ui, repo, **kwargs)
bb = "ssh://[email protected]/"
for pathname, path in ui.configitems("paths"):
if path.startswith(bb):
user, project = path.replace(bb, "").split("/", 1)
# Strip slash and everything after it,
# such as mq patch queue path.
project = project.split("/")[0]
for k, v in ui.configitems("github"):
if k == "username":
user = v
gh_path = "git+ssh://[email protected]/%s/%s.git" % (user, project)
if pathname == "default":
if "master" not in repo._bookmarks:
from mercurial.commands import bookmark
bookmark(ui, repo, "master", rev="default")
gh_pathname = "github"
else:
gh_pathname = "github-" + pathname
ui.setconfig("paths", gh_pathname, gh_path)
| Update bookmark interface to work with mercurial 3.0.2 | Update bookmark interface to work with mercurial 3.0.2
--HG--
extra : transplant_source : Qn%AB4%08%F4%3D%60%0DDb%10%E1%9C%A2%82%00z%1D5
| Python | bsd-2-clause | stephenmcd/hg-github |
ad55d04d6688f75f0e441603668e0337a0333d76 | tests/test_validate.py | tests/test_validate.py | # -*- coding: utf-8 -*-
import pytest
from marshmallow import validate, ValidationError
def test_invalid_email():
invalid1 = "user@example"
with pytest.raises(ValidationError):
validate.email(invalid1)
invalid2 = "example.com"
with pytest.raises(ValidationError):
validate.email(invalid2)
invalid3 = "user"
with pytest.raises(ValidationError):
validate.email(invalid3)
with pytest.raises(ValidationError):
validate.email('@nouser.com')
def test_validate_email_none():
assert validate.email(None) is None
def test_validate_url_none():
assert validate.url(None) is None
| # -*- coding: utf-8 -*-
import pytest
from marshmallow import validate, ValidationError
def test_invalid_email():
invalid1 = "user@example"
with pytest.raises(ValidationError):
validate.email(invalid1)
invalid2 = "example.com"
with pytest.raises(ValidationError):
validate.email(invalid2)
invalid3 = "user"
with pytest.raises(ValidationError):
validate.email(invalid3)
with pytest.raises(ValidationError):
validate.email('@nouser.com')
def test_validate_email_none():
assert validate.email(None) is None
def test_validate_url_none():
assert validate.url(None) is None
def test_min_length():
with pytest.raises(ValidationError):
validate.length('foo', 4, 5)
assert validate.length('foo', 3, 5) == 'foo'
with pytest.raises(ValidationError):
validate.length([1, 2, 3], 4, 5)
assert validate.length([1, 2, 3], 3, 5) == [1, 2, 3]
with pytest.raises(ValidationError):
validate.length('foo', 5)
def test_max_length():
with pytest.raises(ValidationError):
validate.length('foo', 1, 2)
assert validate.length('foo', 1, 3) == 'foo'
with pytest.raises(ValidationError):
validate.length([1, 2, 3], 1, 2)
assert validate.length([1, 2, 3], 1, 3) == [1, 2, 3]
with pytest.raises(ValidationError):
validate.length('foo', None, 2)
def test_validate_length_none():
assert validate.length(None) is None
| Add length validator unit tests | Add length validator unit tests | Python | mit | maximkulkin/marshmallow,0xDCA/marshmallow,Tim-Erwin/marshmallow,xLegoz/marshmallow,marshmallow-code/marshmallow,VladimirPal/marshmallow,0xDCA/marshmallow,daniloakamine/marshmallow,dwieeb/marshmallow,mwstobo/marshmallow,quxiaolong1504/marshmallow,etataurov/marshmallow,Bachmann1234/marshmallow,bartaelterman/marshmallow |
98eaf33328814342cdf6a2e8379c87cd00c911ce | campaign/views.py | campaign/views.py | from django.core.urlresolvers import reverse
from django.shortcuts import redirect, render_to_response
from django.template import RequestContext
from campaign.forms import CampaignFormSet, ProspectusForm
from campaign.models import PROSPECTUS_FIELD_HELP
def create_edit_prospectus(request):
if request.method == 'POST':
prospectus_form = ProspectusForm(request.POST)
campaign_formset = CampaignFormSet(request.POST)
if prospectus_form.is_valid():
prospectus_form.save(commit=False)
if request.user.is_authenticated():
prospectus_form.instance.owner = request.user
if campaign_formset.is_valid():
prospectus_form.instance.save()
for campaign in campaign_formset.save(commit=False):
campaign.prospectus = prospectus_form.instance
campaign.save()
return redirect(reverse('index'))
else:
prospectus_form = ProspectusForm()
campaign_formset = CampaignFormSet()
return render_to_response('campaign/new_prospectus.html',
{'prospectus_form': prospectus_form,
'campaign_forms': campaign_formset,
'prospectus_help': PROSPECTUS_FIELD_HELP},
RequestContext(request))
| from django.core.urlresolvers import reverse
from django.shortcuts import redirect, render_to_response
from django.template import RequestContext
from campaign.forms import CampaignFormSet, ProspectusForm
from campaign.models import PROSPECTUS_FIELD_HELP, Campaign
def create_edit_prospectus(request):
if request.method == 'POST':
prospectus_form = ProspectusForm(request.POST)
campaign_formset = CampaignFormSet(request.POST,
queryset=Campaign.objects.none())
if prospectus_form.is_valid():
prospectus_form.save(commit=False)
if request.user.is_authenticated():
prospectus_form.instance.owner = request.user
if campaign_formset.is_valid():
prospectus_form.instance.save()
for campaign in campaign_formset.save(commit=False):
campaign.prospectus = prospectus_form.instance
campaign.save()
return redirect(reverse('index'))
else:
prospectus_form = ProspectusForm()
campaign_formset = CampaignFormSet(queryset=Campaign.objects.none())
return render_to_response('campaign/new_prospectus.html',
{'prospectus_form': prospectus_form,
'campaign_forms': campaign_formset,
'prospectus_help': PROSPECTUS_FIELD_HELP},
RequestContext(request))
| Update default queryset for formsets | Update default queryset for formsets | Python | mit | tdphillips/campaigns,tdphillips/campaigns |
547130e5f3717fd5bfd083be89afd361fdcdefc1 | van/contactology/tests/test_contactology.py | van/contactology/tests/test_contactology.py | import unittest
from simplejson import dumps
from twisted.trial.unittest import TestCase
from twisted.internet import defer
from mock import patch, Mock
from van.contactology import Contactology
class TestProxy(TestCase):
@defer.inlineCallbacks
def test_list_return(self):
patcher = patch('van.contactology.getPage')
getPage = patcher.start()
try:
proxy = Contactology('API Key')
getPage.return_value = dumps([])
out = yield proxy.Campaign_Find()
yield self.assertEquals(out, [])
finally:
patcher.stop()
| import unittest
from simplejson import dumps
from twisted.trial.unittest import TestCase
from twisted.internet import defer
from mock import patch, Mock
from van.contactology import Contactology, APIError
class TestProxy(TestCase):
@defer.inlineCallbacks
def test_list_return(self):
patcher = patch('van.contactology.getPage')
getPage = patcher.start()
try:
proxy = Contactology('API Key')
getPage.return_value = dumps([])
out = yield proxy.Campaign_Find()
yield self.assertEquals(out, [])
finally:
patcher.stop()
@defer.inlineCallbacks
def test_api_error(self):
patcher = patch('van.contactology.getPage')
getPage = patcher.start()
try:
proxy = Contactology('API Key')
getPage.return_value = dumps({'code': 221, 'message': 'Key not found', 'result': 'error'})
yield self.failUnlessFailure(proxy.List_Get_Active_Lists(), APIError)
finally:
patcher.stop()
| Test for exception raising on API error. | Test for exception raising on API error.
| Python | bsd-3-clause | jinty/van.contactology |
e4c7a9186ef90ab6af637dbfb2bf5331823e64d9 | kimochiconsumer/views.py | kimochiconsumer/views.py | from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPNotFound,
)
@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
if 'page_id' in request.matchdict:
data = request.kimochi.page(request.matchdict['page_id'])
else:
data = request.kimochi.page('index')
import pprint
pprint.pprint(data)
return data
@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
| from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPNotFound,
)
@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
if 'page_id' in request.matchdict:
data = request.kimochi.page(request.matchdict['page_id'])
else:
data = request.kimochi.page('index')
return data
@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
| Remove pprint and add PEP-8 lf | Remove pprint and add PEP-8 lf
| Python | mit | matslindh/kimochi-consumer |
ee4f312e89fe262a682011da3a7881bfbf47fcdf | spacy/lang/ar/__init__.py | spacy/lang/ar/__init__.py | # coding: utf8
from __future__ import unicode_literals
from .stop_words import STOP_WORDS
from .lex_attrs import LEX_ATTRS
from .punctuation import TOKENIZER_SUFFIXES
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ..norm_exceptions import BASE_NORMS
from ...language import Language
from ...attrs import LANG, NORM
from ...util import update_exc, add_lookups
class ArabicDefaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters.update(LEX_ATTRS)
lex_attr_getters[LANG] = lambda text: "ar"
lex_attr_getters[NORM] = add_lookups(
Language.Defaults.lex_attr_getters[NORM], BASE_NORMS
)
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
stop_words = STOP_WORDS
suffixes = TOKENIZER_SUFFIXES
class Arabic(Language):
lang = "ar"
Defaults = ArabicDefaults
__all__ = ["Arabic"]
| # coding: utf8
from __future__ import unicode_literals
from .stop_words import STOP_WORDS
from .lex_attrs import LEX_ATTRS
from .punctuation import TOKENIZER_SUFFIXES
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ..norm_exceptions import BASE_NORMS
from ...language import Language
from ...attrs import LANG, NORM
from ...util import update_exc, add_lookups
class ArabicDefaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters.update(LEX_ATTRS)
lex_attr_getters[LANG] = lambda text: "ar"
lex_attr_getters[NORM] = add_lookups(
Language.Defaults.lex_attr_getters[NORM], BASE_NORMS
)
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
stop_words = STOP_WORDS
suffixes = TOKENIZER_SUFFIXES
writing_system = {"direction": "rtl", "has_case": False, "has_letters": True}
class Arabic(Language):
lang = "ar"
Defaults = ArabicDefaults
__all__ = ["Arabic"]
| Add writing_system to ArabicDefaults (experimental) | Add writing_system to ArabicDefaults (experimental)
| Python | mit | honnibal/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy |
87de1fce846d7f50017fba885725a0907d43275e | swf/querysets/__init__.py | swf/querysets/__init__.py | #! -*- coding:utf-8 -*-
from swf.querysets.activity import ActivityTypeQuerySet
from swf.querysets.domain import DomainQuerySet
from swf.querysets.workflow import (WorkflowTypeQuerySet,
WorkflowExecutionQuerySet)
| #! -*- coding:utf-8 -*-
from swf.querysets.activity import ActivityTypeQuerySet
from swf.querysets.domain import DomainQuerySet
from swf.querysets.history import HistoryQuerySet
from swf.querysets.workflow import (WorkflowTypeQuerySet,
WorkflowExecutionQuerySet)
| Add history qs to swf querysets modules | Add history qs to swf querysets modules
| Python | mit | botify-labs/python-simple-workflow,botify-labs/python-simple-workflow |
ebe6281773bd10ed2e6be9b20e257f0403e3cc74 | tests/test_decorators.py | tests/test_decorators.py | null | from name.decorators import jsonp
from mock import MagicMock
def test_jsonp_returns_without_status_code_200():
# Setup the mock view.
f = MagicMock()
f.__name__ = 'Wrapped View'
# Setup the mock response.
response = MagicMock()
response.status_code = 301
# Set the response as the return value for the mock
# view.
f.return_value = response
decorated_f = jsonp(f)
assert decorated_f(1)
def test_jsonp_returns_has_callback():
f = MagicMock()
f.__name__ = 'Wrapped View'
# Setup the mock request
request = MagicMock()
request.GET = dict(callback='init')
# Setup the mock response.
json = {"id": 1, "status": 200}
response = MagicMock(content=json, status_code=200)
f.return_value = response
decorated_f = jsonp(f)
result = decorated_f(request)
expected = 'init({0})'.format(json)
assert expected == result.content
def test_jsonp_request_does_not_have_callback():
f = MagicMock()
f.__name__ = 'Wrapped View'
request = MagicMock()
request.GET = dict()
json = {"id": 1, "status": 200}
response = MagicMock(content=json, status_code=200)
f.return_value = response
decorated_f = jsonp(f)
result = decorated_f(request)
# Here we assert the the content was not altered
# since we did not provide a callback.
assert json == result.content
| Add intial tests for decorators. | Add intial tests for decorators.
| Python | bsd-3-clause | damonkelley/django-name,unt-libraries/django-name,unt-libraries/django-name,damonkelley/django-name,unt-libraries/django-name,damonkelley/django-name |
3a0b844f33274f7d9c389dd89b21a953cb9c1510 | promgen/sender/webhook.py | promgen/sender/webhook.py | '''
Simple webhook bridge
Accepts alert json from Alert Manager and then POSTs individual alerts to
configured webhook destinations
'''
import logging
import requests
from promgen.sender import SenderBase
logger = logging.getLogger(__name__)
class SenderWebhook(SenderBase):
def _send(self, url, alert, data):
body = {
'prometheus': alert['generatorURL'],
'status': alert['status'],
'alertmanager': data['externalURL']
}
body.update(alert['labels'])
body.update(alert['annotations'])
requests.post(url, body).raise_for_status()
return True
| '''
Simple webhook bridge
Accepts alert json from Alert Manager and then POSTs individual alerts to
configured webhook destinations
'''
import logging
import requests
from promgen.sender import SenderBase
logger = logging.getLogger(__name__)
class SenderWebhook(SenderBase):
def _send(self, url, alert, data):
body = {
'prometheus': alert['generatorURL'],
'status': alert['status'],
'alertmanager': data['externalURL']
}
body.update(alert.get('labels', {}))
body.update(alert.get('annotations', {}))
requests.post(url, body).raise_for_status()
return True
| Fix case where annotations may not exist | Fix case where annotations may not exist
| Python | mit | kfdm/promgen,kfdm/promgen,kfdm/promgen,kfdm/promgen |
cfc50cb9e70b7a5358b36a54d4b3bc27a2cfb2cb | us_ignite/common/sanitizer.py | us_ignite/common/sanitizer.py | import bleach
ALLOWED_TAGS = [
'a',
'abbr',
'acronym',
'b',
'blockquote',
'code',
'em',
'i',
'li',
'ol',
'strong',
'ul',
'p',
'br',
'h3',
'h4',
'h5',
'h6',
]
ALLOWED_ATTRIBUTES = {
'a': ['href', 'title'],
'abbr': ['title'],
'acronym': ['title'],
}
ALLOWED_STYLES = []
def sanitize(text):
"""Cleans the HTML received."""
cleaned_text = bleach.clean(
text, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES,
styles=ALLOWED_STYLES, strip=True)
return cleaned_text
| import bleach
ALLOWED_TAGS = [
'a',
'abbr',
'acronym',
'b',
'blockquote',
'code',
'em',
'i',
'li',
'ol',
'strong',
'ul',
'p',
'br',
'h3',
'h4',
'h5',
'h6',
'table',
'tr',
'th',
'td',
]
ALLOWED_ATTRIBUTES = {
'a': ['href', 'title'],
'abbr': ['title'],
'acronym': ['title'],
}
ALLOWED_STYLES = []
def sanitize(text):
"""Cleans the HTML received."""
cleaned_text = bleach.clean(
text, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES,
styles=ALLOWED_STYLES, strip=True)
return cleaned_text
| Allow ``table`` attributes during HTML sanitation. | Allow ``table`` attributes during HTML sanitation.
Tables are part of the content expected to be added
in some of the resources in the site.
| Python | bsd-3-clause | us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite |
aa1f421161e9afe20e0f28532d2b0327a8654a13 | Lib/distutils/__init__.py | Lib/distutils/__init__.py | """distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 1.5.2.
__revision__ = "$Id$"
__version__ = "1.0.4"
| """distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 1.5.2.
__revision__ = "$Id$"
__version__ = "2.4.0"
| Make the distutils version number the same as the python version. It must be literally contained here, because it is still possible to install this distutils in older Python versions. | Make the distutils version number the same as the python version. It
must be literally contained here, because it is still possible to
install this distutils in older Python versions.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator |
f7bb5a58774cdb6ecdfae12f7919ae2e3dfd8f8d | upsrv/conary_schema.py | upsrv/conary_schema.py | #!/usr/bin/python
# Copyright (c) 2006 rPath, Inc
# All rights reserved
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary.repository.netrepos.netserver import ServerConfig
from conary import dbstore
class SimpleFileLog(tracelog.FileLog):
def printLog(self, level, msg):
self.fd.write("%s\n" % msg)
cnrPath = '/srv/conary/repository.cnr'
cfg = ServerConfig()
tracelog.FileLog = SimpleFileLog
tracelog.initLog(filename='stderr', level=2)
try:
cfg.read(cnrPath)
except cfgtypes.CfgEnvironmentError:
print "Error reading %s" % cnrPath
sys.exit(1)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
| #!/usr/bin/python
#
# Copyright (c) SAS Institute Inc.
#
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary import dbstore
from .config import UpsrvConfig
class SimpleFileLog(tracelog.FileLog):
def printLog(self, level, msg):
self.fd.write("%s\n" % msg)
try:
cfg = UpsrvConfig.load()
except cfgtypes.CfgEnvironmentError:
print "Error reading config file"
sys.exit(1)
tracelog.FileLog = SimpleFileLog
tracelog.initLog(filename='stderr', level=2)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
| Update conary migration script to deal with extended config | Update conary migration script to deal with extended config
| Python | apache-2.0 | sassoftware/rbm,sassoftware/rbm,sassoftware/rbm |
eb40e27b0699a717708dd9367ac91ac0326456fe | regress/tests.py | regress/tests.py | import subprocess
import sys
import unittest
from django.conf import settings
#
# Must configure settings before importing base.
#
db = 'testdjangodb1'
schema = 'django1'
user = 'django1'
passwd = 'django1'
settings.configure(
DEBUG=True,
DATABASE_NAME=db,
DATABASE_USER=user,
DATABASE_PASSWORD=passwd
)
sys.path.append('../')
import base
class TestCursor(unittest.TestCase):
def setUp(self):
cmd = './createdb.sh "%s" "%s" "%s" "%s"' % \
(db, user, passwd, schema)
try:
rc = subprocess.call(cmd, shell=True)
if rc == 0:
pass # normal
elif rc < 0:
self.fail("Child was terminated by signal %s" \
% (-rc,))
else:
self.fail("Child returned error code %s" \
% (rc,))
except OSError, e:
self.fail("Execution failed:", e)
def tearDown(self):
# XXX: delete database created in setup.
pass
def testcreate(self):
w = base.DatabaseWrapper({})
c = w.cursor()
self.failUnless(c)
if __name__ == '__main__':
unittest.main()
| import subprocess
import sys
import unittest
from django.conf import settings
#
# Must configure settings before importing base.
#
db = 'testdjangodb1'
schema = 'django1'
user = 'django1'
passwd = 'django1'
settings.configure(
DEBUG=True,
DATABASE_NAME=db,
DATABASE_USER=user,
DATABASE_PASSWORD=passwd
)
sys.path.append('../')
import base
class TestMonetDjango(unittest.TestCase):
def setUp(self):
cmd = './createdb.sh "%s" "%s" "%s" "%s"' % \
(db, user, passwd, schema)
try:
rc = subprocess.call(cmd, shell=True)
if rc == 0:
pass # normal
elif rc < 0:
self.fail("Child was terminated by signal %s" \
% (-rc,))
else:
self.fail("Child returned error code %s" \
% (rc,))
except OSError, e:
self.fail("Execution failed:", e)
def tearDown(self):
# XXX: delete database created in setup.
pass
def testcreate(self):
w = base.DatabaseWrapper({})
c = w.cursor()
self.failUnless(c)
if __name__ == '__main__':
unittest.main()
| Rename class to sync with new file name. | Rename class to sync with new file name.
| Python | isc | rutube/djonet,gijzelaerr/djonet |
130d73d64e6f4abe4946240a8e876891cb02182c | corehq/ex-submodules/pillow_retry/admin.py | corehq/ex-submodules/pillow_retry/admin.py | from django.contrib import admin
from pillow_retry.models import PillowError
@admin.register(PillowError)
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
actions = [
'delete_selected'
]
| from datetime import datetime
from django.contrib import admin
from pillow_retry.models import PillowError
@admin.register(PillowError)
class PillowErrorAdmin(admin.ModelAdmin):
model = PillowError
list_display = [
'pillow',
'doc_id',
'error_type',
'date_created',
'date_last_attempt',
'date_next_attempt'
]
list_filter = ('pillow', 'error_type')
actions = [
'delete_selected',
'reset_attempts',
]
def reset_attempts(self, request, queryset):
queryset.update(current_attempt=0, date_next_attempt=datetime.utcnow())
reset_attempts.short_description = "Reset Attempts"
| Add reset attempts to PillowError actions | Add reset attempts to PillowError actions
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq |
e9e632008db1eb2bbdbd989584b82255a10f8944 | CodeFights/arrayReplace.py | CodeFights/arrayReplace.py | #!/usr/local/bin/python
# Code Fights Add Border Problem
def arrayReplace(inputArray, elemToReplace, substitutionElem):
pass
def main():
pass
if __name__ == '__main__':
main()
| #!/usr/local/bin/python
# Code Fights Add Border Problem
def arrayReplace(inputArray, elemToReplace, substitutionElem):
return [x if x != elemToReplace else substitutionElem for x in inputArray]
def main():
tests = [
[[1, 2, 1], 1, 3, [3, 2, 3]],
[[1, 2, 3, 4, 5], 3, 0, [1, 2, 0, 4, 5]],
[[1, 1, 1], 1, 10, [10, 10, 10]]
]
for t in tests:
res = arrayReplace(t[0], t[1], t[2])
if t[3] == res:
print("PASSED: arrayReplace({}, {}, {}) returned {}"
.format(t[0], t[1], t[2], res))
else:
print("FAILED: arrayReplace({}, {}, {}) returned {}, should have returned {}"
.format(t[0], t[1], t[2], res, t[3]))
if __name__ == '__main__':
main()
| Solve Code Fights array replace problem | Solve Code Fights array replace problem
| Python | mit | HKuz/Test_Code |
9d93a7a5d474a8725125077ae888f2d586955489 | tests/cli/fsm/fsm_test.py | tests/cli/fsm/fsm_test.py | # Copyright 2015 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#import mock
#from pytest import raises
#
#import paasta_tools.cli.cmds.fsm as fsm
#from paasta_tools.utils import SystemPaastaConfig
| # Copyright 2015 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| Remove comments in fsm tests | Remove comments in fsm tests
| Python | apache-2.0 | Yelp/paasta,gstarnberger/paasta,somic/paasta,somic/paasta,Yelp/paasta,gstarnberger/paasta |
016d955319b6971fec42ac6ada1052f88d867cee | freepacktbook/__init__.py | freepacktbook/__init__.py | import os
from bs4 import BeautifulSoup
import requests
class FreePacktBook(object):
base_url = 'https://www.packtpub.com'
url = base_url + '/packt/offers/free-learning/'
def __init__(self, email=None, password=None):
self.session = requests.Session()
self.email = email
self.password = password
def claim_free_ebook(self):
response = self.session.post(self.url, {
'email': self.email,
'password': self.password,
'form_id': 'packt_user_login_form'})
parser = BeautifulSoup(response.text, 'html.parser')
claim_url = self.base_url + parser.find('div', {
'class': 'free-ebook'}).a['href']
response = self.session.get(claim_url)
assert response.status_code == 200
def claim_free_ebook():
client = FreePacktBook(
os.environ.get('PACKTPUB_EMAIL'), os.environ.get('PACKTPUB_PASSWORD'))
client.claim_free_ebook()
| import os
from bs4 import BeautifulSoup
import requests
class FreePacktBook(object):
base_url = 'https://www.packtpub.com'
url = base_url + '/packt/offers/free-learning/'
def __init__(self, email=None, password=None):
self.session = requests.Session()
self.email = email
self.password = password
def claim_free_ebook(self):
response = self.session.post(self.url, {
'email': self.email,
'password': self.password,
'form_id': 'packt_user_login_form'})
parser = BeautifulSoup(response.text, 'html.parser')
claim_url = self.base_url + parser.find('div', {
'class': 'free-ebook'}).a['href']
response = self.session.get(claim_url)
assert response.status_code == 200
def get_book_details(self):
response = self.session.get(self.url)
parser = BeautifulSoup(response.text, 'html.parser')
summary = parser.find('div', {'class': 'dotd-main-book-summary'})
title = summary.find('div', {'class': 'dotd-title'}).getText().strip()
description = summary.find('div', {'class': None}).getText().strip()
main_book_image = parser.find('div', {'class': 'dotd-main-book-image'})
image_url = 'https:%s' % main_book_image.img['src']
url = self.base_url + main_book_image.a['href']
return {'title': title, 'description': description,
'url': url, 'image_url': image_url}
def claim_free_ebook():
client = FreePacktBook(
os.environ.get('PACKTPUB_EMAIL'), os.environ.get('PACKTPUB_PASSWORD'))
client.claim_free_ebook()
| Add ability to get book details | Add ability to get book details
| Python | mit | bogdal/freepacktbook |
76b39021fb0171da6036ceaf7894e3ff18d259ae | src/syft/grid/client/request_api/worker_api.py | src/syft/grid/client/request_api/worker_api.py | # stdlib
from typing import Any
from typing import Dict
# third party
from pandas import DataFrame
# syft relative
from ...messages.infra_messages import CreateWorkerMessage
from ...messages.infra_messages import DeleteWorkerMessage
from ...messages.infra_messages import GetWorkerMessage
from ...messages.infra_messages import GetWorkersMessage
from ...messages.infra_messages import UpdateWorkerMessage
from .request_api import GridRequestAPI
class WorkerRequestAPI(GridRequestAPI):
response_key = "worker"
def __init__(self, send):
super().__init__(
create_msg=CreateWorkerMessage,
get_msg=GetWorkerMessage,
get_all_msg=GetWorkersMessage,
update_msg=UpdateWorkerMessage,
delete_msg=DeleteWorkerMessage,
send=send,
response_key=WorkerRequestAPI.response_key,
)
def __getitem__(self, key):
return self.get(worker_id=key)
def __delitem__(self, key):
self.delete(worker_id=key)
| # stdlib
from typing import Callable
# syft relative
from ...messages.infra_messages import CreateWorkerMessage
from ...messages.infra_messages import DeleteWorkerMessage
from ...messages.infra_messages import GetWorkerMessage
from ...messages.infra_messages import GetWorkersMessage
from ...messages.infra_messages import UpdateWorkerMessage
from .request_api import GridRequestAPI
class WorkerRequestAPI(GridRequestAPI):
response_key = "worker"
def __init__(self, send: Callable):
super().__init__(
create_msg=CreateWorkerMessage,
get_msg=GetWorkerMessage,
get_all_msg=GetWorkersMessage,
update_msg=UpdateWorkerMessage,
delete_msg=DeleteWorkerMessage,
send=send,
response_key=WorkerRequestAPI.response_key,
)
def __getitem__(self, key: int) -> object:
return self.get(worker_id=key)
def __delitem__(self, key: int) -> None:
self.delete(worker_id=key)
| Update Worker API - ADD type hints - Remove unused imports | Update Worker API
- ADD type hints
- Remove unused imports
| Python | apache-2.0 | OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft |
6153952ca9794ccb1dd5d76696aa2d4881a665c1 | tests/core/migrations/0004_bookwithchapters.py | tests/core/migrations/0004_bookwithchapters.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-06-09 10:26
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
class PostgresOnlyCreateModel(migrations.CreateModel):
def database_forwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_forwards(app_label, schema_editor, from_state, to_state)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_backwards(app_label, schema_editor, from_state, to_state)
class Migration(migrations.Migration):
dependencies = [
('core', '0003_withfloatfield'),
]
operations = [
PostgresOnlyCreateModel(
name='BookWithChapters',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Book name')),
('chapters',
django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), default=list,
size=None)),
],
),
]
| from __future__ import unicode_literals
from django import VERSION
from django.db import migrations, models
if VERSION >= (1, 8):
from django.contrib.postgres.fields import ArrayField
chapters_field = ArrayField(base_field=models.CharField(max_length=100), default=list, size=None)
else:
chapters_field = models.Field() # Dummy field
class PostgresOnlyCreateModel(migrations.CreateModel):
def database_forwards(self, app_label, schema_editor, from_state, to_state):
if VERSION >= (1, 8) and schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_forwards(app_label, schema_editor, from_state, to_state)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
if VERSION >= (1, 8) and schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_backwards(app_label, schema_editor, from_state, to_state)
class Migration(migrations.Migration):
dependencies = [
('core', '0003_withfloatfield'),
]
operations = [
PostgresOnlyCreateModel(
name='BookWithChapters',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Book name')),
('chapters', chapters_field)
],
),
]
| Add version check for importing django.contrib.postgres.fields.ArrayField | Add version check for importing django.contrib.postgres.fields.ArrayField
| Python | bsd-2-clause | daniell/django-import-export,jnns/django-import-export,django-import-export/django-import-export,bmihelac/django-import-export,copperleaftech/django-import-export,brillgen/django-import-export,PetrDlouhy/django-import-export,daniell/django-import-export,daniell/django-import-export,PetrDlouhy/django-import-export,PetrDlouhy/django-import-export,jnns/django-import-export,brillgen/django-import-export,daniell/django-import-export,copperleaftech/django-import-export,jnns/django-import-export,brillgen/django-import-export,brillgen/django-import-export,copperleaftech/django-import-export,bmihelac/django-import-export,PetrDlouhy/django-import-export,django-import-export/django-import-export,django-import-export/django-import-export,bmihelac/django-import-export,bmihelac/django-import-export,jnns/django-import-export,django-import-export/django-import-export,copperleaftech/django-import-export |
d317b27a5dac13900beb8f2674b0725313970a80 | nodeconductor/core/handlers.py | nodeconductor/core/handlers.py | from __future__ import unicode_literals
import logging
from nodeconductor.core.log import EventLoggerAdapter
logger = logging.getLogger(__name__)
event_logger = EventLoggerAdapter(logger)
def log_ssh_key_save(sender, instance, created=False, **kwargs):
if created:
event_logger.info(
'SSH key %s has been created.', instance.name,
extra={'ssh_key': instance, 'event_type': 'ssh_key_created'})
def log_ssh_key_delete(sender, instance, **kwargs):
event_logger.info(
'SSH key %s has been deleted.', instance.name,
extra={'ssh_key': instance, 'event_type': 'ssh_key_deleted'})
| from __future__ import unicode_literals
import logging
from nodeconductor.core.log import EventLoggerAdapter
logger = logging.getLogger(__name__)
event_logger = EventLoggerAdapter(logger)
def log_ssh_key_save(sender, instance, created=False, **kwargs):
if created:
event_logger.info(
'SSH key %s has been created.', instance.name,
extra={'ssh_key': instance, 'event_type': 'ssh_key_creation_succeeded'})
def log_ssh_key_delete(sender, instance, **kwargs):
event_logger.info(
'SSH key %s has been deleted.', instance.name,
extra={'ssh_key': instance, 'event_type': 'ssh_key_deletion_succeeded'})
| Rename event types for consistency | Rename event types for consistency
- NC-332
| Python | mit | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor |
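The diff in this row only renames the event_type strings carried in the logging `extra` dict. A generic stand-in for that pattern (plain stdlib logging, not nodeconductor's EventLoggerAdapter, with an invented key consumer) looks like:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("events")

# Structured context travels in `extra`; handlers and filters key off
# record.event_type, which is why stable names such as
# 'ssh_key_creation_succeeded' matter more than the human-readable message.
logger.info("SSH key %s has been created.", "work-laptop",
            extra={"event_type": "ssh_key_creation_succeeded"})

class OnlySshEvents(logging.Filter):
    def filter(self, record):
        return getattr(record, "event_type", "").startswith("ssh_key_")

logger.addFilter(OnlySshEvents())
logger.info("unrelated message")  # dropped: the record has no event_type attribute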
fbcf9fbfe162b0f7491c5c89a2098a3f56bd6c6a | scripts/data_download/school_census/create_all_files.py | scripts/data_download/school_census/create_all_files.py | import os
import commands
import time
import logging
import sys
if len(sys.argv) != 3 or (sys.argv[1:][0] not in ['pt', 'en']):
print "ERROR! Use:\n python scripts/data_download/school_census/create_files.py en/pt output_path\n"
exit()
logging.basicConfig(filename=os.path.abspath(os.path.join(sys.argv[2],str(sys.argv[0].split('/')[2]) + '-all-data-download.log' )),level=logging.DEBUG)
for year in range(2007, 2016):
logging.info("python scripts/data_download/higher_education/create_files.py "+str(sys.argv[1])+" "+str(sys.argv[2])+" "+ str(year) + "\n")
ret = commands.getoutput("python scripts/data_download/school_census/create_files.py "+str(sys.argv[1])+" "+str(sys.argv[2])+" "+ str(year))
logging.info(str(ret) + "\nYear: " + str(year) + " ok =D\n\n") | import os
import commands
import time
import logging
import sys
if len(sys.argv) != 3 or (sys.argv[1:][0] not in ['pt', 'en']):
print "ERROR! Use:\n python scripts/data_download/school_census/create_files.py en/pt output_path\n"
exit()
logging.basicConfig(filename=os.path.abspath(os.path.join(sys.argv[2],str(sys.argv[0].split('/')[2]) + '-all-data-download.log' )),level=logging.DEBUG)
for year in range(2007, 2016):
logging.info("python scripts/data_download/school_census/create_files.py "+str(sys.argv[1])+" "+str(sys.argv[2])+" "+ str(year) + "\n")
ret = commands.getoutput("python scripts/data_download/school_census/create_files.py "+str(sys.argv[1])+" "+str(sys.argv[2])+" "+ str(year))
logging.info(str(ret) + "\nYear: " + str(year) + " ok =D\n\n") | Rename database in log file. | Rename database in log file.
| Python | mit | DataViva/dataviva-site,DataViva/dataviva-site,DataViva/dataviva-site,DataViva/dataviva-site |
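Both versions of this row's driver script shell out through the Python 2 `commands` module. Purely for comparison, a Python 3 rendering of the same loop would usually go through subprocess; the language flag and output directory below are made-up arguments, and the script interface is assumed to match the row:

import logging
import subprocess

logging.basicConfig(level=logging.INFO)

for year in range(2007, 2016):
    cmd = ["python", "scripts/data_download/school_census/create_files.py",
           "en", "/tmp/output", str(year)]
    # capture_output/text need Python 3.7+; check is left off so a failing year
    # is logged rather than aborting the whole run, as in the original loop.
    result = subprocess.run(cmd, capture_output=True, text=True)
    logging.info("year %s exited with %s\n%s", year, result.returncode, result.stdout)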
fba983fa54691fcde0de93d6519b3906dff3cb32 | sara_flexbe_states/src/sara_flexbe_states/get_distance2D.py | sara_flexbe_states/src/sara_flexbe_states/get_distance2D.py | #!/usr/bin/env python
from flexbe_core import EventState, Logger
import rospy
import re
import ros
import math
class getDistance(EventState):
"""
Computes the distance between two given points.
### InputKey
># point1
># point2
### OutputKey
#> distance
<= done
"""
def __init__(self):
"""Constructor"""
super(GetNumberFromText, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
def execute(self, userdata):
"""Wait for action result and return outcome accordingly"""
userdata.distance= calculate_distance(userdata.point1,userdata.point2)
return 'done'
def calculate_distance(p1,p2):
return math.sqrt(math.pow(p2.x-p1.x,2)+math.pow(p2.y-p1.y,2))
| #!/usr/bin/env python
from flexbe_core import EventState, Logger
import rospy
import re
import ros
import math
class getDistance(EventState):
"""
Computes the distance between two given points.
### InputKey
># point1
># point2
### OutputKey
#> distance
<= done
"""
def __init__(self):
"""Constructor"""
super(getDistance, self).__init__(outcomes = ['done'], input_keys = ['point1','point2'], output_keys = ['distance'])
def execute(self, userdata):
"""Wait for action result and return outcome accordingly"""
userdata.distance= calculate_distance(userdata.point1,userdata.point2)
return 'done'
def calculate_distance(p1,p2):
return math.sqrt(math.pow(p2.x-p1.x,2)+math.pow(p2.y-p1.y,2))
| Correct call to super constructor | Correct call to super constructor
| Python | bsd-3-clause | WalkingMachine/sara_behaviors,WalkingMachine/sara_behaviors |
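The fix recorded in this row is a one-token change: the first argument of an old-style super() call must name the class being defined. A self-contained toy illustration (these classes are stand-ins, not the FlexBE state API):

class EventState(object):
    def __init__(self, outcomes, input_keys=(), output_keys=()):
        self.outcomes = outcomes

class GetDistance(EventState):
    def __init__(self):
        # Referencing some other class name here (as the pre-fix code did with
        # GetNumberFromText) raises NameError at call time; the defining class
        # is the correct first argument for a Python 2-style super() call.
        super(GetDistance, self).__init__(outcomes=['done'],
                                          input_keys=['point1', 'point2'],
                                          output_keys=['distance'])

print(GetDistance().outcomes)  # ['done']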
fcdcf2b997c4adebd852ce399492a76868e8b0ad | greenmine/base/monkey.py | greenmine/base/monkey.py | # -*- coding: utf-8 -*-
from rest_framework import views
from rest_framework import status, exceptions
from rest_framework.response import Response
def patch_api_view():
from django.views.generic import View
if hasattr(views, "_patched"):
return
views._APIView = views.APIView
views._patched = True
class APIView(views.APIView):
def handle_exception(self, exc):
if isinstance(exc, exceptions.NotAuthenticated):
return Response({'detail': 'Not authenticated'},
status=status.HTTP_401_UNAUTHORIZED,
exception=True)
return super(APIView, self).handle_exception(exc)
@classmethod
def as_view(cls, **initkwargs):
view = super(views._APIView, cls).as_view(**initkwargs)
view.cls_instance = cls(**initkwargs)
return view
print "Patching APIView"
views.APIView = APIView
| # -*- coding: utf-8 -*-
from __future__ import print_function
import sys
from rest_framework import views
from rest_framework import status, exceptions
from rest_framework.response import Response
def patch_api_view():
from django.views.generic import View
if hasattr(views, "_patched"):
return
views._APIView = views.APIView
views._patched = True
class APIView(views.APIView):
def handle_exception(self, exc):
if isinstance(exc, exceptions.NotAuthenticated):
return Response({'detail': 'Not authenticated'},
status=status.HTTP_401_UNAUTHORIZED,
exception=True)
return super(APIView, self).handle_exception(exc)
@classmethod
def as_view(cls, **initkwargs):
view = super(views._APIView, cls).as_view(**initkwargs)
view.cls_instance = cls(**initkwargs)
return view
print("Patching APIView", file=sys.stderr)
views.APIView = APIView
| Send print message to sys.stderr | Smallfix: Send print message to sys.stderr
| Python | agpl-3.0 | astronaut1712/taiga-back,gauravjns/taiga-back,obimod/taiga-back,gauravjns/taiga-back,Tigerwhit4/taiga-back,EvgeneOskin/taiga-back,rajiteh/taiga-back,CoolCloud/taiga-back,bdang2012/taiga-back-casting,jeffdwyatt/taiga-back,astronaut1712/taiga-back,frt-arch/taiga-back,bdang2012/taiga-back-casting,crr0004/taiga-back,CMLL/taiga-back,bdang2012/taiga-back-casting,jeffdwyatt/taiga-back,CoolCloud/taiga-back,frt-arch/taiga-back,CoolCloud/taiga-back,dayatz/taiga-back,rajiteh/taiga-back,crr0004/taiga-back,obimod/taiga-back,astagi/taiga-back,joshisa/taiga-back,taigaio/taiga-back,gauravjns/taiga-back,WALR/taiga-back,joshisa/taiga-back,crr0004/taiga-back,EvgeneOskin/taiga-back,19kestier/taiga-back,WALR/taiga-back,dycodedev/taiga-back,forging2012/taiga-back,19kestier/taiga-back,Zaneh-/bearded-tribble-back,bdang2012/taiga-back-casting,Rademade/taiga-back,forging2012/taiga-back,obimod/taiga-back,xdevelsistemas/taiga-back-community,gam-phon/taiga-back,joshisa/taiga-back,Rademade/taiga-back,EvgeneOskin/taiga-back,astagi/taiga-back,WALR/taiga-back,obimod/taiga-back,joshisa/taiga-back,dycodedev/taiga-back,coopsource/taiga-back,CMLL/taiga-back,taigaio/taiga-back,forging2012/taiga-back,CoolCloud/taiga-back,dycodedev/taiga-back,CMLL/taiga-back,crr0004/taiga-back,coopsource/taiga-back,Rademade/taiga-back,gam-phon/taiga-back,astagi/taiga-back,Zaneh-/bearded-tribble-back,frt-arch/taiga-back,coopsource/taiga-back,jeffdwyatt/taiga-back,seanchen/taiga-back,seanchen/taiga-back,taigaio/taiga-back,gauravjns/taiga-back,seanchen/taiga-back,gam-phon/taiga-back,astronaut1712/taiga-back,astagi/taiga-back,CMLL/taiga-back,Tigerwhit4/taiga-back,Tigerwhit4/taiga-back,rajiteh/taiga-back,dayatz/taiga-back,astronaut1712/taiga-back,Zaneh-/bearded-tribble-back,dayatz/taiga-back,dycodedev/taiga-back,rajiteh/taiga-back,seanchen/taiga-back,gam-phon/taiga-back,EvgeneOskin/taiga-back,Rademade/taiga-back,coopsource/taiga-back,xdevelsistemas/taiga-back-community,xdevelsistemas/taiga-back-community,19kestier/taiga-back,forging2012/taiga-back,WALR/taiga-back,Tigerwhit4/taiga-back,jeffdwyatt/taiga-back,Rademade/taiga-back |
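This row's diff boils down to importing print_function and sending the diagnostic message to stderr. The pattern in isolation (message text taken from the row, everything else minimal):

# Works the same on Python 2.7 and 3: print becomes a function and the
# diagnostic goes to stderr instead of polluting stdout.
from __future__ import print_function
import sys

print("Patching APIView", file=sys.stderr)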
06914af3d8df899947a53c2fe3b3ce1de208d04d | robot-framework-needle.py | robot-framework-needle.py | from needle.cases import NeedleTestCase
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as ec
class TestLogo(NeedleTestCase):
def test_logo(self):
self.driver.get('http://www.bbc.co.uk/news/')
try:
WebDriverWait(self.driver, 20).until(
ec.presence_of_element_located((By.ID, "blq-mast"))
)
finally:
pass
self.assertScreenshot('#blq-mast', 'bbc-masthead') | from needle.cases import NeedleTestCase
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as ec
class TestLogo(NeedleTestCase):
def test_logo(self):
self.driver.get('http://www.bbc.co.uk/news/')
try:
WebDriverWait(self.driver, 20).until(
ec.presence_of_element_located((By.ID, "idcta-link"))
)
finally:
pass
self.assertScreenshot('#idcta-link', 'bbc-masthead') | Fix locators used in needle example on BBC site | Fix locators used in needle example on BBC site
| Python | apache-2.0 | laurentbristiel/robotframework-needle |
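The change in this row only swaps the element id used for the explicit wait and the screenshot assertion. Stripped of the needle-specific assertion, the underlying Selenium explicit-wait pattern is roughly the sketch below; the URL, locator and browser choice are placeholders (it assumes selenium plus geckodriver installed), not the BBC page from the row:

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as ec

driver = webdriver.Firefox()
try:
    driver.get("https://example.org/")
    # Block for up to 20 seconds until the element exists in the DOM; a stale
    # locator (the bug fixed above) surfaces here as a TimeoutException.
    WebDriverWait(driver, 20).until(
        ec.presence_of_element_located((By.TAG_NAME, "h1"))
    )
finally:
    driver.quit()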
061306d137b85ac59e182ffbba29d22bc8c624ba | characters/views.py | characters/views.py | from django.shortcuts import get_object_or_404, redirect, render
from django.views import generic
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
class CharacterIndexView(generic.ListView):
template_name = 'characters/index.html'
context_object_name = 'all_characters' # better than 'object_list'
def get_queryset(self):
return Character.objects.all()
class CharacterDetailView(generic.DetailView):
model = Character
template_name = 'characters/view_character.html'
def create_character(request):
form = CharacterForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
character = Character(
name=request.POST['name'],
background=request.POST['background'],
race_id=1,
cclass_id=1
)
character.save()
return redirect('characters:view', character_id=character.id)
context = {'form': form}
return render(request, 'characters/create_character.html', context)
| from django.shortcuts import get_object_or_404, redirect, render
from django.views import generic
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
class CharacterIndexView(generic.ListView):
template_name = 'characters/index.html'
context_object_name = 'all_characters' # better than 'object_list'
def get_queryset(self):
return Character.objects.all().order_by('name')
class CharacterDetailView(generic.DetailView):
model = Character
template_name = 'characters/view_character.html'
def create_character(request):
form = CharacterForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
character = Character(
name=request.POST['name'],
background=request.POST['background'],
race_id=1,
cclass_id=1
)
character.save()
return redirect('characters:view', character_id=character.id)
context = {'form': form}
return render(request, 'characters/create_character.html', context)
| Order character listing by name | Order character listing by name
| Python | mit | mpirnat/django-tutorial-v2 |
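The one-line change in this row appends .order_by('name') to the index view's queryset. What that buys at the database level can be shown with a plain SQLite stand-in for the Character table (table layout and names invented for illustration):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE character (name TEXT)")
conn.executemany("INSERT INTO character (name) VALUES (?)",
                 [("Zed",), ("Anya",), ("Mordin",)])

# Without ORDER BY the listing order is whatever the storage engine returns;
# .order_by('name') in the ORM adds exactly this clause, so the character list
# renders alphabetically and deterministically.
rows = conn.execute("SELECT name FROM character ORDER BY name").fetchall()
print([name for (name,) in rows])  # ['Anya', 'Mordin', 'Zed']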
ede4689ce3f9e03db5f250617e793083333af3a5 | notification/backends/email.py | notification/backends/email.py |
from django.conf import settings
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from django.utils.translation import ugettext
from django.contrib.sites.models import Site
from notification import backends
from notification.message import message_to_text
# favour django-mailer but fall back to django.core.mail
try:
from mailer import send_mail
except ImportError:
from django.core.mail import send_mail
class EmailBackend(backends.BaseBackend):
def can_send(self, user, notice_type):
if should_send(user, notice_type, "1") and user.email:
return True
return False
def deliver(self, recipients, notice_type, message):
notices_url = u"http://%s%s" % (
unicode(Site.objects.get_current()),
reverse("notification_notices"),
)
subject = render_to_string("notification/notification_subject.txt", {
"display": ugettext(notice_type.display),
})
message_body = render_to_string("notification/notification_body.txt", {
"message": message_to_text(message),
"notices_url": notices_url,
"contact_email": settings.CONTACT_EMAIL,
})
send_mail(subject, message_body,
settings.DEFAULT_FROM_EMAIL, recipients)
|
from django.conf import settings
from django.db.models.loading import get_app
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from django.utils.translation import ugettext
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
from notification import backends
from notification.message import message_to_text
# favour django-mailer but fall back to django.core.mail
try:
mailer = get_app("mailer")
from mailer import send_mail
except ImproperlyConfigured:
from django.core.mail import send_mail
class EmailBackend(backends.BaseBackend):
def can_send(self, user, notice_type):
if should_send(user, notice_type, "1") and user.email:
return True
return False
def deliver(self, recipients, notice_type, message):
notices_url = u"http://%s%s" % (
unicode(Site.objects.get_current()),
reverse("notification_notices"),
)
subject = render_to_string("notification/notification_subject.txt", {
"display": ugettext(notice_type.display),
})
message_body = render_to_string("notification/notification_body.txt", {
"message": message_to_text(message),
"notices_url": notices_url,
"contact_email": settings.CONTACT_EMAIL,
})
send_mail(subject, message_body,
settings.DEFAULT_FROM_EMAIL, recipients)
| Use get_app over to include django-mailer support over a standard import and ImportError exception handling. | pluggable-backends: Use get_app over to include django-mailer support over a standard import and ImportError exception handling.
git-svn-id: 12265af7f62f437cb19748843ef653b20b846039@130 590c3fc9-4838-0410-bb95-17a0c9b37ca9
| Python | mit | brosner/django-notification,arctelix/django-notification-automated |
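The commit in this final row prefers asking Django's app registry for "mailer" over blindly importing it. On the old loading API the row targets (django.db.models.loading was removed in Django 1.9), the guarded import reads roughly as below; treat it as a sketch of the idea rather than a drop-in for current Django:

from django.core.exceptions import ImproperlyConfigured

try:
    # get_app() consults INSTALLED_APPS, so a stray mailer package on the
    # PYTHONPATH that the project has not enabled no longer shadows Django's
    # built-in send_mail.
    from django.db.models.loading import get_app
    get_app("mailer")
    from mailer import send_mail
except (ImportError, ImproperlyConfigured):
    from django.core.mail import send_mail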