commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 1
2.94k
⌀ | new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
|
---|---|---|---|---|---|---|---|---|---|
5b6026bac75ae906d55410c583ebe4a756232dd7 | cla_backend/apps/cla_eventlog/management/commands/find_and_delete_old_cases.py | cla_backend/apps/cla_eventlog/management/commands/find_and_delete_old_cases.py | from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
self._setup()
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) == 0:
print("Number of cases to be deleted: " + str(cases.count()))
elif args[0] == "test_find":
return cases
elif args[0] == "delete":
instance.run()
| import sys
from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) and args[0] == "delete":
instance.run()
elif sys.argv[1] == "test":
return cases
else:
print("Number of cases to be deleted: " + str(cases.count()))
| Change functionality depending on where command is called from | Change functionality depending on where command is called from | Python | mit | ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend |
53e89d0e7af03bd9f59a2e6bcbe6cdbfe8e8f50a | corehq/apps/custom_data_fields/management/commands/convert_custom_user_data.py | corehq/apps/custom_data_fields/management/commands/convert_custom_user_data.py | from django.core.management.base import BaseCommand
from corehq.apps.custom_data_fields.models import CustomDataFieldsDefinition, CustomDataField
from corehq.apps.users.models import CommCareUser
from corehq.apps.domain.models import Domain
from dimagi.utils.couch.database import iter_docs
class Command(BaseCommand):
"""
Create a CustomDataFieldsDefinition based on existing custom user
information on each domain
"""
help = ''
def handle(self, *args, **options):
for domain in Domain.get_all_names():
fields_definition = CustomDataFieldsDefinition.get_or_create(
domain,
'UserFields'
)
user_ids = (CommCareUser.ids_by_domain(domain) +
CommCareUser.ids_by_domain(domain, is_active=False))
existing_field_slugs = set([field.slug for field in fields_definition.fields])
for user in iter_docs(CommCareUser.get_db(), user_ids):
user_data = user.get('user_data', {})
for key in user_data.keys():
if key not in existing_field_slugs:
existing_field_slugs.add(key)
fields_definition.fields.append(CustomDataField(
slug=key,
label=key,
is_required=False
))
# Only save a definition for domains which use custom user data
if fields_definition.fields:
fields_definition.save()
| from django.core.management.base import BaseCommand
from corehq.apps.custom_data_fields.models import CustomDataFieldsDefinition, CustomDataField
from corehq.apps.users.models import CommCareUser
from corehq.apps.domain.models import Domain
from dimagi.utils.couch.database import iter_docs
class Command(BaseCommand):
"""
Create a CustomDataFieldsDefinition based on existing custom user
information on each domain
"""
help = ''
def handle(self, *args, **options):
for domain in Domain.get_all_names():
fields_definition = CustomDataFieldsDefinition.get_or_create(
domain,
'UserFields'
)
user_ids = (CommCareUser.ids_by_domain(domain) +
CommCareUser.ids_by_domain(domain, is_active=False))
existing_field_slugs = set([field.slug for field in fields_definition.fields])
for user in iter_docs(CommCareUser.get_db(), user_ids):
user_data = user.get('user_data', {})
for key in user_data.keys():
if key and key not in existing_field_slugs:
existing_field_slugs.add(key)
fields_definition.fields.append(CustomDataField(
slug=key,
label=key,
is_required=False
))
# Only save a definition for domains which use custom user data
if fields_definition.fields:
fields_definition.save()
| Drop empty field keys in migration | Drop empty field keys in migration
| Python | bsd-3-clause | puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq |
fe47651ddd32a5795dab28ca998230b042a70c59 | app/views/comment_view.py | app/views/comment_view.py | from flask_classy import FlaskView
from flask_user import current_user, login_required
from ..models import CommentModel, PostModel
from ..forms import CommentForm
class Comment(FlaskView):
def get(self):
pass
def all(self, post_id):
comment = CommentModel()
comment.query.add_filter('post_id', '=', int(post_id))
print(comment.fetch())
return "comment.fetch()"
@login_required
def post(self, post_id):
form = CommentForm()
if form.validate_on_submit():
post = PostModel().get(post_id)
post = PostModel(**post)
comment = CommentModel(user=current_user.username, **form.data)
comment.put()
post.add_comment(comment.id)
return "ALEYUYA"
return "form.errors"
| from flask import jsonify
from flask_classy import FlaskView
from flask_user import current_user, login_required
from ..models import CommentModel, PostModel
from ..forms import CommentForm
class Comment(FlaskView):
def get(self):
pass
def all(self, post_id):
comment = CommentModel()
comment.query.add_filter('post_id', '=', int(post_id))
return jsonify(comment.fetch())
@login_required
def post(self, post_id):
form = CommentForm()
if form.validate_on_submit():
post = PostModel().get(post_id)
post = PostModel(**post)
comment = CommentModel(user=current_user.username, **form.data)
comment.put()
post.add_comment(comment.id)
return "ALEYUYA"
return "form.errors"
| Comment all view return a json of all comments | Comment all view return a json of all comments
| Python | mit | oldani/nanodegree-blog,oldani/nanodegree-blog,oldani/nanodegree-blog |
5b9f0270aaa53a562ca65fa74769885621da4a8e | website/addons/s3/__init__.py | website/addons/s3/__init__.py | import os
from . import model
from . import routes
from . import views
MODELS = [model.AddonS3UserSettings, model.AddonS3NodeSettings, model.S3GuidFile]
USER_SETTINGS_MODEL = model.AddonS3UserSettings
NODE_SETTINGS_MODEL = model.AddonS3NodeSettings
ROUTES = [routes.settings_routes]
SHORT_NAME = 's3'
FULL_NAME = 'Amazon Simple Storage Service'
OWNERS = ['user', 'node']
ADDED_DEFAULT = []
ADDED_MANDATORY = []
VIEWS = []
CONFIGS = ['user', 'node']
CATEGORIES = ['storage']
INCLUDE_JS = {}
INCLUDE_CSS = {
'widget': [],
'page': [],
}
HAS_HGRID_FILES = True
GET_HGRID_DATA = views.hgrid.s3_hgrid_data
# 1024 ** 1024 # There really shouldnt be a limit...
MAX_FILE_SIZE = 128 # MB
HERE = os.path.dirname(os.path.abspath(__file__))
NODE_SETTINGS_TEMPLATE = os.path.join(HERE, 'templates', 's3_node_settings.mako')
USER_SETTINGS_TEMPLATE = os.path.join(HERE, 'templates', 's3_user_settings.mako')
| import os
from . import model
from . import routes
from . import views
MODELS = [model.AddonS3UserSettings, model.AddonS3NodeSettings, model.S3GuidFile]
USER_SETTINGS_MODEL = model.AddonS3UserSettings
NODE_SETTINGS_MODEL = model.AddonS3NodeSettings
ROUTES = [routes.settings_routes]
SHORT_NAME = 's3'
FULL_NAME = 'Amazon S3'
OWNERS = ['user', 'node']
ADDED_DEFAULT = []
ADDED_MANDATORY = []
VIEWS = []
CONFIGS = ['user', 'node']
CATEGORIES = ['storage']
INCLUDE_JS = {}
INCLUDE_CSS = {
'widget': [],
'page': [],
}
HAS_HGRID_FILES = True
GET_HGRID_DATA = views.hgrid.s3_hgrid_data
# 1024 ** 1024 # There really shouldnt be a limit...
MAX_FILE_SIZE = 128 # MB
HERE = os.path.dirname(os.path.abspath(__file__))
NODE_SETTINGS_TEMPLATE = os.path.join(HERE, 'templates', 's3_node_settings.mako')
USER_SETTINGS_TEMPLATE = os.path.join(HERE, 'templates', 's3_user_settings.mako')
| Change S3 full name to Amazon S3 | Change S3 full name to Amazon S3
| Python | apache-2.0 | hmoco/osf.io,jmcarp/osf.io,abought/osf.io,amyshi188/osf.io,brandonPurvis/osf.io,sloria/osf.io,barbour-em/osf.io,lyndsysimon/osf.io,GageGaskins/osf.io,brandonPurvis/osf.io,HarryRybacki/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,wearpants/osf.io,TomBaxter/osf.io,samanehsan/osf.io,zachjanicki/osf.io,mluo613/osf.io,ZobairAlijan/osf.io,ckc6cz/osf.io,MerlinZhang/osf.io,CenterForOpenScience/osf.io,jmcarp/osf.io,caneruguz/osf.io,HarryRybacki/osf.io,njantrania/osf.io,fabianvf/osf.io,danielneis/osf.io,jnayak1/osf.io,binoculars/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,acshi/osf.io,leb2dg/osf.io,haoyuchen1992/osf.io,cwisecarver/osf.io,Ghalko/osf.io,jolene-esposito/osf.io,ticklemepierce/osf.io,zamattiac/osf.io,arpitar/osf.io,samchrisinger/osf.io,lyndsysimon/osf.io,hmoco/osf.io,ticklemepierce/osf.io,reinaH/osf.io,rdhyee/osf.io,binoculars/osf.io,fabianvf/osf.io,GageGaskins/osf.io,GageGaskins/osf.io,danielneis/osf.io,jolene-esposito/osf.io,kch8qx/osf.io,cldershem/osf.io,ckc6cz/osf.io,bdyetton/prettychart,revanthkolli/osf.io,brandonPurvis/osf.io,TomHeatwole/osf.io,mattclark/osf.io,SSJohns/osf.io,zachjanicki/osf.io,reinaH/osf.io,zamattiac/osf.io,cwisecarver/osf.io,mattclark/osf.io,bdyetton/prettychart,leb2dg/osf.io,HalcyonChimera/osf.io,monikagrabowska/osf.io,dplorimer/osf,billyhunt/osf.io,barbour-em/osf.io,kch8qx/osf.io,HalcyonChimera/osf.io,ckc6cz/osf.io,caseyrygt/osf.io,lamdnhan/osf.io,rdhyee/osf.io,Nesiehr/osf.io,acshi/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,abought/osf.io,CenterForOpenScience/osf.io,revanthkolli/osf.io,caseyrygt/osf.io,TomBaxter/osf.io,brandonPurvis/osf.io,acshi/osf.io,TomHeatwole/osf.io,amyshi188/osf.io,erinspace/osf.io,icereval/osf.io,jnayak1/osf.io,jmcarp/osf.io,mattclark/osf.io,doublebits/osf.io,aaxelb/osf.io,sbt9uc/osf.io,kwierman/osf.io,rdhyee/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,caneruguz/osf.io,pattisdr/osf.io,caseyrygt/osf.io,petermalcolm/osf.io,DanielSBrown/osf.io,Ghalko/osf.io,cslzchen/osf.io,
alexschiller/osf.io,amyshi188/osf.io,sbt9uc/osf.io,monikagrabowska/osf.io,fabianvf/osf.io,RomanZWang/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,emetsger/osf.io,billyhunt/osf.io,emetsger/osf.io,TomHeatwole/osf.io,laurenrevere/osf.io,doublebits/osf.io,GaryKriebel/osf.io,ticklemepierce/osf.io,RomanZWang/osf.io,doublebits/osf.io,MerlinZhang/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,lamdnhan/osf.io,TomHeatwole/osf.io,doublebits/osf.io,lamdnhan/osf.io,revanthkolli/osf.io,kwierman/osf.io,jeffreyliu3230/osf.io,jinluyuan/osf.io,lyndsysimon/osf.io,baylee-d/osf.io,erinspace/osf.io,barbour-em/osf.io,saradbowman/osf.io,felliott/osf.io,barbour-em/osf.io,wearpants/osf.io,cwisecarver/osf.io,hmoco/osf.io,jeffreyliu3230/osf.io,Johnetordoff/osf.io,alexschiller/osf.io,jeffreyliu3230/osf.io,sbt9uc/osf.io,aaxelb/osf.io,Nesiehr/osf.io,felliott/osf.io,njantrania/osf.io,ZobairAlijan/osf.io,danielneis/osf.io,reinaH/osf.io,icereval/osf.io,amyshi188/osf.io,KAsante95/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,haoyuchen1992/osf.io,billyhunt/osf.io,cslzchen/osf.io,samchrisinger/osf.io,HarryRybacki/osf.io,felliott/osf.io,hmoco/osf.io,ticklemepierce/osf.io,baylee-d/osf.io,fabianvf/osf.io,lyndsysimon/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,binoculars/osf.io,cldershem/osf.io,chrisseto/osf.io,caneruguz/osf.io,rdhyee/osf.io,asanfilippo7/osf.io,mfraezz/osf.io,adlius/osf.io,RomanZWang/osf.io,DanielSBrown/osf.io,emetsger/osf.io,brianjgeiger/osf.io,kwierman/osf.io,pattisdr/osf.io,cosenal/osf.io,danielneis/osf.io,reinaH/osf.io,sloria/osf.io,jeffreyliu3230/osf.io,GaryKriebel/osf.io,jnayak1/osf.io,petermalcolm/osf.io,DanielSBrown/osf.io,wearpants/osf.io,emetsger/osf.io,HarryRybacki/osf.io,asanfilippo7/osf.io,zkraime/osf.io,mfraezz/osf.io,KAsante95/osf.io,arpitar/osf.io,aaxelb/osf.io,caseyrollins/osf.io,acshi/osf.io,Johnetordoff/osf.io,arpitar/osf.io,chrisseto/osf.io,mluke93/osf.io,dplorimer/osf,samanehsan/osf.io,haoyuchen1992/osf.io,KAsante95/osf.io,revanthkolli/osf.io,cosenal/osf.io,baylee-d/osf.io
,acshi/osf.io,caseyrollins/osf.io,felliott/osf.io,billyhunt/osf.io,sbt9uc/osf.io,GageGaskins/osf.io,Ghalko/osf.io,jolene-esposito/osf.io,crcresearch/osf.io,CenterForOpenScience/osf.io,doublebits/osf.io,chennan47/osf.io,MerlinZhang/osf.io,mluo613/osf.io,saradbowman/osf.io,MerlinZhang/osf.io,petermalcolm/osf.io,cwisecarver/osf.io,mfraezz/osf.io,samchrisinger/osf.io,wearpants/osf.io,njantrania/osf.io,kwierman/osf.io,cosenal/osf.io,arpitar/osf.io,zkraime/osf.io,billyhunt/osf.io,abought/osf.io,jinluyuan/osf.io,njantrania/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,samanehsan/osf.io,KAsante95/osf.io,zkraime/osf.io,laurenrevere/osf.io,ckc6cz/osf.io,DanielSBrown/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,jolene-esposito/osf.io,adlius/osf.io,jnayak1/osf.io,ZobairAlijan/osf.io,aaxelb/osf.io,dplorimer/osf,zachjanicki/osf.io,brianjgeiger/osf.io,RomanZWang/osf.io,bdyetton/prettychart,zkraime/osf.io,samchrisinger/osf.io,adlius/osf.io,SSJohns/osf.io,samanehsan/osf.io,chennan47/osf.io,leb2dg/osf.io,GaryKriebel/osf.io,caseyrollins/osf.io,mluke93/osf.io,crcresearch/osf.io,Ghalko/osf.io,sloria/osf.io,erinspace/osf.io,mluo613/osf.io,abought/osf.io,adlius/osf.io,cldershem/osf.io,icereval/osf.io,crcresearch/osf.io,mluke93/osf.io,cldershem/osf.io,Johnetordoff/osf.io,lamdnhan/osf.io,laurenrevere/osf.io,alexschiller/osf.io,GageGaskins/osf.io,zachjanicki/osf.io,kch8qx/osf.io,bdyetton/prettychart,petermalcolm/osf.io,kch8qx/osf.io,Nesiehr/osf.io,dplorimer/osf,mluke93/osf.io,jmcarp/osf.io,asanfilippo7/osf.io,SSJohns/osf.io,CenterForOpenScience/osf.io,brandonPurvis/osf.io,zamattiac/osf.io,kch8qx/osf.io,cslzchen/osf.io,chennan47/osf.io,GaryKriebel/osf.io,cosenal/osf.io,alexschiller/osf.io,jinluyuan/osf.io,KAsante95/osf.io,zamattiac/osf.io,mluo613/osf.io,mluo613/osf.io,caseyrygt/osf.io,chrisseto/osf.io,jinluyuan/osf.io,haoyuchen1992/osf.io |
3a073bb52224876b2424404a59df3c9e3d3fff89 | lesson2/read_passwd/read_passwd.py | lesson2/read_passwd/read_passwd.py | '''
Created on Sep 27, 2013
@author: dgamez
'''
def run():
t = lee_passwd ()
for l in t:
# print l
e = l.split(":")
user = e[0]
shell = e[-1].rstrip("\n")
print "%s -> %s" % (user, shell)
def lee_passwd ():
f = open('/etc/passwd','r')
t = f.readlines ()
f.close ()
return t
if __name__ == '__main__':
run () | '''
Created on Sep 27, 2013
Testing
@author: dgamez
'''
def run():
t = lee_passwd ()
for l in t:
# print l
e = l.split(":")
user = e[0]
shell = e[-1].rstrip("\n")
print "%s -> %s" % (user, shell)
def lee_passwd ():
f = open('/etc/passwd','r')
t = f.readlines ()
f.close ()
return t
if __name__ == '__main__':
run () | Verify EGit commits/pushes to Master | Verify EGit commits/pushes to Master | Python | bsd-2-clause | gamezdaniel/mswl-dt-2013,gamezdaniel/mswl-dt-2013 |
02e718fb9dd82b252a5726a81eb3a70817d91a88 | test/backend/test_database/__init__.py | test/backend/test_database/__init__.py | null | # The main Flask application needs to imported before any of the database tests so that linkr.db
# is defined before attempting to access any of the database modules database.*.
# This is the result of a import "race condition" caused by the fact that Flask requires references
# to any libraries to be declared explicitly as a global variable before potentially more imports
# that make use of that library.
# Honestly, just don't worry about it, and don't touch this line
import linkr
| Fix import race condition on CI | Fix import race condition on CI
| Python | mit | LINKIWI/linkr,LINKIWI/linkr,LINKIWI/linkr |
d18ff30bbddde5049ffbe23bce19288c3c47e41b | posts/views.py | posts/views.py | from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from .models import Post
class PostListView(ListView):
model = Post
context_object_name = 'posts'
class PostDetailView(DetailView):
model = Post
context_object_name = 'post'
| from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from .models import Post
class PostListView(ListView):
model = Post
context_object_name = 'posts'
def get_queryset(self):
"""
Order posts by the day they were added, from newest, to oldest.
"""
queryset = super(PostListView, self).get_queryset()
return queryset.order_by('-added_at')
class PostDetailView(DetailView):
model = Post
context_object_name = 'post'
| Order posts from newest to oldest | posts: Order posts from newest to oldest
| Python | mit | rtrembecky/roots,tbabej/roots,rtrembecky/roots,tbabej/roots,matus-stehlik/roots,matus-stehlik/roots,matus-stehlik/glowing-batman,matus-stehlik/roots,matus-stehlik/glowing-batman,rtrembecky/roots,tbabej/roots |
dae8420456280fdf1f0971301986995c21fd8027 | static_template_view/views.py | static_template_view/views.py | # View for semi-static templatized content.
#
# List of valid templates is explicitly managed for (short-term)
# security reasons.
from mitxmako.shortcuts import render_to_response, render_to_string
from django.shortcuts import redirect
from django.core.context_processors import csrf
from django.conf import settings
#valid_templates=['index.html', 'staff.html', 'info.html', 'credits.html']
valid_templates=['mitx_global.html',
'index.html',
'tos.html',
'privacy.html',
'honor.html',
'copyright.html',
'404.html']
print "!!",settings.__dict__
if settings.STATIC_GRAB:
valid_templates = valid_templates+['server-down.html',
'server-error.html'
'server-overloaded.html']
def index(request, template):
csrf_token = csrf(request)['csrf_token']
if template in valid_templates:
return render_to_response(template, {'error' : '',
'csrf': csrf_token })
else:
return redirect('/')
valid_auth_templates=['help.html']
def auth_index(request, template):
if not request.user.is_authenticated():
return redirect('/')
if template in valid_auth_templates:
return render_to_response(template,{})
else:
return redirect('/')
| # View for semi-static templatized content.
#
# List of valid templates is explicitly managed for (short-term)
# security reasons.
from mitxmako.shortcuts import render_to_response, render_to_string
from django.shortcuts import redirect
from django.core.context_processors import csrf
from django.conf import settings
#valid_templates=['index.html', 'staff.html', 'info.html', 'credits.html']
valid_templates=['index.html',
'tos.html',
'privacy.html',
'honor.html',
'copyright.html',
'404.html']
print "!!",settings.__dict__
if settings.STATIC_GRAB:
valid_templates = valid_templates+['server-down.html',
'server-error.html'
'server-overloaded.html',
'mitx_global.html',
'mitx-overview.html',
'6002x-faq.html',
'6002x-press-release.html'
]
def index(request, template):
csrf_token = csrf(request)['csrf_token']
if template in valid_templates:
return render_to_response(template, {'error' : '',
'csrf': csrf_token })
else:
return redirect('/')
valid_auth_templates=['help.html']
def auth_index(request, template):
if not request.user.is_authenticated():
return redirect('/')
if template in valid_auth_templates:
return render_to_response(template,{})
else:
return redirect('/')
| Support for static pages added | Support for static pages added
| Python | agpl-3.0 | appliedx/edx-platform,SivilTaram/edx-platform,inares/edx-platform,shurihell/testasia,zofuthan/edx-platform,wwj718/edx-platform,abdoosh00/edx-rtl-final,10clouds/edx-platform,unicri/edx-platform,Edraak/edx-platform,praveen-pal/edx-platform,jruiperezv/ANALYSE,fly19890211/edx-platform,Lektorium-LLC/edx-platform,morenopc/edx-platform,bdero/edx-platform,PepperPD/edx-pepper-platform,hamzehd/edx-platform,Livit/Livit.Learn.EdX,rationalAgent/edx-platform-custom,bitifirefly/edx-platform,ahmadio/edx-platform,eestay/edx-platform,jbassen/edx-platform,jamiefolsom/edx-platform,pelikanchik/edx-platform,franosincic/edx-platform,EduPepperPDTesting/pepper2013-testing,pomegranited/edx-platform,don-github/edx-platform,shashank971/edx-platform,MakeHer/edx-platform,eemirtekin/edx-platform,hkawasaki/kawasaki-aio8-1,dkarakats/edx-platform,abdoosh00/edraak,antoviaque/edx-platform,utecuy/edx-platform,Kalyzee/edx-platform,DNFcode/edx-platform,vikas1885/test1,xuxiao19910803/edx-platform,kmoocdev2/edx-platform,jruiperezv/ANALYSE,jamiefolsom/edx-platform,alu042/edx-platform,kamalx/edx-platform,UXE/local-edx,ESOedX/edx-platform,jzoldak/edx-platform,bdero/edx-platform,Edraak/circleci-edx-platform,a-parhom/edx-platform,rue89-tech/edx-platform,jruiperezv/ANALYSE,vikas1885/test1,synergeticsedx/deployment-wipro,vikas1885/test1,ahmadio/edx-platform,msegado/edx-platform,polimediaupv/edx-platform,jazkarta/edx-platform-for-isc,jamesblunt/edx-platform,dsajkl/123,hkawasaki/kawasaki-aio8-1,simbs/edx-platform,kmoocdev2/edx-platform,JioEducation/edx-platform,SravanthiSinha/edx-platform,RPI-OPENEDX/edx-platform,romain-li/edx-platform,waheedahmed/edx-platform,SivilTaram/edx-platform,Ayub-Khan/edx-platform,abdoosh00/edraak,IITBinterns13/edx-platform-dev,jazkarta/edx-platform-for-isc,cecep-edu/edx-platform,Kalyzee/edx-platform,Ayub-Khan/edx-platform,kmoocdev/edx-platform,ampax/edx-platform,martynovp/edx-platform,arifsetiawan/edx-platform,nanolearningllc/edx-platform-cypress-2,shurihell/testasia
,sudheerchintala/LearnEraPlatForm,tiagochiavericosta/edx-platform,atsolakid/edx-platform,doismellburning/edx-platform,edry/edx-platform,EduPepperPD/pepper2013,BehavioralInsightsTeam/edx-platform,alexthered/kienhoc-platform,mahendra-r/edx-platform,rismalrv/edx-platform,jazkarta/edx-platform,adoosii/edx-platform,openfun/edx-platform,chudaol/edx-platform,chudaol/edx-platform,DefyVentures/edx-platform,devs1991/test_edx_docmode,andyzsf/edx,carsongee/edx-platform,angelapper/edx-platform,zubair-arbi/edx-platform,chand3040/cloud_that,vismartltd/edx-platform,praveen-pal/edx-platform,martynovp/edx-platform,BehavioralInsightsTeam/edx-platform,longmen21/edx-platform,jazztpt/edx-platform,PepperPD/edx-pepper-platform,jswope00/GAI,halvertoluke/edx-platform,romain-li/edx-platform,beacloudgenius/edx-platform,cyanna/edx-platform,valtech-mooc/edx-platform,don-github/edx-platform,10clouds/edx-platform,bigdatauniversity/edx-platform,eestay/edx-platform,zerobatu/edx-platform,xuxiao19910803/edx-platform,ahmadio/edx-platform,shubhdev/edxOnBaadal,Stanford-Online/edx-platform,MakeHer/edx-platform,cognitiveclass/edx-platform,msegado/edx-platform,openfun/edx-platform,TeachAtTUM/edx-platform,pdehaye/theming-edx-platform,ahmadiga/min_edx,kxliugang/edx-platform,olexiim/edx-platform,jazkarta/edx-platform-for-isc,simbs/edx-platform,ak2703/edx-platform,waheedahmed/edx-platform,shurihell/testasia,zofuthan/edx-platform,nttks/jenkins-test,chand3040/cloud_that,procangroup/edx-platform,chudaol/edx-platform,jamiefolsom/edx-platform,stvstnfrd/edx-platform,nikolas/edx-platform,torchingloom/edx-platform,nagyistoce/edx-platform,leansoft/edx-platform,mushtaqak/edx-platform,devs1991/test_edx_docmode,ovnicraft/edx-platform,zhenzhai/edx-platform,zerobatu/edx-platform,jamesblunt/edx-platform,gsehub/edx-platform,chrisndodge/edx-platform,cognitiveclass/edx-platform,Lektorium-LLC/edx-platform,hamzehd/edx-platform,antoviaque/edx-platform,teltek/edx-platform,Unow/edx-platform,don-github/edx-platform,mjg2203/edx-platfor
m-seas,CourseTalk/edx-platform,motion2015/edx-platform,TeachAtTUM/edx-platform,mbareta/edx-platform-ft,PepperPD/edx-pepper-platform,Ayub-Khan/edx-platform,auferack08/edx-platform,mushtaqak/edx-platform,cselis86/edx-platform,praveen-pal/edx-platform,nanolearningllc/edx-platform-cypress,chrisndodge/edx-platform,vismartltd/edx-platform,cpennington/edx-platform,yokose-ks/edx-platform,wwj718/edx-platform,arbrandes/edx-platform,ahmadio/edx-platform,Stanford-Online/edx-platform,xuxiao19910803/edx,doismellburning/edx-platform,franosincic/edx-platform,jelugbo/tundex,amir-qayyum-khan/edx-platform,martynovp/edx-platform,antonve/s4-project-mooc,eduNEXT/edx-platform,AkA84/edx-platform,arbrandes/edx-platform,y12uc231/edx-platform,valtech-mooc/edx-platform,angelapper/edx-platform,SivilTaram/edx-platform,jonathan-beard/edx-platform,fintech-circle/edx-platform,AkA84/edx-platform,TsinghuaX/edx-platform,rue89-tech/edx-platform,ZLLab-Mooc/edx-platform,abdoosh00/edx-rtl-final,polimediaupv/edx-platform,jjmiranda/edx-platform,xinjiguaike/edx-platform,RPI-OPENEDX/edx-platform,jazkarta/edx-platform-for-isc,caesar2164/edx-platform,bigdatauniversity/edx-platform,cpennington/edx-platform,ampax/edx-platform-backup,waheedahmed/edx-platform,sudheerchintala/LearnEraPlatForm,solashirai/edx-platform,doganov/edx-platform,eduNEXT/edx-platform,doismellburning/edx-platform,ferabra/edx-platform,jswope00/griffinx,louyihua/edx-platform,unicri/edx-platform,nikolas/edx-platform,syjeon/new_edx,Edraak/edraak-platform,alexthered/kienhoc-platform,shubhdev/openedx,xingyepei/edx-platform,vikas1885/test1,ak2703/edx-platform,morenopc/edx-platform,nanolearning/edx-platform,shubhdev/edx-platform,Softmotions/edx-platform,nanolearningllc/edx-platform-cypress-2,hamzehd/edx-platform,Semi-global/edx-platform,cecep-edu/edx-platform,ESOedX/edx-platform,xuxiao19910803/edx,ak2703/edx-platform,SivilTaram/edx-platform,benpatterson/edx-platform,pku9104038/edx-platform,rhndg/openedx,knehez/edx-platform,jamiefolsom/edx-platform,zhe
nzhai/edx-platform,4eek/edx-platform,arbrandes/edx-platform,naresh21/synergetics-edx-platform,Semi-global/edx-platform,alu042/edx-platform,beacloudgenius/edx-platform,raccoongang/edx-platform,defance/edx-platform,jjmiranda/edx-platform,chauhanhardik/populo_2,jolyonb/edx-platform,4eek/edx-platform,adoosii/edx-platform,UOMx/edx-platform,beacloudgenius/edx-platform,OmarIthawi/edx-platform,SivilTaram/edx-platform,inares/edx-platform,appliedx/edx-platform,chand3040/cloud_that,longmen21/edx-platform,mahendra-r/edx-platform,IONISx/edx-platform,Softmotions/edx-platform,tiagochiavericosta/edx-platform,xuxiao19910803/edx,a-parhom/edx-platform,valtech-mooc/edx-platform,longmen21/edx-platform,shurihell/testasia,louyihua/edx-platform,ampax/edx-platform-backup,analyseuc3m/ANALYSE-v1,philanthropy-u/edx-platform,kamalx/edx-platform,eemirtekin/edx-platform,zofuthan/edx-platform,jzoldak/edx-platform,cognitiveclass/edx-platform,B-MOOC/edx-platform,hastexo/edx-platform,bigdatauniversity/edx-platform,Edraak/edraak-platform,Edraak/edx-platform,hmcmooc/muddx-platform,Unow/edx-platform,amir-qayyum-khan/edx-platform,CredoReference/edx-platform,fly19890211/edx-platform,bdero/edx-platform,procangroup/edx-platform,shashank971/edx-platform,ubc/edx-platform,dkarakats/edx-platform,defance/edx-platform,yokose-ks/edx-platform,jswope00/griffinx,ampax/edx-platform,andyzsf/edx,solashirai/edx-platform,kmoocdev/edx-platform,vasyarv/edx-platform,pomegranited/edx-platform,jbzdak/edx-platform,DNFcode/edx-platform,yokose-ks/edx-platform,utecuy/edx-platform,marcore/edx-platform,IONISx/edx-platform,ovnicraft/edx-platform,doganov/edx-platform,zhenzhai/edx-platform,edry/edx-platform,Livit/Livit.Learn.EdX,leansoft/edx-platform,proversity-org/edx-platform,y12uc231/edx-platform,pabloborrego93/edx-platform,B-MOOC/edx-platform,jazkarta/edx-platform,J861449197/edx-platform,Softmotions/edx-platform,motion2015/edx-platform,chauhanhardik/populo_2,rue89-tech/edx-platform,knehez/edx-platform,jazkarta/edx-platform,nanolear
ningllc/edx-platform-cypress,AkA84/edx-platform,adoosii/edx-platform,naresh21/synergetics-edx-platform,itsjeyd/edx-platform,dkarakats/edx-platform,mbareta/edx-platform-ft,dkarakats/edx-platform,kalebhartje/schoolboost,pelikanchik/edx-platform,adoosii/edx-platform,UOMx/edx-platform,CredoReference/edx-platform,lduarte1991/edx-platform,hmcmooc/muddx-platform,JCBarahona/edX,SravanthiSinha/edx-platform,DNFcode/edx-platform,tanmaykm/edx-platform,iivic/BoiseStateX,philanthropy-u/edx-platform,chauhanhardik/populo_2,wwj718/ANALYSE,edry/edx-platform,openfun/edx-platform,Edraak/circleci-edx-platform,cyanna/edx-platform,morenopc/edx-platform,bigdatauniversity/edx-platform,wwj718/edx-platform,eduNEXT/edunext-platform,auferack08/edx-platform,DefyVentures/edx-platform,nanolearningllc/edx-platform-cypress-2,procangroup/edx-platform,SravanthiSinha/edx-platform,MSOpenTech/edx-platform,jzoldak/edx-platform,jbzdak/edx-platform,dsajkl/reqiop,eestay/edx-platform,philanthropy-u/edx-platform,pelikanchik/edx-platform,ovnicraft/edx-platform,shubhdev/edx-platform,hastexo/edx-platform,Kalyzee/edx-platform,mcgachey/edx-platform,pomegranited/edx-platform,atsolakid/edx-platform,shubhdev/openedx,shubhdev/edx-platform,deepsrijit1105/edx-platform,cpennington/edx-platform,ferabra/edx-platform,caesar2164/edx-platform,UXE/local-edx,chudaol/edx-platform,beni55/edx-platform,stvstnfrd/edx-platform,Semi-global/edx-platform,dsajkl/123,utecuy/edx-platform,pabloborrego93/edx-platform,EduPepperPD/pepper2013,jbassen/edx-platform,analyseuc3m/ANALYSE-v1,WatanabeYasumasa/edx-platform,hastexo/edx-platform,mitocw/edx-platform,miptliot/edx-platform,PepperPD/edx-pepper-platform,chauhanhardik/populo,edx-solutions/edx-platform,naresh21/synergetics-edx-platform,fly19890211/edx-platform,cognitiveclass/edx-platform,rismalrv/edx-platform,kmoocdev2/edx-platform,mtlchun/edx,shubhdev/edx-platform,analyseuc3m/ANALYSE-v1,miptliot/edx-platform,ESOedX/edx-platform,iivic/BoiseStateX,Edraak/circleci-edx-platform,hkawasaki/kawasaki-a
io8-1,nanolearningllc/edx-platform-cypress-2,Lektorium-LLC/edx-platform,zadgroup/edx-platform,zadgroup/edx-platform,beacloudgenius/edx-platform,jbassen/edx-platform,xinjiguaike/edx-platform,Edraak/edx-platform,jbzdak/edx-platform,WatanabeYasumasa/edx-platform,auferack08/edx-platform,Shrhawk/edx-platform,EDUlib/edx-platform,naresh21/synergetics-edx-platform,pdehaye/theming-edx-platform,Kalyzee/edx-platform,pku9104038/edx-platform,doganov/edx-platform,nttks/edx-platform,xuxiao19910803/edx-platform,EduPepperPD/pepper2013,ZLLab-Mooc/edx-platform,kursitet/edx-platform,EDUlib/edx-platform,xinjiguaike/edx-platform,Livit/Livit.Learn.EdX,jbassen/edx-platform,jonathan-beard/edx-platform,mcgachey/edx-platform,MakeHer/edx-platform,mahendra-r/edx-platform,arifsetiawan/edx-platform,DNFcode/edx-platform,sameetb-cuelogic/edx-platform-test,shubhdev/edxOnBaadal,ZLLab-Mooc/edx-platform,ferabra/edx-platform,hkawasaki/kawasaki-aio8-0,B-MOOC/edx-platform,Semi-global/edx-platform,pku9104038/edx-platform,beni55/edx-platform,Lektorium-LLC/edx-platform,EduPepperPDTesting/pepper2013-testing,etzhou/edx-platform,unicri/edx-platform,cpennington/edx-platform,lduarte1991/edx-platform,EduPepperPDTesting/pepper2013-testing,edry/edx-platform,EduPepperPDTesting/pepper2013-testing,OmarIthawi/edx-platform,mjirayu/sit_academy,IndonesiaX/edx-platform,playm2mboy/edx-platform,ahmedaljazzar/edx-platform,jolyonb/edx-platform,yokose-ks/edx-platform,jazztpt/edx-platform,antoviaque/edx-platform,olexiim/edx-platform,mjirayu/sit_academy,apigee/edx-platform,motion2015/a3,zerobatu/edx-platform,franosincic/edx-platform,ovnicraft/edx-platform,LICEF/edx-platform,angelapper/edx-platform,zadgroup/edx-platform,shubhdev/openedx,xuxiao19910803/edx-platform,arifsetiawan/edx-platform,4eek/edx-platform,teltek/edx-platform,devs1991/test_edx_docmode,knehez/edx-platform,jswope00/griffinx,nanolearning/edx-platform,dsajkl/reqiop,adoosii/edx-platform,philanthropy-u/edx-platform,kalebhartje/schoolboost,IITBinterns13/edx-platform-dev,
wwj718/edx-platform,shashank971/edx-platform,miptliot/edx-platform,hmcmooc/muddx-platform,ZLLab-Mooc/edx-platform,jruiperezv/ANALYSE,MSOpenTech/edx-platform,gsehub/edx-platform,marcore/edx-platform,zubair-arbi/edx-platform,leansoft/edx-platform,utecuy/edx-platform,msegado/edx-platform,fly19890211/edx-platform,Edraak/edx-platform,leansoft/edx-platform,etzhou/edx-platform,rue89-tech/edx-platform,LearnEra/LearnEraPlaftform,pepeportela/edx-platform,xuxiao19910803/edx-platform,deepsrijit1105/edx-platform,cyanna/edx-platform,rhndg/openedx,morpheby/levelup-by,gsehub/edx-platform,polimediaupv/edx-platform,proversity-org/edx-platform,openfun/edx-platform,cecep-edu/edx-platform,UOMx/edx-platform,carsongee/edx-platform,arifsetiawan/edx-platform,Softmotions/edx-platform,kursitet/edx-platform,pdehaye/theming-edx-platform,mbareta/edx-platform-ft,bitifirefly/edx-platform,ZLLab-Mooc/edx-platform,Edraak/edx-platform,ampax/edx-platform,doganov/edx-platform,PepperPD/edx-pepper-platform,hamzehd/edx-platform,EDUlib/edx-platform,J861449197/edx-platform,shashank971/edx-platform,MSOpenTech/edx-platform,nttks/jenkins-test,UXE/local-edx,IndonesiaX/edx-platform,IONISx/edx-platform,louyihua/edx-platform,shubhdev/edxOnBaadal,sameetb-cuelogic/edx-platform-test,JCBarahona/edX,ahmadio/edx-platform,CredoReference/edx-platform,AkA84/edx-platform,kxliugang/edx-platform,nttks/edx-platform,zadgroup/edx-platform,sameetb-cuelogic/edx-platform-test,Shrhawk/edx-platform,IndonesiaX/edx-platform,jolyonb/edx-platform,IITBinterns13/edx-platform-dev,doismellburning/edx-platform,pelikanchik/edx-platform,Stanford-Online/edx-platform,mcgachey/edx-platform,nanolearningllc/edx-platform-cypress,jonathan-beard/edx-platform,mitocw/edx-platform,syjeon/new_edx,y12uc231/edx-platform,beni55/edx-platform,chauhanhardik/populo_2,morpheby/levelup-by,JioEducation/edx-platform,alu042/edx-platform,nanolearningllc/edx-platform-cypress,hkawasaki/kawasaki-aio8-2,xuxiao19910803/edx,chauhanhardik/populo_2,benpatterson/edx-platform,ovn
icraft/edx-platform,kxliugang/edx-platform,ubc/edx-platform,vasyarv/edx-platform,synergeticsedx/deployment-wipro,rhndg/openedx,bitifirefly/edx-platform,antoviaque/edx-platform,simbs/edx-platform,pepeportela/edx-platform,nttks/jenkins-test,dsajkl/reqiop,cecep-edu/edx-platform,vismartltd/edx-platform,mjirayu/sit_academy,olexiim/edx-platform,defance/edx-platform,jswope00/GAI,ak2703/edx-platform,CourseTalk/edx-platform,zubair-arbi/edx-platform,Semi-global/edx-platform,CourseTalk/edx-platform,chand3040/cloud_that,torchingloom/edx-platform,mitocw/edx-platform,LICEF/edx-platform,xuxiao19910803/edx,alexthered/kienhoc-platform,nagyistoce/edx-platform,syjeon/new_edx,chauhanhardik/populo,morenopc/edx-platform,jonathan-beard/edx-platform,jelugbo/tundex,kamalx/edx-platform,ahmadiga/min_edx,Ayub-Khan/edx-platform,fintech-circle/edx-platform,jswope00/griffinx,polimediaupv/edx-platform,itsjeyd/edx-platform,shabab12/edx-platform,hkawasaki/kawasaki-aio8-0,mjirayu/sit_academy,romain-li/edx-platform,chudaol/edx-platform,MakeHer/edx-platform,bitifirefly/edx-platform,etzhou/edx-platform,wwj718/ANALYSE,jbzdak/edx-platform,playm2mboy/edx-platform,inares/edx-platform,marcore/edx-platform,defance/edx-platform,LearnEra/LearnEraPlaftform,don-github/edx-platform,ESOedX/edx-platform,kalebhartje/schoolboost,pepeportela/edx-platform,nttks/edx-platform,ubc/edx-platform,J861449197/edx-platform,antonve/s4-project-mooc,dsajkl/123,mjg2203/edx-platform-seas,mahendra-r/edx-platform,pomegranited/edx-platform,shubhdev/openedx,cyanna/edx-platform,sameetb-cuelogic/edx-platform-test,jelugbo/tundex,WatanabeYasumasa/edx-platform,jruiperezv/ANALYSE,franosincic/edx-platform,vasyarv/edx-platform,lduarte1991/edx-platform,JCBarahona/edX,cselis86/edx-platform,zofuthan/edx-platform,unicri/edx-platform,hkawasaki/kawasaki-aio8-0,sudheerchintala/LearnEraPlatForm,IITBinterns13/edx-platform-dev,kmoocdev2/edx-platform,chauhanhardik/populo,kursitet/edx-platform,ahmedaljazzar/edx-platform,openfun/edx-platform,jjmiranda/edx-pl
atform,nanolearningllc/edx-platform-cypress-2,MakeHer/edx-platform,abdoosh00/edx-rtl-final,motion2015/edx-platform,motion2015/edx-platform,morenopc/edx-platform,ampax/edx-platform-backup,jamesblunt/edx-platform,10clouds/edx-platform,tanmaykm/edx-platform,apigee/edx-platform,stvstnfrd/edx-platform,utecuy/edx-platform,mbareta/edx-platform-ft,JCBarahona/edX,tiagochiavericosta/edx-platform,amir-qayyum-khan/edx-platform,cselis86/edx-platform,iivic/BoiseStateX,gymnasium/edx-platform,JioEducation/edx-platform,nttks/edx-platform,IndonesiaX/edx-platform,beni55/edx-platform,gsehub/edx-platform,halvertoluke/edx-platform,shurihell/testasia,ahmedaljazzar/edx-platform,franosincic/edx-platform,zofuthan/edx-platform,vismartltd/edx-platform,UXE/local-edx,eemirtekin/edx-platform,abdoosh00/edraak,nanolearning/edx-platform,andyzsf/edx,iivic/BoiseStateX,simbs/edx-platform,Unow/edx-platform,alexthered/kienhoc-platform,don-github/edx-platform,mcgachey/edx-platform,rationalAgent/edx-platform-custom,mtlchun/edx,nikolas/edx-platform,jbassen/edx-platform,mjirayu/sit_academy,shabab12/edx-platform,carsongee/edx-platform,raccoongang/edx-platform,kmoocdev/edx-platform,torchingloom/edx-platform,LICEF/edx-platform,vasyarv/edx-platform,LearnEra/LearnEraPlaftform,benpatterson/edx-platform,nagyistoce/edx-platform,ahmedaljazzar/edx-platform,ampax/edx-platform-backup,knehez/edx-platform,Kalyzee/edx-platform,prarthitm/edxplatform,solashirai/edx-platform,UOMx/edx-platform,edx/edx-platform,jazztpt/edx-platform,playm2mboy/edx-platform,pomegranited/edx-platform,knehez/edx-platform,RPI-OPENEDX/edx-platform,valtech-mooc/edx-platform,doismellburning/edx-platform,arbrandes/edx-platform,jonathan-beard/edx-platform,proversity-org/edx-platform,zubair-arbi/edx-platform,Ayub-Khan/edx-platform,OmarIthawi/edx-platform,praveen-pal/edx-platform,jazztpt/edx-platform,morpheby/levelup-by,MSOpenTech/edx-platform,jjmiranda/edx-platform,LICEF/edx-platform,raccoongang/edx-platform,arifsetiawan/edx-platform,unicri/edx-platform,i
tsjeyd/edx-platform,marcore/edx-platform,martynovp/edx-platform,y12uc231/edx-platform,hastexo/edx-platform,jazkarta/edx-platform-for-isc,solashirai/edx-platform,tiagochiavericosta/edx-platform,yokose-ks/edx-platform,fly19890211/edx-platform,antonve/s4-project-mooc,mushtaqak/edx-platform,hkawasaki/kawasaki-aio8-0,EduPepperPD/pepper2013,CourseTalk/edx-platform,motion2015/edx-platform,edx-solutions/edx-platform,playm2mboy/edx-platform,atsolakid/edx-platform,4eek/edx-platform,vasyarv/edx-platform,kmoocdev/edx-platform,lduarte1991/edx-platform,beni55/edx-platform,wwj718/ANALYSE,beacloudgenius/edx-platform,jzoldak/edx-platform,rismalrv/edx-platform,SravanthiSinha/edx-platform,ferabra/edx-platform,devs1991/test_edx_docmode,benpatterson/edx-platform,jswope00/GAI,prarthitm/edxplatform,EduPepperPDTesting/pepper2013-testing,appsembler/edx-platform,EduPepperPD/pepper2013,sameetb-cuelogic/edx-platform-test,shubhdev/edxOnBaadal,vikas1885/test1,tiagochiavericosta/edx-platform,jelugbo/tundex,Edraak/edraak-platform,devs1991/test_edx_docmode,playm2mboy/edx-platform,benpatterson/edx-platform,appliedx/edx-platform,hamzehd/edx-platform,vismartltd/edx-platform,TsinghuaX/edx-platform,devs1991/test_edx_docmode,atsolakid/edx-platform,alu042/edx-platform,halvertoluke/edx-platform,andyzsf/edx,kamalx/edx-platform,zerobatu/edx-platform,pku9104038/edx-platform,waheedahmed/edx-platform,martynovp/edx-platform,cecep-edu/edx-platform,IONISx/edx-platform,EDUlib/edx-platform,angelapper/edx-platform,mtlchun/edx,rhndg/openedx,chrisndodge/edx-platform,zhenzhai/edx-platform,hkawasaki/kawasaki-aio8-1,louyihua/edx-platform,wwj718/ANALYSE,ahmadiga/min_edx,ubc/edx-platform,cselis86/edx-platform,tanmaykm/edx-platform,jswope00/griffinx,kalebhartje/schoolboost,peterm-itr/edx-platform,a-parhom/edx-platform,sudheerchintala/LearnEraPlatForm,Endika/edx-platform,B-MOOC/edx-platform,dcosentino/edx-platform,appliedx/edx-platform,J861449197/edx-platform,JioEducation/edx-platform,zerobatu/edx-platform,ferabra/edx-platfor
m,pabloborrego93/edx-platform,etzhou/edx-platform,hmcmooc/muddx-platform,eemirtekin/edx-platform,eduNEXT/edunext-platform,torchingloom/edx-platform,cognitiveclass/edx-platform,msegado/edx-platform,rhndg/openedx,mjg2203/edx-platform-seas,stvstnfrd/edx-platform,jelugbo/tundex,jazkarta/edx-platform,rismalrv/edx-platform,TeachAtTUM/edx-platform,dsajkl/reqiop,simbs/edx-platform,synergeticsedx/deployment-wipro,Softmotions/edx-platform,jamesblunt/edx-platform,Endika/edx-platform,amir-qayyum-khan/edx-platform,shubhdev/edxOnBaadal,nikolas/edx-platform,cselis86/edx-platform,miptliot/edx-platform,motion2015/a3,waheedahmed/edx-platform,dsajkl/123,xinjiguaike/edx-platform,inares/edx-platform,IONISx/edx-platform,BehavioralInsightsTeam/edx-platform,TsinghuaX/edx-platform,gymnasium/edx-platform,pdehaye/theming-edx-platform,jazkarta/edx-platform,longmen21/edx-platform,tanmaykm/edx-platform,solashirai/edx-platform,Endika/edx-platform,Unow/edx-platform,xingyepei/edx-platform,chand3040/cloud_that,gymnasium/edx-platform,nttks/jenkins-test,a-parhom/edx-platform,nagyistoce/edx-platform,atsolakid/edx-platform,J861449197/edx-platform,mahendra-r/edx-platform,cyanna/edx-platform,kmoocdev2/edx-platform,pepeportela/edx-platform,mjg2203/edx-platform-seas,prarthitm/edxplatform,bigdatauniversity/edx-platform,halvertoluke/edx-platform,nanolearning/edx-platform,romain-li/edx-platform,shabab12/edx-platform,halvertoluke/edx-platform,zubair-arbi/edx-platform,edx/edx-platform,polimediaupv/edx-platform,antonve/s4-project-mooc,hkawasaki/kawasaki-aio8-2,dcosentino/edx-platform,caesar2164/edx-platform,wwj718/edx-platform,kmoocdev/edx-platform,OmarIthawi/edx-platform,xinjiguaike/edx-platform,Shrhawk/edx-platform,abdoosh00/edx-rtl-final,JCBarahona/edX,ak2703/edx-platform,mushtaqak/edx-platform,jamiefolsom/edx-platform,prarthitm/edxplatform,peterm-itr/edx-platform,valtech-mooc/edx-platform,dkarakats/edx-platform,nikolas/edx-platform,TeachAtTUM/edx-platform,LearnEra/LearnEraPlaftform,dcosentino/edx-platform,edu
NEXT/edunext-platform,jazztpt/edx-platform,inares/edx-platform,edx/edx-platform,Stanford-Online/edx-platform,iivic/BoiseStateX,rationalAgent/edx-platform-custom,ampax/edx-platform,eduNEXT/edx-platform,proversity-org/edx-platform,LICEF/edx-platform,fintech-circle/edx-platform,motion2015/a3,hkawasaki/kawasaki-aio8-2,edry/edx-platform,DNFcode/edx-platform,abdoosh00/edraak,zhenzhai/edx-platform,xingyepei/edx-platform,nagyistoce/edx-platform,motion2015/a3,itsjeyd/edx-platform,mushtaqak/edx-platform,y12uc231/edx-platform,DefyVentures/edx-platform,longmen21/edx-platform,chauhanhardik/populo,SravanthiSinha/edx-platform,rationalAgent/edx-platform-custom,dsajkl/123,nanolearningllc/edx-platform-cypress,EduPepperPDTesting/pepper2013-testing,antonve/s4-project-mooc,chauhanhardik/populo,syjeon/new_edx,caesar2164/edx-platform,romain-li/edx-platform,kursitet/edx-platform,Livit/Livit.Learn.EdX,motion2015/a3,mitocw/edx-platform,DefyVentures/edx-platform,edx/edx-platform,chrisndodge/edx-platform,DefyVentures/edx-platform,alexthered/kienhoc-platform,TsinghuaX/edx-platform,doganov/edx-platform,deepsrijit1105/edx-platform,synergeticsedx/deployment-wipro,dcosentino/edx-platform,appsembler/edx-platform,appsembler/edx-platform,devs1991/test_edx_docmode,wwj718/ANALYSE,mtlchun/edx,gymnasium/edx-platform,torchingloom/edx-platform,msegado/edx-platform,morpheby/levelup-by,hkawasaki/kawasaki-aio8-2,bdero/edx-platform,Endika/edx-platform,jolyonb/edx-platform,nanolearning/edx-platform,eduNEXT/edx-platform,apigee/edx-platform,analyseuc3m/ANALYSE-v1,RPI-OPENEDX/edx-platform,edx-solutions/edx-platform,shabab12/edx-platform,leansoft/edx-platform,kalebhartje/schoolboost,kursitet/edx-platform,bitifirefly/edx-platform,procangroup/edx-platform,peterm-itr/edx-platform,devs1991/test_edx_docmode,raccoongang/edx-platform,carsongee/edx-platform,appsembler/edx-platform,eestay/edx-platform,10clouds/edx-platform,mcgachey/edx-platform,eemirtekin/edx-platform,kamalx/edx-platform,jamesblunt/edx-platform,etzhou/edx-pl
atform,rationalAgent/edx-platform-custom,MSOpenTech/edx-platform,deepsrijit1105/edx-platform,teltek/edx-platform,apigee/edx-platform,auferack08/edx-platform,mtlchun/edx,AkA84/edx-platform,shubhdev/edx-platform,ahmadiga/min_edx,dcosentino/edx-platform,RPI-OPENEDX/edx-platform,Edraak/circleci-edx-platform,ahmadiga/min_edx,IndonesiaX/edx-platform,Shrhawk/edx-platform,eduNEXT/edunext-platform,4eek/edx-platform,fintech-circle/edx-platform,shashank971/edx-platform,nttks/edx-platform,peterm-itr/edx-platform,rue89-tech/edx-platform,B-MOOC/edx-platform,Edraak/circleci-edx-platform,edx-solutions/edx-platform,kxliugang/edx-platform,CredoReference/edx-platform,teltek/edx-platform,pabloborrego93/edx-platform,Edraak/edraak-platform,eestay/edx-platform,rismalrv/edx-platform,ubc/edx-platform,kxliugang/edx-platform,xingyepei/edx-platform,BehavioralInsightsTeam/edx-platform,shubhdev/openedx,jswope00/GAI,Shrhawk/edx-platform,olexiim/edx-platform,WatanabeYasumasa/edx-platform,appliedx/edx-platform,zadgroup/edx-platform,jbzdak/edx-platform,xingyepei/edx-platform,olexiim/edx-platform,nttks/jenkins-test,ampax/edx-platform-backup |
b583c5fb00d1ebfa0458a6233be85d8b56173abf | python/printbag.py | python/printbag.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Print a rosbag file.
"""
import sys
import logging
import numpy as np
# suppress logging warnings due to rospy
logging.basicConfig(filename='/dev/null')
import rosbag
from antlia.dtype import LIDAR_CONVERTED_DTYPE
def print_bag(bag, topics=None):
if topics is None:
#topics = ['/tf', '/scan']
topics = ['/scan', '/flagbutton_pressed']
for message in bag.read_messages(topics=topics):
print(message)
if __name__ == '__main__':
if len(sys.argv) < 2:
print(('Usage: {} <rosbag> \n\n'
'Print contents of rosbag file.'
).format(__file__))
sys.exit(1)
outfile = None
filename = sys.argv[1]
with rosbag.Bag(filename) as bag:
print_bag(bag)
sys.exit()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Print a rosbag file.
"""
import sys
import logging
# suppress logging warnings due to rospy
logging.basicConfig(filename='/dev/null')
import rosbag
def print_bag(bag, topics=None):
for message in bag.read_messages(topics=topics):
print(message)
if __name__ == '__main__':
if len(sys.argv) < 2:
print(('Usage: {} [topics] <rosbag> \n\n'
'topics:\tcomma-separated list of topics\n\n'
'Print contents of rosbag file. If topics is not provided, \n'
'all topics are printed\n'
).format(__file__))
sys.exit(1)
topics = None
if len(sys.argv) == 3:
topics = [t.strip() for t in sys.argv[1].split(',')]
filename = sys.argv[2]
else:
filename = sys.argv[1]
with rosbag.Bag(filename) as bag:
print_bag(bag, topics)
sys.exit()
| Add argument to specify bag topics | Add argument to specify bag topics
| Python | bsd-2-clause | oliverlee/antlia |
ca06bf1d52cd51ccec178c98ad407bfe59f1ada1 | strobe.py | strobe.py | import RPi.GPIO as GPIO
from time import sleep
def onoff(period, pin):
"""Symmetric square wave, equal time on/off"""
half_cycle = period / 2.0
GPIO.output(pin, GPIO.HIGH)
sleep(half_cycle)
GPIO.output(pin, GPIO.LOW)
sleep(half_cycle)
def strobe(freq, dur, pin):
nflashes = freq * dur
seconds_to_sleep = 1.0 / freq
# Use Raspberry-Pi board pin numbers. In other words, 11 means pin
# number 11, not GPIO 11.
GPIO.setmode(GPIO.BOARD)
GPIO.setup(pin, GPIO.OUT) # requires root?
for i in range(nflashes):
onoff(seconds_to_sleep, pin)
GPIO.cleanup()
| # Adapted from code by Rahul Kar
# http://www.rpiblog.com/2012/09/using-gpio-of-raspberry-pi-to-blink-led.html
import RPi.GPIO as GPIO
from time import sleep
def onoff(ontime, offtime, pin):
GPIO.output(pin, GPIO.HIGH)
sleep(ontime)
GPIO.output(pin, GPIO.LOW)
sleep(offtime)
def strobe(freq, dur, pin):
nflashes = freq * dur
period = 1.0 / freq
# Use Raspberry-Pi board pin numbers. In other words, 11 means pin
# number 11, not GPIO 11.
GPIO.setmode(GPIO.BOARD)
GPIO.setup(pin, GPIO.OUT) # requires root?
for i in range(nflashes):
onoff(period/2.0, period/2.0, pin)
GPIO.cleanup()
| Make onoff function more versatile | Make onoff function more versatile
| Python | mit | zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie |
aca17ff2fddd35bd50d78d62dc6dab7e47fb8e4e | controllers/default.py | controllers/default.py | import os
def index():
def GET():
return locals()
@request.restful()
def api():
response.view = 'generic.json'
def GET(resource,resource_id):
if not resource=='study': raise HTTP(400)
# return the correct nexson of study_id
return _get_nexson(resource_id)
def POST(resource,resource_id):
if not resource=='study': raise HTTP(400)
# overwrite the nexson of study_id with the POSTed data
# 1) verify that it is valid json
# 2) Update local treenexus git submodule at ./treenexus
# 3) See if the hash of the current value of the file matches the hash of the POSTed data. If so, do nothing and return successfully.
# 4) If not, overwrite the correct nexson file on disk
# 5) Make a git commit with the updated nexson (add as much automated metadata to the commit message as possible)
# 6) return successfully
return dict()
return locals()
def _get_nexson(study_id):
this_dir = os.path.dirname(os.path.abspath(__file__))
# the internal file structure will change soon to study/study_id/study_id-N.json, where N=0,1,2,3...
try:
filename = this_dir + "/../treenexus/study/0/" + study_id + ".json"
nexson_file = open(filename,'r')
except IOError:
return '{}'
return nexson_file.readlines()
| import os
def index():
def GET():
return locals()
@request.restful()
def api():
response.view = 'generic.json'
def GET(resource,resource_id):
if not resource=='study': raise HTTP(400)
# return the correct nexson of study_id
return _get_nexson(resource_id)
def POST(resource,resource_id):
if not resource=='study': raise HTTP(400)
# overwrite the nexson of study_id with the POSTed data
# 1) verify that it is valid json
# 2) Update local treenexus git submodule at ./treenexus
# 3) See if the hash of the current value of the file matches the hash of the POSTed data. If so, do nothing and return successfully.
# 4) If not, overwrite the correct nexson file on disk
# 5) Make a git commit with the updated nexson (add as much automated metadata to the commit message as possible)
# 6) return successfully
return dict()
return locals()
def _get_nexson(study_id):
this_dir = os.path.dirname(os.path.abspath(__file__))
try:
filename = this_dir + "/../treenexus/study/" + study_id + "/" + study_id + ".json"
nexson_file = open(filename,'r')
except IOError:
return '{}'
return nexson_file.readlines()
| Use the new location of study NexSON | Use the new location of study NexSON
Each study now has a distinct directory. Currently we only plan to store
a single JSON file in each directory, until one becomes larger than 50MB.
Additionally, this allows various metadata/artifacts about a study to live
near the actually study data.
| Python | bsd-2-clause | OpenTreeOfLife/phylesystem-api,OpenTreeOfLife/phylesystem-api,OpenTreeOfLife/phylesystem-api |
c9355e9ae6815b40dce72df9a9a8b1d8f169a8a3 | tests/test_morando_floripa.py | tests/test_morando_floripa.py | # coding: utf-8
from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from rest_framework.test import APIRequestFactory, APIClient
class TestAPIViews(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.client = APIClient()
self.user = User.objects.create_user('[email protected]', password='testing')
self.user.save()
self.token = Token.objects.filter(user=self.user)
def _require_login(self):
self.client.login(username='testuser', password='testing')
def test_login_account(self):
"""
Testa o login com um usuario de testes.
"""
response = self.client.post(path='/rest-auth/login/', data={"username": '[email protected]', "password": 'testing'}, format='json')
self.assertEqual(response.status_code, 200, 'Expected Response Code 200, received {0} instead.'.format(response.status_code))
def test_login_account_fail(self):
"""
Testa o login nao autorizado com um usuario de testes.
"""
response = self.client.post('/rest-auth/login/',
{"username": '[email protected]', "password": 'testings'},
format='json')
self.assertEqual(response.status_code, 400,
'Expected Response Code 400, received {0} instead.'.format(response.status_code))
| # coding: utf-8
from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
from rest_framework.test import APIRequestFactory, APIClient
class TestAPIViews(TestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.client = APIClient()
self.user = User.objects.create_user('[email protected]', password='testing')
self.user.save()
self.token = Token.objects.filter(user=self.user)
def _require_login(self):
self.client.login(username='testuser', password='testing')
def test_login_account(self):
"""
Testa o login com um usuario de testes.
"""
response = self.client.post(path='/v1/rest-auth/login/', data={"username": '[email protected]', "password": 'testing'}, format='json')
self.assertEqual(response.status_code, 200, 'Expected Response Code 200, received {0} instead.'.format(response.status_code))
def test_login_account_fail(self):
"""
Testa o login nao autorizado com um usuario de testes.
"""
response = self.client.post('/v1/rest-auth/login/',
{"username": '[email protected]', "password": 'testings'},
format='json')
self.assertEqual(response.status_code, 400,
'Expected Response Code 400, received {0} instead.'.format(response.status_code))
| Fix na rota dos testes | Fix na rota dos testes
| Python | mit | AlexandreProenca/backend-morandofloripa,AlexandreProenca/backend-morandofloripa,AlexandreProenca/backend-morandofloripa |
a08de1d3c7f7dfc72c8b3b8e9019d1b7b5ad004e | mdtraj/tests/test_load.py | mdtraj/tests/test_load.py | ##############################################################################
# MDTraj: A Python Library for Loading, Saving, and Manipulating
# Molecular Dynamics Trajectories.
# Copyright 2012-2013 Stanford University and the Authors
#
# Authors: Robert McGibbon
# Contributors:
#
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
"""
Tests of generic loading functionality.
"""
from mdtraj import load
from mdtraj.testing import get_fn
def test_load_single():
"""
Just check for any raised errors coming from loading a single file.
"""
load(get_fn('frame0.pdb'))
def test_load_single_list():
"""
See if a single-element list of files is successfully loaded.
"""
load([get_fn('frame0.pdb')])
def test_load_many_list():
"""
See if a multi-element list of files is successfully loaded.
"""
traj = load(2 * [get_fn('frame0.pdb')], discard_overlapping_frames=False)
assert traj.n_frames == 2
| from mdtraj import load
from mdtraj.testing import get_fn
def test_load_single():
"""
Just check for any raised errors coming from loading a single file.
"""
load(get_fn('frame0.pdb'))
def test_load_single_list():
"""
See if a single-element list of files is successfully loaded.
"""
load([get_fn('frame0.pdb')])
def test_load_many_list():
"""
See if a multi-element list of files is successfully loaded.
"""
single = load(get_fn('frame0.pdb'))
double = load(2 * [get_fn('frame0.pdb')], discard_overlapping_frames=False)
assert 2 * single.n_frames == double.n_frames
| Fix test for loading multiple trajectories. | Fix test for loading multiple trajectories.
| Python | lgpl-2.1 | msultan/mdtraj,rmcgibbo/mdtraj,tcmoore3/mdtraj,mdtraj/mdtraj,jchodera/mdtraj,msultan/mdtraj,ctk3b/mdtraj,ctk3b/mdtraj,mdtraj/mdtraj,msultan/mdtraj,mattwthompson/mdtraj,jchodera/mdtraj,jchodera/mdtraj,leeping/mdtraj,gph82/mdtraj,rmcgibbo/mdtraj,gph82/mdtraj,jchodera/mdtraj,mdtraj/mdtraj,leeping/mdtraj,tcmoore3/mdtraj,ctk3b/mdtraj,gph82/mdtraj,leeping/mdtraj,rmcgibbo/mdtraj,mattwthompson/mdtraj,ctk3b/mdtraj,mattwthompson/mdtraj,tcmoore3/mdtraj,mattwthompson/mdtraj,ctk3b/mdtraj,tcmoore3/mdtraj,dwhswenson/mdtraj,dwhswenson/mdtraj,leeping/mdtraj,msultan/mdtraj,dwhswenson/mdtraj |
4456fbdb36b82608501107c4060825377a75c0bf | ColorHistograms-python/color_histogram_cuda.py | ColorHistograms-python/color_histogram_cuda.py | import pycuda.autoinit
import pycuda.driver as drv
import numpy
from scipy import misc
from color_histogram_cuda_module import histogram_atomics, histogram_accum
def histogram(image_path, num_bins):
image = misc.imread(image_path)
bin_size = 256 / num_bins
# calculate image dimensions
(w, h, c) = image.shape
# reinterpret image with 4-byte type
image = image.view(numpy.uint32)
dest = numpy.zeros((bin_size, c), numpy.uint32)
parts = num_bins * c
block1 = (32, 4, 1)
grid1 = (16, 16, 1)
partial = numpy.zeros(grid1[0] * grid1[1] * parts, numpy.uint32)
block2 = (128,1, 1)
grid2 = ((c * num_bins + block2[0] - 1) / block2[0], 1, 1)
W = numpy.dtype(numpy.int32).type(w)
H = numpy.dtype(numpy.int32).type(h)
histogram_atomics(drv.In(image), W, H, drv.Out(partial), block=block1, grid=grid1)
sz = numpy.dtype(numpy.int32).type(grid1[0] * grid1[1])
histogram_accum(drv.In(partial), sz, drv.Out(dest), block=block2, grid=grid2)
return dest
| import pycuda.autoinit
import pycuda.driver as drv
import numpy
from scipy import misc
from color_histogram_cuda_module import histogram_atomics, histogram_accum
def histogram(image_path, num_bins):
image = misc.imread(image_path)
bin_size = 256 / num_bins
# calculate image dimensions
(w, h, c) = image.shape
# reinterpret image with 4-byte type
image = image.view(numpy.uint32)
dest = numpy.zeros((c, bin_size), numpy.uint32)
parts = num_bins * c
block1 = (32, 4, 1)
grid1 = (16, 16, 1)
partial = numpy.zeros(grid1[0] * grid1[1] * parts, numpy.uint32)
block2 = (128,1, 1)
grid2 = ((c * num_bins + block2[0] - 1) / block2[0], 1, 1)
W = numpy.dtype(numpy.int32).type(w)
H = numpy.dtype(numpy.int32).type(h)
histogram_atomics(drv.In(image), W, H, drv.Out(partial), block=block1, grid=grid1)
sz = numpy.dtype(numpy.int32).type(grid1[0] * grid1[1])
histogram_accum(drv.In(partial), sz, drv.Out(dest), block=block2, grid=grid2)
return dest
| Change dimensions of output in python wrapper | Change dimensions of output in python wrapper
| Python | bsd-3-clause | kwadraterry/GPGPU-LUT,kwadraterry/GPGPU-LUT,kwadraterry/GPGPU-LUT,kwadraterry/GPGPU-LUT,kwadraterry/GPGPU-LUT |
b3400070d47d95bfa2eeac3a9f696b8957d88128 | conjureup/controllers/clouds/tui.py | conjureup/controllers/clouds/tui.py | from conjureup import controllers, events, juju, utils
from conjureup.app_config import app
from conjureup.consts import cloud_types
from .common import BaseCloudController
class CloudsController(BaseCloudController):
def __controller_exists(self, controller):
return juju.get_controller(controller) is not None
def finish(self):
if app.argv.model:
app.provider.model = app.argv.model
else:
app.provider.model = utils.gen_model()
return controllers.use('credentials').render()
async def _check_lxd_compat(self):
utils.info(
"Summoning {} to {}".format(app.argv.spell, app.provider.cloud))
if app.provider.cloud_type == cloud_types.LOCALHOST:
try:
app.provider._set_lxd_dir_env()
client_compatible = await app.provider.is_client_compatible()
server_compatible = await app.provider.is_server_compatible()
if client_compatible and server_compatible:
self.finish()
else:
utils.error("LXD Server or LXC client not compatible")
events.Shutdown.set(1)
except app.provider.LocalhostError:
raise
def render(self):
app.loop.create_task(self._check_lxd_compat())
_controller_class = CloudsController
| from conjureup import controllers, events, juju, utils
from conjureup.app_config import app
from conjureup.consts import cloud_types
from .common import BaseCloudController
class CloudsController(BaseCloudController):
def __controller_exists(self, controller):
return juju.get_controller(controller) is not None
def finish(self):
if app.argv.model:
app.provider.model = app.argv.model
else:
app.provider.model = utils.gen_model()
return controllers.use('credentials').render()
async def _check_lxd_compat(self):
utils.info(
"Summoning {} to {}".format(app.argv.spell, app.provider.cloud))
if app.provider.cloud_type == cloud_types.LOCALHOST:
try:
app.provider._set_lxd_dir_env()
client_compatible = await app.provider.is_client_compatible()
server_compatible = await app.provider.is_server_compatible()
if client_compatible and server_compatible:
self.finish()
else:
utils.error("LXD Server or LXC client not compatible")
events.Shutdown.set(1)
except app.provider.LocalhostError:
raise
self.finish()
def render(self):
app.loop.create_task(self._check_lxd_compat())
_controller_class = CloudsController
| Fix issue where non localhost headless clouds werent calling finish | Fix issue where non localhost headless clouds werent calling finish
Signed-off-by: Adam Stokes <49c255c1d074742f60d19fdba5e2aa5a34add567@users.noreply.github.com>
| Python | mit | conjure-up/conjure-up,Ubuntu-Solutions-Engineering/conjure,ubuntu/conjure-up,Ubuntu-Solutions-Engineering/conjure,conjure-up/conjure-up,ubuntu/conjure-up |
8ed7983859e2b7031b736b92b875040fa13fb39c | blockbuster/bb_logging.py | blockbuster/bb_logging.py | import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh) | import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh) | Remove square brackets around log level in log ouput | Remove square brackets around log level in log ouput
| Python | mit | mattstibbs/blockbuster-server,mattstibbs/blockbuster-server |
b123001ea0d4fb475184727c39eafd5b46cc0964 | shopit_app/urls.py | shopit_app/urls.py | from django.conf import settings
from django.conf.urls import include, patterns, url
from rest_framework_nested import routers
from shopit_app.views import IndexView
from authentication_app.views import AccountViewSet, LoginView
router = routers.SimpleRouter()
router.register(r'accounts', AccountViewSet)
urlpatterns = patterns('',
# API endpoints
url(r'^api/v1/', include(router.urls)),
url(r'^api/v1/auth/login/$', LoginView.as_view(), name='login'),
url('^.*$', IndexView.as_view(), name='index'),
)
| from django.conf import settings
from django.conf.urls import include, patterns, url
from rest_framework_nested import routers
from shopit_app.views import IndexView
from authentication_app.views import AccountViewSet, LoginView, LogoutView
router = routers.SimpleRouter()
router.register(r'accounts', AccountViewSet)
urlpatterns = patterns('',
# API endpoints
url(r'^api/v1/', include(router.urls)),
url(r'^api/v1/auth/logout/$', LogoutView.as_view(), name='logout'),
url(r'^api/v1/auth/login/$', LoginView.as_view(), name='login'),
url('^.*$', IndexView.as_view(), name='index'),
)
| Add the endpoint for the logout. | Add the endpoint for the logout.
| Python | mit | mvpgomes/shopit-app,mvpgomes/shopit-app,mvpgomes/shopit-app,mvpgomes/shopit-app |
0d7921b4dcf5e3b511fdb54fc30ebc0547b14d47 | django_dzenlog/urls.py | django_dzenlog/urls.py | from django.conf.urls.defaults import *
from models import GeneralPost
from feeds import LatestPosts
post_list = {
'queryset': GeneralPost.objects.all(),
}
feeds = {
'all': LatestPosts,
}
urlpatterns = patterns('django.views.generic',
(r'^(?P<slug>[a-z0-9-]+)/$', 'list_detail.object_detail', post_list, 'dzenlog-post-details'),
(r'^$', 'list_detail.object_list', post_list, 'dzenlog-post-list'),
)
urlpatterns += patterns('django.contrib.syndication.views',
(r'^rss/(?P<url>.*)/$', 'feed', {'feed_dict': feeds}, 'dzenlog-feeds'),
)
| from django.conf.urls.defaults import *
from models import GeneralPost
from feeds import latest
post_list = {
'queryset': GeneralPost.objects.all(),
}
feeds = {
'all': latest(GeneralPost, 'dzenlog-post-list'),
}
urlpatterns = patterns('django.views.generic',
(r'^(?P<slug>[a-z0-9-]+)/$', 'list_detail.object_detail', post_list, 'dzenlog-post-details'),
(r'^$', 'list_detail.object_list', post_list, 'dzenlog-post-list'),
)
urlpatterns += patterns('django.contrib.syndication.views',
(r'^rss/(?P<url>.*)/$', 'feed', {'feed_dict': feeds}, 'dzenlog-feeds'),
)
| Use 'latest' to generate feed for GeneralPost. | Use 'latest' to generate feed for GeneralPost.
| Python | bsd-3-clause | svetlyak40wt/django-dzenlog |
acc6ae5fcdc7cc79b8e6ca76088080c3b73ed4f1 | django_rocket/admin.py | django_rocket/admin.py | from django.contrib import admin
from django_rocket.models import Subscriber
class SubscriberAdmin(admin.ModelAdmin):
list_display = ('email', 'created')
date_hierarchy = ('created',)
admin.site.register(Subscriber, SubscriberAdmin) | from django.contrib import admin
from django_rocket.models import Subscriber
class SubscriberAdmin(admin.ModelAdmin):
list_display = ('email', 'created')
date_hierarchy = 'created'
admin.site.register(Subscriber, SubscriberAdmin) | Correct mention to date hierachy | Correct mention to date hierachy
| Python | mit | mariocesar/django-rocket,mariocesar/django-rocket |
7269322106911fcb1fb71160421fc3e011fbee1d | byceps/config_defaults.py | byceps/config_defaults.py | """
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_DASHBOARD_POLL_INTERVAL = 2500
RQ_DASHBOARD_WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
| """
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_DASHBOARD_POLL_INTERVAL = 2500
RQ_DASHBOARD_WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
| Remove superseded config default for `ROOT_REDIRECT_STATUS_CODE` | Remove superseded config default for `ROOT_REDIRECT_STATUS_CODE`
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps |
cab0f9ea3471cf88dd03da7a243ae55579b44b65 | client.py | client.py | #!/usr/env/bin python
import RPi.GPIO as io
import requests
import sys
class Switch(object):
def __init__(self, **kwargs):
self.pin = kwargs["pin"]
io.setup(self.pin, io.IN)
@property
def is_on(self):
return io.input(self.pin)
PINS = (8, 16, 18)
switches = set()
def has_free():
global switches
return not all([s.is_on for s in switches])
def call_api(is_on):
r = requests.post("SERVER_ADDRESS",
params={"is_free": "yes" if is_on else "no"})
if __name__ == "__main__":
io.setmode(io.BOARD)
for pin in PINS:
switches.add(Switch(pin=pin))
try:
previous_state = has_free()
while True:
state = has_free()
if state is not previous_state:
call_api(state)
previous_state = state
except KeyboardInterrupt:
pass
| #!/usr/env/bin python
import RPi.GPIO as io
import sys
class Switch(object):
def __init__(self, **kwargs):
self.pin = kwargs["pin"]
io.setup(self.pin, io.IN)
@property
def is_on(self):
return io.input(self.pin)
PINS = (8, 16, 18)
server_url = sys.argv[1]
switches = set()
def has_free():
global switches
return not all([s.is_on for s in switches])
def call_api(url, is_on):
r = requests.post(url, params={"is_free": "yes" if is_on else "no"})
if __name__ == "__main__":
io.setmode(io.BOARD)
for pin in PINS:
switches.add(Switch(pin=pin))
try:
previous_state = has_free()
while True:
state = has_free()
if state is not previous_state:
call_api(server_url, state)
previous_state = state
except KeyboardInterrupt:
pass
| Set server URL with command line argument | Set server URL with command line argument
| Python | mit | madebymany/isthetoiletfree |
22ae576872e0cbe2c42e9ec4bddc050a22780531 | bika/lims/upgrade/to1010.py | bika/lims/upgrade/to1010.py | import logging
from Acquisition import aq_base
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def addBatches(tool):
"""
"""
portal = aq_parent(aq_inner(tool))
portal_catalog = getToolByName(portal, 'portal_catalog')
setup = portal.portal_setup
# reimport Types Tool to add BatchFolder and Batch
setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
# reimport Workflows to add bika_batch_workflow
setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow')
typestool = getToolByName(portal, 'portal_types')
workflowtool = getToolByName(portal, 'portal_workflow')
# Add the BatchFolder at /batches
typestool.constructContent(type_name="BatchFolder",
container=portal,
id='batches',
title='Batches')
obj = portal['batches']
obj.unmarkCreationFlag()
obj.reindexObject()
# and place it after ClientFolder
portal.moveObjectToPosition('batches', portal.objectIds().index('clients'))
# add BatchID to all AnalysisRequest objects.
# When the objects are reindexed, BatchUID will also be populated
proxies = portal_catalog(portal_type="AnalysiRequest")
ars = (proxy.getObject() for proxy in proxies)
for ar in ars:
ar.setBatchID(None)
return True
| import logging
from Acquisition import aq_base
from Acquisition import aq_inner
from Acquisition import aq_parent
from Products.CMFCore.utils import getToolByName
def addBatches(tool):
"""
"""
portal = aq_parent(aq_inner(tool))
portal_catalog = getToolByName(portal, 'portal_catalog')
setup = portal.portal_setup
# reimport Types Tool to add BatchFolder and Batch
setup.runImportStepFromProfile('profile-bika.lims:default', 'typeinfo')
# reimport Workflows to add bika_batch_workflow
setup.runImportStepFromProfile('profile-bika.lims:default', 'workflow')
typestool = getToolByName(portal, 'portal_types')
workflowtool = getToolByName(portal, 'portal_workflow')
# Add the BatchFolder at /batches
typestool.constructContent(type_name="BatchFolder",
container=portal,
id='batches',
title='Batches')
obj = portal['batches']
obj.unmarkCreationFlag()
obj.reindexObject()
# and place it after ClientFolder
portal.moveObjectToPosition('batches', portal.objectIds().index('clients'))
# add Batch to all AnalysisRequest objects.
# When the objects are reindexed, BatchUID will also be populated
proxies = portal_catalog(portal_type="AnalysiRequest")
ars = (proxy.getObject() for proxy in proxies)
for ar in ars:
ar.setBatch(None)
return True
| Fix 1010 upgrade step (setBatchID -> setBatch) | Fix 1010 upgrade step (setBatchID -> setBatch)
| Python | agpl-3.0 | DeBortoliWines/Bika-LIMS,DeBortoliWines/Bika-LIMS,DeBortoliWines/Bika-LIMS,rockfruit/bika.lims,anneline/Bika-LIMS,rockfruit/bika.lims,anneline/Bika-LIMS,veroc/Bika-LIMS,anneline/Bika-LIMS,labsanmartin/Bika-LIMS,veroc/Bika-LIMS,labsanmartin/Bika-LIMS,veroc/Bika-LIMS,labsanmartin/Bika-LIMS |
384beaa77e2eaad642ec7f764acd09c2c3e04350 | res_company.py | res_company.py | from openerp.osv import osv, fields
from openerp.tools.translate import _
class res_company(osv.Model):
_inherit = "res.company"
_columns = {
'remittance_letter_top': fields.text(
_('Remittance Letter - top message'),
help=_('Message to write at the top of Remittance Letter '
'reports. Available variables: "$iban" for the IBAN; "$date" for '
'the payment date. HTML tags are allowed.')
),
'remittance_letter_bottom': fields.text(
_('Remittance Letter - bottom message'),
help=_('Message to write at the bottom of Remittance Letter '
'reports. HTML tags are allowed.')
),
}
| from openerp.osv import osv, fields
from openerp.tools.translate import _
class res_company(osv.Model):
_inherit = "res.company"
_columns = {
'remittance_letter_top': fields.text(
_('Remittance Letter - top message'),
help=_('Message to write at the top of Remittance Letter '
'reports. Available variables: "$iban" for the IBAN; "$date" for '
'the payment date. HTML tags are allowed.'),
translate=True),
'remittance_letter_bottom': fields.text(
_('Remittance Letter - bottom message'),
help=_('Message to write at the bottom of Remittance Letter '
'reports. HTML tags are allowed.'),
translate=True),
}
| Make Remittance Letter config messages translatable | Make Remittance Letter config messages translatable
| Python | agpl-3.0 | xcgd/account_streamline |
550a3f2b402f841d740cdbd6a25e832aab0fd974 | oneflow/settings/chani.py | oneflow/settings/chani.py | # -*- coding: utf-8 -*-
# Settings for 1flow.net (local development)
MAIN_SERVER = '127.0.0.1'
from sparks.django.settings import include_snippets
include_snippets(
(
# Don't forget to deactivate nobother when we'ge got time to
# fix other's bugs. Just kidding…
'000_nobother',
'00_development',
# Activate this to test 404/500…
#'00_production',
'1flow_io',
'common',
'constance',
'api_keys',
'databases',
'cache',
'celery',
'mail_development',
'common_development',
'rosetta',
#'djdt',
),
__file__, globals()
)
ALLOWED_HOSTS += [
'lil.1flow.io',
'chani.licorn.org',
'duncan.licorn.org',
'leto.licorn.org',
'gurney.licorn.org',
]
# We need an official public host name for all `social_auth` backends.
SITE_DOMAIN = 'lil.1flow.io'
EMAIL_HOST = 'gurney'
#EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
#EMAIL_FILE_PATH = '/tmp/1flow.mail'
| # -*- coding: utf-8 -*-
# Settings for 1flow.net (local development)
MAIN_SERVER = '127.0.0.1'
import socket
from sparks.django.settings import include_snippets
include_snippets(
(
# Don't forget to deactivate nobother when we'ge got time to
# fix other's bugs. Just kidding…
'000_nobother',
'00_development',
# Activate this to test 404/500…
#'00_production',
'1flow_io',
'common',
'constance',
'api_keys',
'databases',
'cache',
'celery',
'mail_development',
'common_development',
'rosetta',
#'djdt',
),
__file__, globals()
)
ALLOWED_HOSTS += [
'lil.1flow.io',
'big.1flow.io',
'chani.licorn.org',
'duncan.licorn.org',
'leto.licorn.org',
'gurney.licorn.org',
]
# We need an official public host name for all `social_auth` backends.
if socket.gethostname().lower() == 'duncan':
SITE_DOMAIN = 'big.1flow.io'
else:
SITE_DOMAIN = 'lil.1flow.io'
EMAIL_HOST = 'gurney'
#EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
#EMAIL_FILE_PATH = '/tmp/1flow.mail'
| Make lil&big merged configuration hostname-aware for SITE_DOMAIN, this fixes the bad hostname in JS bookmarklets. | Make lil&big merged configuration hostname-aware for SITE_DOMAIN, this fixes the bad hostname in JS bookmarklets. | Python | agpl-3.0 | 1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow |
a0c656f89829ca18c383f1884e3186103b9e5fa6 | fabfile.py | fabfile.py | from fabric.api import cd, run, task
try:
import fabfile_local
_pyflakes = fabfile_local
except ImportError:
pass
@task
def update():
with cd("~/vagrant-installers"):
run("git pull")
| from fabric.api import cd, env, run, task
try:
import fabfile_local
_pyflakes = fabfile_local
except ImportError:
pass
@task
def update():
with cd("~/vagrant-installers"):
run("git pull")
@task
def all():
"Run the task against all hosts."
for _, value in env.roledefs.iteritems():
env.hosts.extend(value)
@task
def role(name):
"Set the hosts to a specific role."
env.hosts = env.roledefs[name]
| Allow the targetting of specific roles with fabric | Allow the targetting of specific roles with fabric
| Python | mit | redhat-developer-tooling/vagrant-installers,mitchellh/vagrant-installers,chrisroberts/vagrant-installers,mitchellh/vagrant-installers,chrisroberts/vagrant-installers,mitchellh/vagrant-installers,mitchellh/vagrant-installers,chrisroberts/vagrant-installers,chrisroberts/vagrant-installers,redhat-developer-tooling/vagrant-installers,mitchellh/vagrant-installers,chrisroberts/vagrant-installers,redhat-developer-tooling/vagrant-installers,chrisroberts/vagrant-installers,redhat-developer-tooling/vagrant-installers,mitchellh/vagrant-installers |
813dd27a2057d2e32726ff6b43ab8ca1411303c7 | fabfile.py | fabfile.py | from fabric.api import *
from fabric.colors import *
env.colorize_errors = True
env.hosts = ['sanaprotocolbuilder.me']
env.user = 'root'
env.project_root = '/opt/sana.protocol_builder'
def prepare_deploy():
local('python sana_builder/manage.py syncdb')
local('python sana_builder/manage.py test')
local('git push')
def deploy():
with cd(env.project_root), prefix('workon sana_protocol_builder'):
print(green('Pulling latest revision...'))
run('git pull')
print(green('Installing dependencies...'))
run('pip install -qr requirements.txt')
print(green('Migrating database...'))
run('python sana_builder/manage.py syncdb')
print(green('Restarting gunicorn...'))
run('supervisorctl restart gunicorn')
| from fabric.api import *
from fabric.colors import *
env.colorize_errors = True
env.hosts = ['sanaprotocolbuilder.me']
env.user = 'root'
env.project_root = '/opt/sana.protocol_builder'
def prepare_deploy():
local('python sana_builder/manage.py syncdb')
local('python sana_builder/manage.py test')
local('git push')
def deploy():
prepare_deploy()
with cd(env.project_root), prefix('workon sana_protocol_builder'):
print(green('Pulling latest revision...'))
run('git pull')
print(green('Installing dependencies...'))
run('pip install -qr requirements.txt')
print(green('Migrating database...'))
run('python sana_builder/manage.py syncdb')
print(green('Restarting gunicorn...'))
run('supervisorctl restart gunicorn')
| Prepare for deploy in deploy script. | Prepare for deploy in deploy script.
| Python | bsd-3-clause | SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder |
e8f8407ee422375af07027af2846a95c9cfaad37 | fabfile.py | fabfile.py | #!/usr/bin/env python
from fabric.api import env, run, sudo, task
from fabric.context_managers import cd, prefix
from fabric.contrib.project import rsync_project
env.use_ssh_config = True
home = '~/jarvis2'
@task
def pull_code():
with cd(home):
run('git pull --rebase')
@task
def push_code():
rsync_project(local_dir='.', remote_dir=home, exclude=('.git', '.vagrant'),
extra_opts='--filter=":- .gitignore"')
@task
def update_dependencies():
with prefix('workon jarvis2'):
run(('pip install --quiet --use-mirrors --upgrade'
' -r {home}/requirements.txt').format(home=home))
@task
def restart_server():
sudo('/etc/init.d/uwsgi restart', pty=False)
@task
def restart_client():
run('pkill -x midori')
@task(default=True)
def deploy(update_deps=False):
push_code()
if update_deps:
update_dependencies()
restart_server()
restart_client()
@task
def full_deploy():
deploy(True)
| #!/usr/bin/env python
from fabric.api import env, run, sudo, task
from fabric.context_managers import cd, prefix
from fabric.contrib.project import rsync_project
env.use_ssh_config = True
home = '~/jarvis2'
@task
def pull_code():
with cd(home):
run('git pull --rebase')
@task
def push_code():
rsync_project(local_dir='.', remote_dir=home, exclude=('.git', '.vagrant'),
extra_opts='--checksum --filter=":- .gitignore"')
@task
def update_dependencies():
with prefix('workon jarvis2'):
run(('pip install --quiet --use-mirrors --upgrade'
' -r {home}/requirements.txt').format(home=home))
@task
def restart_server():
sudo('/etc/init.d/uwsgi restart', pty=False)
@task
def restart_client():
run('pkill -x midori')
@task(default=True)
def deploy(update_deps=False):
push_code()
if update_deps:
update_dependencies()
restart_server()
restart_client()
@task
def full_deploy():
deploy(True)
| Use checksum when pushing code with rsync | Use checksum when pushing code with rsync
| Python | mit | Foxboron/Frank,Foxboron/Frank,mpolden/jarvis2,Foxboron/Frank,martinp/jarvis2,martinp/jarvis2,mpolden/jarvis2,mpolden/jarvis2,martinp/jarvis2 |
b374221d8d0e902494066d666570c1a882c962bc | s3img_magic.py | s3img_magic.py | from IPython.display import Image
import boto
def parse_s3_uri(uri):
if uri.startswith('s3://'):
uri = uri[5:]
return uri.split('/', 1)
def get_s3_key(uri):
bucket_name, key_name = parse_s3_uri(uri)
conn = boto.connect_s3()
bucket = conn.get_bucket(bucket_name)
return bucket.get_key(key_name)
def s3img(uri):
key = get_s3_key(uri)
data = key.get_contents_as_string()
return Image(data=data)
def load_ipython_extension(ipython):
ipython.register_magic_function(s3img, 'line')
| from StringIO import StringIO
from IPython.core.magic import Magics, magics_class, line_magic
from IPython.display import Image
import boto
def parse_s3_uri(uri):
if uri.startswith('s3://'):
uri = uri[5:]
return uri.split('/', 1)
def get_s3_bucket(bucket_name):
conn = boto.connect_s3()
return conn.get_bucket(bucket_name)
def get_s3_key(uri):
bucket_name, key_name = parse_s3_uri(uri)
bucket = get_s3_bucket(bucket_name)
return bucket.get_key(key_name)
def get_or_create_s3_key(uri):
bucket_name, key_name = parse_s3_uri(uri)
bucket = get_s3_bucket(bucket_name)
return bucket.new_key(key_name)
def s3img(uri):
key = get_s3_key(uri)
data = key.get_contents_as_string()
return Image(data=data)
@magics_class
class S3ImageSaver(Magics):
@line_magic
def s3img_save(self, line):
"""BEWARE: this magic will happily overwrite any S3 URI"""
fig_name, uri = line.split(' ', 1)
fig = self.shell.ev(fig_name)
tmp = StringIO()
fig.savefig(tmp)
key = get_or_create_s3_key(uri)
key.set_contents_from_string(tmp.getvalue())
def load_ipython_extension(ipython):
ipython.register_magic_function(s3img, 'line')
ipython.register_magics(S3ImageSaver)
| Add magic to save a Matplotlib figure to S3 | Add magic to save a Matplotlib figure to S3
| Python | mit | AustinRochford/s3img-ipython-magic,AustinRochford/s3img-ipython-magic |
db06039ecb94100bbecb23b5fdee13e306458809 | atlassian/__init__.py | atlassian/__init__.py | import logging
from urllib.parse import urlsplit, urljoin
from requests import get
l = logging.getLogger(__name__)
#TODO: move this somewhere sensible
#TODO: useful error handling (CLI...)
class HTTPClient:
def __init__(self, base, user=None, password=None):
self.base = base
self.user = user
self.password = password
def get(self, url):
request_url = self.base + url
l.debug("Will now get: " + str(request_url))
if self.user is not None:
response = get(request_url, auth=(self.user, self.password))
else:
response = get(request_url)
assert response.status_code is 200, 'Error when requesting {}, response code {}.'.format(request_url, response.status_code)
# TODO: Need better error handling
return response.json()
| import logging
from urllib.parse import urlsplit, urljoin
from requests import get
#TODO: move this somewhere sensible
#TODO: useful error handling (CLI...)
class HTTPClient:
def __init__(self, base, user=None, password=None):
self.base = base
self.user = user
self.password = password
def get(self, url):
urlparts = urlsplit(url)
request_url = urljoin(self.base, urlparts.path)
if urlparts.query is not None:
request_url += "?" + urlparts.query
if self.user is not None:
response = get(request_url, auth=(self.user, self.password))
else:
response = get(request_url)
assert response.status_code is 200, 'Error when requesting {}.'.format(request_url)
return response.json()
| Fix big fuckup from last commit | Fix big fuckup from last commit
| Python | mit | victorhahncastell/atlassian_permissions,victorhahncastell/atlassian_permissions |
c58ac76b2d0e575068a050f87f1cc0eb6ef09014 | autocloud/__init__.py | autocloud/__init__.py | # -*- coding: utf-8 -*-
import ConfigParser
import os
DEBUG = True
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
config = ConfigParser.RawConfigParser()
if DEBUG:
config.read("{PROJECT_ROOT}/config/autocloud.cfg".format(
PROJECT_ROOT=PROJECT_ROOT))
else:
config.read('/etc/autocloud/autocloud.cfg')
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
if DEBUG:
REDIS_CONFIG_FILEPATH = "{PROJECT_ROOT}/config/redis_server.json"
else:
REDIS_CONFIG_FILEPATH = config.get('autocloud', 'redis_config_filepath')
JENKINS_BASE_URL = config.get('jenkins', 'baseurl')
JENKINS_USERNAME = config.get('jenkins', 'username')
JENKINS_TOKEN = config.get('jenkins', 'token')
JENKINS_JOB_NAME = config.get('jenkins', 'job_name')
HOST = config.get('autocloud', 'host') or '127.0.0.1'
PORT = int(config.get('autocloud', 'port')) or 5000
DEBUG = config.get('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
| # -*- coding: utf-8 -*-
import ConfigParser
import os
DEBUG = True
PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__))
config = ConfigParser.RawConfigParser()
if DEBUG:
config.read("{PROJECT_ROOT}/config/autocloud.cfg".format(
PROJECT_ROOT=PROJECT_ROOT))
else:
config.read('/etc/autocloud/autocloud.cfg')
KOJI_SERVER_URL = config.get('autocloud', 'koji_server_url')
BASE_KOJI_TASK_URL = config.get('autocloud', 'base_koji_task_url')
if DEBUG:
REDIS_CONFIG_FILEPATH = "{PROJECT_ROOT}/config/redis_server.json".format(
PROJECT_ROOT=PROJECT_ROOT)
else:
REDIS_CONFIG_FILEPATH = config.get('autocloud', 'redis_config_filepath')
JENKINS_BASE_URL = config.get('jenkins', 'baseurl')
JENKINS_USERNAME = config.get('jenkins', 'username')
JENKINS_TOKEN = config.get('jenkins', 'token')
JENKINS_JOB_NAME = config.get('jenkins', 'job_name')
HOST = config.get('autocloud', 'host') or '127.0.0.1'
PORT = int(config.get('autocloud', 'port')) or 5000
DEBUG = config.get('autocloud', 'debug')
SQLALCHEMY_URI = config.get('sqlalchemy', 'uri')
| Add redis config path format | Add redis config path format
| Python | agpl-3.0 | maxamillion/autocloud,maxamillion/autocloud,kushaldas/autocloud,maxamillion/autocloud,kushaldas/autocloud,kushaldas/autocloud,maxamillion/autocloud,kushaldas/autocloud |
b7aa7e85064430dc95e74a0f676f0484fee12733 | tests/cli/test_pinout.py | tests/cli/test_pinout.py | from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
str = type('')
import pytest
from gpiozero.cli import pinout
def test_args_incorrect():
with pytest.raises(SystemExit) as ex:
pinout.parse_args(['--nonexistentarg'])
def test_args_color():
args = pinout.parse_args([])
assert args.color is None
args = pinout.parse_args(['--color'])
assert args.color is True
args = pinout.parse_args(['--monochrome'])
assert args.color is False
def test_args_revision():
args = pinout.parse_args(['--revision', '000d'])
assert args.revision == '000d'
def test_help(capsys):
with pytest.raises(SystemExit) as ex:
pinout.parse_args(['--help'])
out, err = capsys.readouterr()
assert 'GPIO pinout' in out
| from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
str = type('')
import pytest
from gpiozero.cli.pinout import main
def test_args_incorrect():
with pytest.raises(SystemExit) as ex:
main(['pinout', '--nonexistentarg'])
def test_args_color():
args = main.parser.parse_args([])
assert args.color is None
args = main.parser.parse_args(['--color'])
assert args.color is True
args = main.parser.parse_args(['--monochrome'])
assert args.color is False
def test_args_revision():
args = main.parser.parse_args(['--revision', '000d'])
assert args.revision == '000d'
def test_help(capsys):
with pytest.raises(SystemExit) as ex:
main(['pinout', '--help'])
out, err = capsys.readouterr()
assert 'GPIO pinout' in out
| Fix up pinout tests so they work with new structure | Fix up pinout tests so they work with new structure
| Python | bsd-3-clause | MrHarcombe/python-gpiozero,waveform80/gpio-zero,RPi-Distro/python-gpiozero |
0ef346389b680e81ab618d4d782239640c1926f5 | tests/test_collection.py | tests/test_collection.py | import unittest
from indigo.models import Collection
from indigo.models.errors import UniqueException
from nose.tools import raises
class NodeTest(unittest.TestCase):
def test_a_create_root(self):
Collection.create(name="test_root", parent=None, path="/")
coll = Collection.find("test_root")
assert coll.name == "test_root"
assert coll.path == '/'
assert coll.parent is None
# Make sure this is the root collection
root = Collection.get_root_collection()
assert root.id == coll.id
def test_create_with_children(self):
coll = Collection.find("test_root")
assert coll.name == "test_root"
assert coll.is_root
child1 = Collection.create(name="child1", parent=str(coll.id))
child2 = Collection.create(name="child2", parent=str(coll.id))
assert child1.get_parent_collection().id == coll.id
assert child2.get_parent_collection().id == coll.id
assert child1.path == '/child1/'
assert child2.path == '/child2/'
children = coll.get_child_collections()
assert len(children) == 2
assert coll.get_child_collection_count() == 2
assert str(children[0].id) == str(child1.id)
assert str(children[1].id) == str(child2.id) | import unittest
from indigo.models import Collection
from indigo.models.errors import UniqueException
from nose.tools import raises
class NodeTest(unittest.TestCase):
def test_a_create_root(self):
Collection.create(name="test_root", parent=None, path="/")
coll = Collection.find("test_root")
assert coll.name == "test_root"
assert coll.path == '/'
assert coll.parent is None
# Make sure this is the root collection
root = Collection.get_root_collection()
assert root.id == coll.id
def test_create_with_children(self):
coll = Collection.find("test_root")
assert coll.name == "test_root"
assert coll.is_root
child1 = Collection.create(name="child1", parent=str(coll.id))
child2 = Collection.create(name="child2", parent=str(coll.id))
assert child1.get_parent_collection().id == coll.id
assert child2.get_parent_collection().id == coll.id
assert child1.path == '/child1/'
assert child2.path == '/child2/'
children = coll.get_child_collections()
assert len(children) == 2
assert coll.get_child_collection_count() == 2
| Remove unnecessary test of collection children | Remove unnecessary test of collection children
| Python | agpl-3.0 | UMD-DRASTIC/drastic |
afa10a27aa1fe1eaa719d988902c2f3f4d5d0928 | webapp/controllers/contact.py | webapp/controllers/contact.py | # -*- coding: utf-8 -*-
### required - do no delete
def user(): return dict(form=auth())
def download(): return response.download(request,db)
def call(): return service()
### end requires
def index():
return dict()
| # -*- coding: utf-8 -*-
from opentreewebapputil import (get_opentree_services_method_urls,
fetch_current_TNRS_context_names)
### required - do no delete
def user(): return dict(form=auth())
def download(): return response.download(request,db)
def call(): return service()
### end requires
default_view_dict = get_opentree_services_method_urls(request)
default_view_dict['taxonSearchContextNames'] = fetch_current_TNRS_context_names(request)
def index():
return default_view_dict
| Fix missing search-context list on Contact page. | Fix missing search-context list on Contact page.
| Python | bsd-2-clause | OpenTreeOfLife/opentree,OpenTreeOfLife/opentree,OpenTreeOfLife/opentree,OpenTreeOfLife/opentree,OpenTreeOfLife/opentree,OpenTreeOfLife/opentree |
c7dbf93c123d4b055fcd4a73e5d3374e48ee248a | pegasus/service/server.py | pegasus/service/server.py | import os
import logging
from optparse import OptionParser
from pegasus.service import app, em
from pegasus.service.command import Command
class ServerCommand(Command):
usage = "%prog [options]"
description = "Start Pegasus Service"
def __init__(self):
Command.__init__(self)
self.parser.add_option("-d", "--debug", action="store_true", dest="debug",
default=None, help="Enable debugging")
def run(self):
if self.options.debug:
app.config.update(DEBUG=True)
logging.basicConfig(level=logging.INFO)
# Make sure the environment is OK for the ensemble manager
em.check_environment()
# We only start the ensemble manager if we are not debugging
# or if we are debugging and Werkzeug is restarting. This
# prevents us from having two ensemble managers running in
# the debug case.
WERKZEUG_RUN_MAIN = os.environ.get('WERKZEUG_RUN_MAIN') == 'true'
DEBUG = app.config.get("DEBUG", False)
if (not DEBUG) or WERKZEUG_RUN_MAIN:
mgr = em.EnsembleManager()
mgr.start()
app.run(port=app.config["SERVER_PORT"], host=app.config["SERVER_HOST"])
def main():
ServerCommand().main()
| import os
import logging
from optparse import OptionParser
from pegasus.service import app, em
from pegasus.service.command import Command
log = logging.getLogger("server")
class ServerCommand(Command):
usage = "%prog [options]"
description = "Start Pegasus Service"
def __init__(self):
Command.__init__(self)
self.parser.add_option("-d", "--debug", action="store_true", dest="debug",
default=None, help="Enable debugging")
def run(self):
if self.options.debug:
app.config.update(DEBUG=True)
logging.basicConfig(level=logging.INFO)
# We only start the ensemble manager if we are not debugging
# or if we are debugging and Werkzeug is restarting. This
# prevents us from having two ensemble managers running in
# the debug case.
WERKZEUG_RUN_MAIN = os.environ.get('WERKZEUG_RUN_MAIN') == 'true'
DEBUG = app.config.get("DEBUG", False)
if (not DEBUG) or WERKZEUG_RUN_MAIN:
# Make sure the environment is OK for the ensemble manager
try:
em.check_environment()
except em.EMException, e:
log.warning("%s: Ensemble manager disabled" % e.message)
else:
mgr = em.EnsembleManager()
mgr.start()
app.run(port=app.config["SERVER_PORT"], host=app.config["SERVER_HOST"])
def main():
ServerCommand().main()
| Allow service to start without EM if Condor and Pegasus are missing | Allow service to start without EM if Condor and Pegasus are missing
| Python | apache-2.0 | pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus-service,pegasus-isi/pegasus-service,pegasus-isi/pegasus-service,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus |
6858e4a2e2047c906a3b8f69b7cd7b04a0cbf666 | pivoteer/writer/censys.py | pivoteer/writer/censys.py | """
Classes and functions for writing IndicatorRecord objects with a record type of "CE" (Censys Record)
"""
from pivoteer.writer.core import CsvWriter
class CensysCsvWriter(CsvWriter):
"""
A CsvWriter implementation for IndicatorRecords with a record type of "CE" (Censys Record)
"""
def __init__(self, writer):
"""
Create a new CsvWriter for Censys Records using the given writer.
:param writer: The writer
"""
super(CensysCsvWriter, self).__init__(writer)
def create_title_rows(self, indicator, records):
yield ["Certificate Search Results"]
def create_header(self):
return ["Subject", "Issuer", "SHA256", "Validity Start", "Validity End"]
def create_rows(self, record):
info = record["info"]
records = info["records"]
for record in records:
parsed = record["parsed"]
subject = parsed["subject_dn"]
issuer = parsed["issuer_dn"]
sha256 = parsed["fingerprint_sha256"]
validity = parsed["validity"]
start = validity["start"]
end = validity["end"]
yield [subject, issuer, sha256, start, end]
| """
Classes and functions for writing IndicatorRecord objects with a record type of "CE" (Censys Record)
"""
from pivoteer.writer.core import CsvWriter
class CensysCsvWriter(CsvWriter):
"""
A CsvWriter implementation for IndicatorRecords with a record type of "CE" (Censys Record)
"""
def __init__(self, writer):
"""
Create a new CsvWriter for Censys Records using the given writer.
:param writer: The writer
"""
super(CensysCsvWriter, self).__init__(writer)
def create_title_rows(self, indicator, records):
yield ["Certificate Search Results"]
def create_header(self):
return ["Subject", "Issuer", "SHA256", "Validity Start", "Validity End"]
def create_rows(self, record):
if (record is not None and len(record) > 0):
info = record["info"]
records = info["records"]
for record in records:
parsed = record["parsed"]
subject = parsed["subject_dn"]
issuer = parsed["issuer_dn"]
sha256 = parsed["fingerprint_sha256"]
validity = parsed["validity"]
start = validity["start"]
end = validity["end"]
yield [subject, issuer, sha256, start, end]
| Resolve issues with exporting empty dataset for certificate list | Resolve issues with exporting empty dataset for certificate list
| Python | mit | gdit-cnd/RAPID,gdit-cnd/RAPID,LindaTNguyen/RAPID,gdit-cnd/RAPID,gdit-cnd/RAPID,LindaTNguyen/RAPID,gdit-cnd/RAPID,LindaTNguyen/RAPID,LindaTNguyen/RAPID,LindaTNguyen/RAPID |
4a6a2155878d309e6bc96a948811daafa4a92908 | protocols/no_reconnect.py | protocols/no_reconnect.py | try:
from .. import api, shared as G
from ... import editor
from ..exc_fmt import str_e
from ..protocols import floo_proto
except (ImportError, ValueError):
from floo import editor
from floo.common import api, shared as G
from floo.common.exc_fmt import str_e
from floo.common.protocols import floo_proto
PORT_BLOCK_MSG = '''The Floobits plugin can't work because outbound traffic on TCP port 3448 is being blocked.
See https://%s/help/network'''
class NoReconnectProto(floo_proto.FlooProtocol):
def reconnect(self):
try:
api.get_workspace(self.host, 'Floobits', 'doesnotexist')
except Exception as e:
print(str_e(e))
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
else:
if not G.OUTBOUND_FILTERING:
G.OUTBOUND_FILTERING = True
return super(NoReconnectProto, self).reconnect()
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
self.stop()
| try:
from .. import api, shared as G
from ... import editor
from ..exc_fmt import str_e
from ..protocols import floo_proto
except (ImportError, ValueError):
from floo import editor
from floo.common import api, shared as G
from floo.common.exc_fmt import str_e
from floo.common.protocols import floo_proto
PORT_BLOCK_MSG = '''The Floobits plugin can't work because outbound traffic on TCP port 3448 is being blocked.
See https://%s/help/network'''
class NoReconnectProto(floo_proto.FlooProtocol):
def reconnect(self):
try:
api.get_workspace(self.host, 'Floobits', 'doesnotexist')
except Exception as e:
print(str_e(e))
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
else:
if not G.OUTBOUND_FILTERING:
G.OUTBOUND_FILTERING = True
return self.connect()
editor.error_message('Something went wrong. See https://%s/help/floorc to complete the installation.' % self.host)
self.stop()
| Call connect instead of reconnect. | Call connect instead of reconnect.
| Python | apache-2.0 | Floobits/plugin-common-python |
87d1801fefcd048f60c944c28bfc005101c5704b | dynd/tests/test_nd_groupby.py | dynd/tests/test_nd_groupby.py | import sys
import unittest
from dynd import nd, ndt
class TestGroupBy(unittest.TestCase):
def test_immutable(self):
a = nd.array([
('x', 0),
('y', 1),
('x', 2),
('x', 3),
('y', 4)],
dtype='{A: string; B: int32}').eval_immutable()
gb = nd.groupby(a, nd.fields(a, 'A'))
self.assertEqual(nd.as_py(gb.groups), [{'A': 'x'}, {'A': 'y'}])
self.assertEqual(nd.as_py(gb), [
[{'A': 'x', 'B': 0},
{'A': 'x', 'B': 2},
{'A': 'x', 'B': 3}],
[{'A': 'y', 'B': 1},
{'A': 'y', 'B': 4}]])
if __name__ == '__main__':
unittest.main()
| import sys
import unittest
from dynd import nd, ndt
class TestGroupBy(unittest.TestCase):
def test_immutable(self):
a = nd.array([
('x', 0),
('y', 1),
('x', 2),
('x', 3),
('y', 4)],
dtype='{A: string; B: int32}').eval_immutable()
gb = nd.groupby(a, nd.fields(a, 'A'))
self.assertEqual(nd.as_py(gb.groups), [{'A': 'x'}, {'A': 'y'}])
self.assertEqual(nd.as_py(gb), [
[{'A': 'x', 'B': 0},
{'A': 'x', 'B': 2},
{'A': 'x', 'B': 3}],
[{'A': 'y', 'B': 1},
{'A': 'y', 'B': 4}]])
def test_grouped_slices(self):
a = nd.asarray([[1, 2, 3], [1, 4, 5]])
gb = nd.groupby(a[:, 1:], a[:, 0])
self.assertEqual(nd.as_py(gb.groups), [1])
self.assertEqual(nd.as_py(gb), [[[2, 3], [4, 5]]])
a = nd.asarray([[1, 2, 3], [3, 1, 7], [1, 4, 5], [2, 6, 7], [3, 2, 5]])
gb = nd.groupby(a[:, 1:], a[:, 0])
self.assertEqual(nd.as_py(gb.groups), [1, 2, 3])
self.assertEqual(nd.as_py(gb), [[[2, 3], [4, 5]],
[[6, 7]],
[[1, 7], [2, 5]]])
if __name__ == '__main__':
unittest.main()
| Add some more simple nd.groupby tests | Add some more simple nd.groupby tests
| Python | bsd-2-clause | cpcloud/dynd-python,izaid/dynd-python,aterrel/dynd-python,michaelpacer/dynd-python,mwiebe/dynd-python,insertinterestingnamehere/dynd-python,aterrel/dynd-python,cpcloud/dynd-python,cpcloud/dynd-python,izaid/dynd-python,aterrel/dynd-python,mwiebe/dynd-python,michaelpacer/dynd-python,mwiebe/dynd-python,pombredanne/dynd-python,michaelpacer/dynd-python,insertinterestingnamehere/dynd-python,ContinuumIO/dynd-python,insertinterestingnamehere/dynd-python,pombredanne/dynd-python,ContinuumIO/dynd-python,izaid/dynd-python,ContinuumIO/dynd-python,cpcloud/dynd-python,izaid/dynd-python,michaelpacer/dynd-python,mwiebe/dynd-python,pombredanne/dynd-python,aterrel/dynd-python,pombredanne/dynd-python,ContinuumIO/dynd-python,insertinterestingnamehere/dynd-python |
7b935b23e17ef873a060fdfbefbfdf232fe8b8de | git_release/release.py | git_release/release.py | import subprocess
from git_release import errors, git_helpers
def _parse_tag(tag):
major, minor = tag.split('.')
return int(major), int(minor)
def _increment_tag(tag, release_type):
major, minor = _parse_tag(tag)
if release_type == 'major':
new_major = major + 1
new_minor = 0
else:
new_major = major
new_minor = minor + 1
return '{}.{}'.format(new_major, new_minor)
def release(release_type, signed):
if not git_helpers.is_master():
raise errors.NotMasterException("Current branch is not master.\nAborting.")
tag = git_helpers.get_current_tag()
if not tag:
raise errors.NoTagException("Unable to get current tag.\nAborting.")
new_tag = _increment_tag(tag)
git_helpers.tag(signed, new_tag)
| import subprocess
from git_release import errors, git_helpers
def _parse_tag(tag):
major, minor = tag.split('.')
return int(major), int(minor)
def _increment_tag(tag, release_type):
major, minor = _parse_tag(tag)
if release_type == 'major':
new_major = major + 1
new_minor = 0
else:
new_major = major
new_minor = minor + 1
return '{}.{}'.format(new_major, new_minor)
def release(release_type, signed):
if not git_helpers.is_master():
raise errors.NotMasterException("Current branch is not master.\nAborting.")
tag = git_helpers.get_current_tag()
if not tag:
raise errors.NoTagException("Unable to get current tag.\nAborting.")
new_tag = _increment_tag(tag, release_type)
git_helpers.tag(signed, new_tag)
| Add missing argument to _increment_tag call | Add missing argument to _increment_tag call
| Python | mit | Authentise/git-release |
14f4fe95be0501142f929a9b7bec807fd14e3d6f | eva/layers/residual_block.py | eva/layers/residual_block.py | from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from keras.engine.topology import merge
from eva.layers.masked_convolution2d import MaskedConvolution2D
def ResidualBlock(model, filters):
# 2h -> h
block = Convolution2D(filters//2, 1, 1)(model)
block = PReLU()(block)
# h 3x3 -> h
block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block)
block = PReLU()(block)
# h -> 2h
block = Convolution2D(filters, 1, 1)(block)
return PReLU()(merge([model, block], mode='sum'))
def ResidualBlockList(model, filters, length):
for _ in range(length):
model = ResidualBlock(model, filters)
return model
| from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from eva.layers.masked_convolution2d import MaskedConvolution2D
def ResidualBlock(model, filters):
# 2h -> h
block = PReLU()(model)
block = MaskedConvolution2D(filters//2, 1, 1)(block)
# h 3x3 -> h
block = PReLU()(block)
block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block)
# h -> 2h
block = PReLU()(block)
block = MaskedConvolution2D(filters, 1, 1)(block)
return Merge(mode='sum')([model, block])
def ResidualBlockList(model, filters, length):
for _ in range(length):
model = ResidualBlock(model, filters)
return model
| Fix residual blocks to be on-par with paper. | Fix residual blocks to be on-par with paper.
| Python | apache-2.0 | israelg99/eva |
7f5d6e4386e3a80db5cfcbf961c7603b0c78cc52 | openxc/sources/serial.py | openxc/sources/serial.py | """A virtual serial port data source."""
from __future__ import absolute_import
import logging
try:
import serial
except ImportError:
LOG.debug("serial library not installed, can't use serial interface")
from .base import BytestreamDataSource, DataSourceError
LOG = logging.getLogger(__name__)
class SerialDataSource(BytestreamDataSource):
"""A data source reading from a serial port, which could be implemented
with a USB to Serial or Bluetooth adapter.
"""
DEFAULT_PORT = "/dev/ttyUSB0"
DEFAULT_BAUDRATE = 115200
def __init__(self, callback=None, port=None, baudrate=None):
"""Initialize a connection to the serial device.
Kwargs:
port - optionally override the default virtual COM port
baudrate - optionally override the default baudrate
Raises:
DataSourceError if the serial device cannot be opened.
"""
super(SerialDataSource, self).__init__(callback)
port = port or self.DEFAULT_PORT
baudrate = baudrate or self.DEFAULT_BAUDRATE
try:
self.device = serial.Serial(port, baudrate, rtscts=True)
except serial.SerialException as e:
raise DataSourceError("Unable to open serial device at port "
"%s: %s" % (port, e))
else:
LOG.debug("Opened serial device at %s", port)
def _read(self):
return self.device.readline()
| """A virtual serial port data source."""
from __future__ import absolute_import
import logging
from .base import BytestreamDataSource, DataSourceError
LOG = logging.getLogger(__name__)
try:
import serial
except ImportError:
LOG.debug("serial library not installed, can't use serial interface")
class SerialDataSource(BytestreamDataSource):
"""A data source reading from a serial port, which could be implemented
with a USB to Serial or Bluetooth adapter.
"""
DEFAULT_PORT = "/dev/ttyUSB0"
DEFAULT_BAUDRATE = 115200
def __init__(self, callback=None, port=None, baudrate=None):
"""Initialize a connection to the serial device.
Kwargs:
port - optionally override the default virtual COM port
baudrate - optionally override the default baudrate
Raises:
DataSourceError if the serial device cannot be opened.
"""
super(SerialDataSource, self).__init__(callback)
port = port or self.DEFAULT_PORT
baudrate = baudrate or self.DEFAULT_BAUDRATE
try:
self.device = serial.Serial(port, baudrate, rtscts=True)
except serial.SerialException as e:
raise DataSourceError("Unable to open serial device at port "
"%s: %s" % (port, e))
else:
LOG.debug("Opened serial device at %s", port)
def _read(self):
return self.device.readline()
| Make sure LOG is defined before using it. | Make sure LOG is defined before using it.
| Python | bsd-3-clause | openxc/openxc-python,openxc/openxc-python,openxc/openxc-python |
fcfceb59cbd368ddaee87906d3f53f15bbb30072 | examples/tornado/auth_demo.py | examples/tornado/auth_demo.py | from mongrel2.config import *
main = Server(
uuid="f400bf85-4538-4f7a-8908-67e313d515c2",
access_log="/logs/access.log",
error_log="/logs/error.log",
chroot="./",
default_host="localhost",
name="test",
pid_file="/run/mongrel2.pid",
port=6767,
hosts = [
Host(name="localhost",
routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) })
]
)
commit([main])
| from mongrel2.config import *
main = Server(
uuid="f400bf85-4538-4f7a-8908-67e313d515c2",
access_log="/logs/access.log",
error_log="/logs/error.log",
chroot="./",
default_host="localhost",
name="test",
pid_file="/run/mongrel2.pid",
port=6767,
hosts = [
Host(name="localhost",
routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) })
]
)
commit([main], settings={'limits.buffer_size': 4 * 1024})
| Add the settings to the authdemo. | Add the settings to the authdemo. | Python | bsd-3-clause | solidrails/mongrel2,solidrails/mongrel2,solidrails/mongrel2,solidrails/mongrel2 |
f783d8ac4314923f1259208eb221c8874c03884a | calexicon/fn/iso.py | calexicon/fn/iso.py | from datetime import date as vanilla_date, timedelta
from overflow import OverflowDate
def iso_to_gregorian(year, week, weekday):
if week < 1 or week > 54:
raise ValueError(
"Week number %d is invalid for an ISO calendar."
% (week, )
)
jan_8 = vanilla_date(year, 1, 8).isocalendar()
offset = (week - jan_8[1]) * 7 + (weekday - jan_8[2])
try:
d = vanilla_date(year, 1, 8) + timedelta(days=offset)
except:
d = OverflowDate(isocalendar=(year, week, weekday))
if d.isocalendar()[0] != year:
raise ValueError(
"Week number %d is invalid for ISO year %d."
% (week, year)
)
return d
| from datetime import date as vanilla_date, timedelta
from overflow import OverflowDate
def _check_week(week):
if week < 1 or week > 54:
raise ValueError(
"Week number %d is invalid for an ISO calendar."
% (week, )
)
def iso_to_gregorian(year, week, weekday):
_check_week(week)
jan_8 = vanilla_date(year, 1, 8).isocalendar()
offset = (week - jan_8[1]) * 7 + (weekday - jan_8[2])
try:
d = vanilla_date(year, 1, 8) + timedelta(days=offset)
except:
d = OverflowDate(isocalendar=(year, week, weekday))
if d.isocalendar()[0] != year:
raise ValueError(
"Week number %d is invalid for ISO year %d."
% (week, year)
)
return d
| Move check for week number out into function. | Move check for week number out into function.
| Python | apache-2.0 | jwg4/qual,jwg4/calexicon |
11aab47e3c8c0d4044042aead7c01c990a152bea | tests/integration/customer/test_dispatcher.py | tests/integration/customer/test_dispatcher.py | from django.test import TestCase
from django.core import mail
from oscar.core.compat import get_user_model
from oscar.apps.customer.utils import Dispatcher
from oscar.apps.customer.models import CommunicationEventType
from oscar.test.factories import create_order
User = get_user_model()
class TestDispatcher(TestCase):
def test_sending_a_order_related_messages(self):
email = '[email protected]'
user = User.objects.create_user('testuser', email,
'somesimplepassword')
order_number = '12345'
order = create_order(number=order_number, user=user)
et = CommunicationEventType.objects.create(code="ORDER_PLACED",
name="Order Placed",
category="Order related")
messages = et.get_messages({
'order': order,
'lines': order.lines.all()
})
self.assertIn(order_number, messages['body'])
self.assertIn(order_number, messages['html'])
dispatcher = Dispatcher()
dispatcher.dispatch_order_messages(order, messages, et)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0]
self.assertIn(order_number, message.body)
| from django.test import TestCase
from django.core import mail
from oscar.core.compat import get_user_model
from oscar.apps.customer.utils import Dispatcher
from oscar.apps.customer.models import CommunicationEventType
from oscar.test.factories import create_order
User = get_user_model()
class TestDispatcher(TestCase):
def test_sending_a_order_related_messages(self):
email = '[email protected]'
user = User.objects.create_user('testuser', email,
'somesimplepassword')
order_number = '12345'
order = create_order(number=order_number, user=user)
et = CommunicationEventType.objects.create(code="ORDER_PLACED",
name="Order Placed",
category="Order related")
messages = et.get_messages({
'order': order,
'lines': order.lines.all()
})
self.assertIn(order_number, messages['body'])
self.assertIn(order_number, messages['html'])
dispatcher = Dispatcher()
dispatcher.dispatch_order_messages(order, messages, et)
self.assertEqual(len(mail.outbox), 1)
message = mail.outbox[0]
self.assertIn(order_number, message.body)
# test sending messages to emails without account and text body
messages.pop('body')
dispatcher.dispatch_direct_messages(email, messages)
self.assertEqual(len(mail.outbox), 2)
| Add tests for sending messages to emails without account. | Add tests for sending messages to emails without account.
| Python | bsd-3-clause | sonofatailor/django-oscar,solarissmoke/django-oscar,okfish/django-oscar,okfish/django-oscar,django-oscar/django-oscar,sasha0/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,django-oscar/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,okfish/django-oscar,sonofatailor/django-oscar,okfish/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,sasha0/django-oscar,sonofatailor/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar |
ec7647c264bb702d4211779ef55ca5a694307faf | ibmcnx/doc/DataSources.py | ibmcnx/doc/DataSources.py | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: [email protected]
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.split('(')
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: [email protected]
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | Create documentation of DataSource Settings | : Create documentation of DataSource Settings
Task-Url: | Python | apache-2.0 | stoeps13/ibmcnx2,stoeps13/ibmcnx2 |
1bdf4e29daaf896fb6bf3416e0cae65cd8144e6f | falcon_hateoas/middleware.py | falcon_hateoas/middleware.py | import json
import decimal
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def default(self, o):
# if isinstance(getattr(o, 'metadata'), sqlalchemy.schema.MetaData):
if issubclass(o.__class__,
sqlalchemy.ext.declarative.DeclarativeMeta):
d = {}
for col in o.__table__.columns.keys():
if hasattr(getattr(o, col), 'isoformat'):
d[col] = getattr(o, col).isoformat()
elif isinstance(getattr(o, col), decimal.Decimal):
d[col] = float(getattr(o, col))
else:
d[col] = getattr(o, col)
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
| import json
import decimal
import sqlalchemy
class AlchemyJSONEncoder(json.JSONEncoder):
def _is_alchemy_object(self, obj):
try:
sqlalchemy.orm.base.object_mapper(obj)
return True
except sqlalchemy.orm.exc.UnmappedInstanceError:
return False
def default(self, o):
if self._is_alchemy_object(o):
d = {}
for col in o.__table__.columns.keys():
if hasattr(getattr(o, col), 'isoformat'):
d[col] = getattr(o, col).isoformat()
elif isinstance(getattr(o, col), decimal.Decimal):
d[col] = float(getattr(o, col))
else:
d[col] = getattr(o, col)
return d
else:
return super(AlchemyJSONEncoder, self).default(o)
class JsonMiddleware:
def process_response(self, req, resp, resource):
resp.set_header('Content-Type', 'application/json; charset=utf-8')
resp.body = json.dumps(resp.body, cls=AlchemyJSONEncoder)
| Use SQLAlchemy object_mapper for testing Alchemy objects | Use SQLAlchemy object_mapper for testing Alchemy objects
Signed-off-by: Michal Juranyi <[email protected]>
| Python | mit | Vnet-as/falcon-hateoas |
c4103c00b51ddb9cb837d65b43c972505e533bdc | tilescraper.py | tilescraper.py | from PIL import Image
import json, StringIO, requests
import time
service = "http://dlss-dev-azaroth.stanford.edu/services/iiif/f1rc/"
resp = requests.get(service + "info.json")
js = json.loads(resp.text)
h = js['height']
w = js['width']
img = Image.new("RGB", (w,h), "white")
tilesize = 400
for x in range(w/tilesize+1):
for y in range(h/tilesize+1):
region = "%s,%s,%s,%s" % (x*tilesize, y*tilesize, tilesize, tilesize)
tileresp = requests.get(service + ("/%s/full/0/default.jpg" % region))
tile = Image.open(StringIO.StringIO(tileresp.content))
img.paste(tile, (x*tilesize,y*tilesize))
img.save("full.jpg")
| from PIL import Image
import json, StringIO, requests
import time
import robotparser
import re
host = "http://dlss-dev-azaroth.stanford.edu/"
service = host + "services/iiif/f1rc/"
resp = requests.get(service + "info.json")
js = json.loads(resp.text)
h = js['height']
w = js['width']
img = Image.new("RGB", (w,h), "white")
## Respect tile dimensions of server
tilesize = 1024
if js.has_key('tiles'):
tilesize = js['tiles']['width']
## Introduce baseline crawl delay
delay = 1
## Parse robots.txt
resp = requests.get(host + "/robots.txt")
if resp.status == 200:
parser = robotparser.RobotFileParser()
parser.parse(resp.text)
okay = parser.can_fetch("*", service)
if not okay:
print "Blocked by robots.txt"
sys.exit()
# No support for Crawl-delay extension ... just search
cd = re.compile("Crawl-delay: ([0-9]+)")
m = cd.search(resp.text)
if m:
delay = int(m.groups()[0])
for x in range(w/tilesize+1):
for y in range(h/tilesize+1):
region = "%s,%s,%s,%s" % (x*tilesize, y*tilesize, tilesize, tilesize)
tileresp = requests.get(service + ("/%s/full/0/default.jpg" % region))
tile = Image.open(StringIO.StringIO(tileresp.content))
img.paste(tile, (x*tilesize,y*tilesize))
sys.stdout.write('.')
sys.stdout.flush()
time.sleep(delay)
img.save("full.jpg")
| Add in good practices for crawling | Add in good practices for crawling
| Python | apache-2.0 | azaroth42/iiif-harvester |
7873017564570dd993a19648e3c07f0d2e79ec19 | dodocs/profiles/remove.py | dodocs/profiles/remove.py | """Create the profile.
Copyright (c) 2015 Francesco Montesano
MIT Licence
"""
import shutil
import dodocs.logger as dlog
import dodocs.utils as dutils
def remove(args):
"""Remove profile(s)
Parameters
----------
args : namespace
parsed command line arguments
"""
log = dlog.getLogger()
for name in args.name:
dlog.set_profile(name)
log.debug("Removing profile")
profile_dir = dutils.profile_dir(name)
if not profile_dir.exists():
log.warn("Profile does not exist")
continue
try:
if profile_dir.is_symlink():
realpath = profile_dir.resolve()
profile_dir.unlink()
shutil.rmtree(str(realpath))
else:
shutil.rmtree(str(profile_dir))
except FileNotFoundError:
log.error("The removal of profile failed", exc_info=True)
log.info("profile removed")
| """Remove the profiles.
Copyright (c) 2015 Francesco Montesano
MIT Licence
"""
import shutil
import dodocs.logger as dlog
import dodocs.utils as dutils
def remove(args):
"""Remove profile(s)
Parameters
----------
args : namespace
parsed command line arguments
"""
log = dlog.getLogger()
for name in args.name:
dlog.set_profile(name)
profile_dir = dutils.profile_dir(name)
if not profile_dir.exists():
log.warn("Profile does not exist")
continue
log.debug("Removing profile")
try:
if profile_dir.is_symlink():
realpath = profile_dir.resolve()
profile_dir.unlink()
shutil.rmtree(str(realpath))
else:
shutil.rmtree(str(profile_dir))
except FileNotFoundError:
log.error("The removal of profile failed", exc_info=True)
log.info("profile removed")
| Adjust logging and fix module documentation | Adjust logging and fix module documentation
| Python | mit | montefra/dodocs |
0ceadc93f0798bd04404cb18d077269f19111438 | nap/auth.py | nap/auth.py | from __future__ import unicode_literals
# Authentication and Authorisation
from functools import wraps
from . import http
def permit(test_func):
'''Decorate a handler to control access'''
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(self, *args, **kwargs):
if test_func(self, *args, **kwargs):
return view_func(self, *args, **kwargs)
return http.Forbidden()
return _wrapped_view
return decorator
permit_logged_in = permit(
lambda self, *args, **kwargs: self.request.user.is_authenticated()
)
permit_staff = permit(
lambda self, *args, **kwargs: self.request.user.is_staff
)
def permit_groups(*groups):
def in_groups(request, *args):
return request.user.groups.filter(name__in=args).exists()
return permit(
lambda self, *args, **kwargs: in_groups(self.request, *groups)
)
| from __future__ import unicode_literals
# Authentication and Authorisation
from functools import wraps
from . import http
def permit(test_func):
'''Decorate a handler to control access'''
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(self, *args, **kwargs):
if test_func(self, *args, **kwargs):
return view_func(self, *args, **kwargs)
return http.Forbidden()
return _wrapped_view
return decorator
permit_logged_in = permit(
lambda self, *args, **kwargs: self.request.user.is_authenticated()
)
permit_staff = permit(
lambda self, *args, **kwargs: self.request.user.is_staff
)
def permit_groups(*groups):
def in_groups(request, *args):
return request.user.groups.filter(name__in=groups).exists()
return permit(
lambda self, *args, **kwargs: in_groups(self.request, *groups)
)
| Fix wrong paramter name in permit_groups decorator | Fix wrong paramter name in permit_groups decorator
| Python | bsd-3-clause | MarkusH/django-nap,limbera/django-nap |
f3efb01c530db87f48d813b118f80a2ee1fd5996 | dthm4kaiako/users/apps.py | dthm4kaiako/users/apps.py | """Application configuration for the chapters application."""
from django.apps import AppConfig
class UsersAppConfig(AppConfig):
"""Configuration object for the chapters application."""
name = "users"
verbose_name = "Users"
def ready(self):
"""Import signals upon intialising application."""
import users.signals
| """Application configuration for the chapters application."""
from django.apps import AppConfig
class UsersAppConfig(AppConfig):
"""Configuration object for the chapters application."""
name = "users"
verbose_name = "Users"
def ready(self):
"""Import signals upon intialising application."""
import users.signals # noqa F401
| Exclude import from style checking | Exclude import from style checking
| Python | mit | uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers |
6454372da6550455735cbcb3a86a966e61c134a1 | elasticsearch/__init__.py | elasticsearch/__init__.py | from __future__ import absolute_import
VERSION = (0, 4, 3)
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))
from elasticsearch.client import Elasticsearch
from elasticsearch.transport import Transport
from elasticsearch.connection_pool import ConnectionPool, ConnectionSelector, \
RoundRobinSelector
from elasticsearch.serializer import JSONSerializer
from elasticsearch.connection import Connection, RequestsHttpConnection, \
Urllib3HttpConnection, MemcachedConnection
from elasticsearch.exceptions import *
| from __future__ import absolute_import
VERSION = (0, 4, 3)
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))
from elasticsearch.client import Elasticsearch
from elasticsearch.transport import Transport
from elasticsearch.connection_pool import ConnectionPool, ConnectionSelector, \
RoundRobinSelector
from elasticsearch.serializer import JSONSerializer
from elasticsearch.connection import Connection, RequestsHttpConnection, \
Urllib3HttpConnection, MemcachedConnection, ThriftConnection
from elasticsearch.exceptions import *
| Allow people to import ThriftConnection from elasticsearch package itself | Allow people to import ThriftConnection from elasticsearch package itself
| Python | apache-2.0 | veatch/elasticsearch-py,chrisseto/elasticsearch-py,Garrett-R/elasticsearch-py,brunobell/elasticsearch-py,tailhook/elasticsearch-py,AlexMaskovyak/elasticsearch-py,brunobell/elasticsearch-py,mjhennig/elasticsearch-py,thomdixon/elasticsearch-py,kelp404/elasticsearch-py,gardsted/elasticsearch-py,elastic/elasticsearch-py,elastic/elasticsearch-py,konradkonrad/elasticsearch-py,liuyi1112/elasticsearch-py,prinsherbert/elasticsearch-py |
4a37feed87efa3dd05e38ad6f85afe392afd3a16 | gitless/cli/gl_switch.py | gitless/cli/gl_switch.py | # -*- coding: utf-8 -*-
# Gitless - a version control system built on top of Git.
# Licensed under GNU GPL v2.
"""gl switch - Switch branches."""
from __future__ import unicode_literals
from clint.textui import colored
from . import pprint
def parser(subparsers, _):
"""Adds the switch parser to the given subparsers object."""
switch_parser = subparsers.add_parser(
'switch', help='switch branches')
switch_parser.add_argument('branch', help='switch to branch')
switch_parser.add_argument(
'-mo', '--move-over',
help='move uncomitted changes made in the current branch to the '
'destination branch',
action='store_true')
switch_parser.set_defaults(func=main)
def main(args, repo):
  """Switch the repository to the branch named in ``args.branch``.

  Returns True on success, False when no such branch exists.
  """
  target = repo.lookup_branch(args.branch)
  if target:
    repo.switch_current_branch(target, move_over=args.move_over)
    pprint.ok('Switched to branch {0}'.format(args.branch))
    return True
  # Unknown branch: report it (branch name highlighted in green) and bail.
  pprint.err('Branch {0} doesn\'t exist'.format(colored.green(args.branch)))
  pprint.err_exp('to list existing branches do gl branch')
  return False
| # -*- coding: utf-8 -*-
# Gitless - a version control system built on top of Git.
# Licensed under GNU GPL v2.
"""gl switch - Switch branches."""
from __future__ import unicode_literals
from clint.textui import colored
from . import pprint
def parser(subparsers, _):
  """Adds the switch parser to the given subparsers object.

  Registers the 'switch' subcommand with a positional branch name, an
  optional -mo/--move-over flag, and wires the subcommand to main().
  """
  switch_parser = subparsers.add_parser(
      'switch', help='switch branches')
  switch_parser.add_argument('branch', help='switch to branch')
  switch_parser.add_argument(
      '-mo', '--move-over',
      # Fixed typo in the user-facing help text: 'uncomitted' -> 'uncommitted'.
      help='move uncommitted changes made in the current branch to the '
      'destination branch',
      action='store_true')
  switch_parser.set_defaults(func=main)
def main(args, repo):
  # Resolve the requested branch name; lookup_branch returns a falsy
  # value when no branch with that name exists.
  b = repo.lookup_branch(args.branch)
  if not b:
    pprint.err('Branch {0} doesn\'t exist'.format(args.branch))
    pprint.err_exp('to list existing branches do gl branch')
    return False

  # move_over carries uncommitted working-tree changes into the
  # destination branch.
  repo.switch_current_branch(b, move_over=args.move_over)
  pprint.ok('Switched to branch {0}'.format(args.branch))
  return True
| Fix ui bug in switch | Fix ui bug in switch
| Python | mit | sdg-mit/gitless,sdg-mit/gitless |
f46a0cdf869b8629a1e4a08105a065933d4199f9 | climlab/__init__.py | climlab/__init__.py | __version__ = '0.4.3.dev0'
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shorcuts
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.domain.initial import column_state, surface_state
from climlab.process import Process, TimeDependentProcess, ImplicitProcess, DiagnosticProcess, EnergyBudget
from climlab.process import process_like, get_axes
| __version__ = '0.5.0.dev0'
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shorcuts
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.domain.initial import column_state, surface_state
from climlab.process import Process, TimeDependentProcess, ImplicitProcess, DiagnosticProcess, EnergyBudget
from climlab.process import process_like, get_axes
| Increment version number to 0.5.0.dev0 | Increment version number to 0.5.0.dev0
| Python | mit | cjcardinale/climlab,cjcardinale/climlab,cjcardinale/climlab,brian-rose/climlab,brian-rose/climlab |
56446567f764625e88d8efdbfa2849e0a579d5c4 | indra/tests/test_rest_api.py | indra/tests/test_rest_api.py | import requests
from nose.plugins.attrib import attr
@attr('webservice')
def test_rest_api_responsive():
stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}'
url = 'http://ec2-54-88-146-250.compute-1.amazonaws.com:8080/' + \
'assemblers/cyjs'
res = requests.post(url, stmt_str)
assert res.status_code == 200
| import requests
from nose.plugins.attrib import attr
@attr('webservice')
def test_rest_api_responsive():
    # A single INDRA Complex Statement ("MEK binds ERK") serialized in the
    # JSON envelope the REST API expects as its request body.
    stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}'
    # POST against the Cytoscape JS assembler endpoint of the deployed API.
    url = 'http://indra-api-72031e2dfde08e09.elb.us-east-1.amazonaws.com:8000/' + \
        'assemblers/cyjs'
    res = requests.post(url, stmt_str)
    # A 200 response means the service is reachable and accepted the payload.
    assert res.status_code == 200
| Update REST API address in test | Update REST API address in test
| Python | bsd-2-clause | sorgerlab/belpy,sorgerlab/indra,sorgerlab/belpy,bgyori/indra,pvtodorov/indra,bgyori/indra,sorgerlab/indra,johnbachman/belpy,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra,pvtodorov/indra,johnbachman/belpy,bgyori/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/indra,johnbachman/indra,johnbachman/indra |
4ec9d5a5a59c1526a846f6d88f1e43154e859fb7 | report_context/controllers/main.py | report_context/controllers/main.py | # Copyright 2019 Creu Blanca
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
import json
from odoo.http import request, route
from odoo.addons.web.controllers import main as report
class ReportController(report.ReportController):
@route()
def report_routes(self, reportname, docids=None, converter=None, **data):
report = request.env["ir.actions.report"]._get_report_from_name(reportname)
original_context = json.loads(data.get("context", "{}"))
data["context"] = json.dumps(
report.with_context(original_context)._get_context()
)
return super().report_routes(
reportname, docids=docids, converter=converter, **data
)
| # Copyright 2019 Creu Blanca
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
import json
from odoo.http import request, route
from odoo.addons.web.controllers import main as report
class ReportController(report.ReportController):
    """Extends the standard web report controller so reports are rendered
    with the extra context computed by ``ir.actions.report._get_context()``.
    """

    @route()
    def report_routes(self, reportname, docids=None, converter=None, **data):
        # Look up the report definition so we can ask it for its context.
        report = request.env["ir.actions.report"]._get_report_from_name(reportname)
        # data["context"] may be absent *or* explicitly None; the `or "{}"`
        # guard keeps json.loads from failing on a None value.
        original_context = json.loads(data.get("context", "{}") or "{}")
        data["context"] = json.dumps(
            report.with_context(original_context)._get_context()
        )
        return super().report_routes(
            reportname, docids=docids, converter=converter, **data
        )
| Fix json.loads when context is None | Fix json.loads when context is None
Co-authored-by: Pierre Verkest <[email protected]> | Python | agpl-3.0 | OCA/reporting-engine,OCA/reporting-engine,OCA/reporting-engine,OCA/reporting-engine |
6bdbbf4d5e100856acbaba1c5fc024a9f7f78718 | tests/tools.py | tests/tools.py | """
Test tools required by multiple suites.
"""
__author__ = 'mbach'
import contextlib
import shutil
import subprocess
import tempfile
@contextlib.contextmanager
def devpi_server(port=2414):
server_dir = tempfile.mkdtemp()
try:
subprocess.check_call(['devpi-server', '--start', '--serverdir={}'.format(server_dir), '--port={}'.format(port)])
try:
yield 'http://localhost:{}'.format(port)
finally:
subprocess.check_call(['devpi-server', '--stop', '--serverdir={}'.format(server_dir)])
finally:
shutil.rmtree(server_dir)
| """
Test tools required by multiple suites.
"""
__author__ = 'mbach'
import contextlib
import shutil
import subprocess
import tempfile
from brandon import devpi
@contextlib.contextmanager
def devpi_server(port=2414):
    """Run a throw-away devpi server for the duration of the context.

    Yields the server's base URL. The server is stopped and its state
    directory removed again on exit, even if the body raises.
    """
    server_dir = tempfile.mkdtemp()
    try:
        subprocess.check_call(['devpi-server', '--start', '--serverdir={}'.format(server_dir), '--port={}'.format(port)])
        try:
            yield 'http://localhost:{}'.format(port)
        finally:
            # Always stop the server before removing its state directory.
            subprocess.check_call(['devpi-server', '--stop', '--serverdir={}'.format(server_dir)])
    finally:
        shutil.rmtree(server_dir)
@contextlib.contextmanager
def devpi_index(server_url, user, index):
    """
    Creates the given user and index, and cleans it afterwards.

    Yields a tuple of index-url and password. The index is created without an upstream.
    """
    password = 'foo'
    devpi_client = devpi.Client(server_url)
    devpi_client._execute('user', '-c', user, 'password=' + password)
    devpi_client._execute('login', user, '--password=' + password)
    # NOTE(review): the index is created under the hard-coded name 'wheels',
    # not the *index* argument - confirm callers always pass index='wheels'.
    devpi_client._execute('index', '-c', 'wheels', 'bases=')
    yield '{}/{}/{}'.format(server_url, user, index), password
    # Clean-up runs only on normal exit; an exception raised in the body
    # would leave the index and user behind on the server.
    devpi_client._execute('index', '--delete', '/{}/{}'.format(user, index))
    devpi_client._execute('user', user, '--delete')
| Test tool to create temporary devpi index. | Test tool to create temporary devpi index.
| Python | bsd-3-clause | tylerdave/devpi-builder |
8460b1249d1140234798b8b7e482b13cde173a1e | bluebottle/settings/jenkins.py | bluebottle/settings/jenkins.py | # NOTE: local.py must be an empty file when using this configuration.
from .defaults import *
# Put jenkins environment specific overrides below.
INSTALLED_APPS += ('django_jenkins',)
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
DEBUG = False
TEMPLATE_DEBUG = False
# Test all INSTALLED_APPS by default
PROJECT_APPS = list(INSTALLED_APPS)
# Some of these tests fail, and it's not our fault
# https://code.djangoproject.com/ticket/17966
PROJECT_APPS.remove('django.contrib.auth')
# https://github.com/django-extensions/django-extensions/issues/154
PROJECT_APPS.remove('django_extensions')
PROJECT_APPS.remove('django_extensions.tests')
# FIXME: We need to fix the django_polymorphic tests
PROJECT_APPS.remove('polymorphic')
# Disable pylint becasue it seems to be causing problems
JENKINS_TASKS = (
# 'django_jenkins.tasks.run_pylint',
'django_jenkins.tasks.with_coverage',
'django_jenkins.tasks.django_tests',
)
| # NOTE: local.py must be an empty file when using this configuration.
from .defaults import *
# Put jenkins environment specific overrides below.
INSTALLED_APPS += ('django_jenkins',)
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
DEBUG = False
TEMPLATE_DEBUG = False
# Test all INSTALLED_APPS by default
PROJECT_APPS = list(INSTALLED_APPS)
# Some of these tests fail, and it's not our fault
# https://code.djangoproject.com/ticket/17966
PROJECT_APPS.remove('django.contrib.auth')
# This app fails with a strange error:
# DatabaseError: no such table: django_comments
# Not sure what's going on so it's disabled for now.
PROJECT_APPS.remove('django.contrib.sites')
# https://github.com/django-extensions/django-extensions/issues/154
PROJECT_APPS.remove('django_extensions')
PROJECT_APPS.remove('django_extensions.tests')
# FIXME: We need to fix the django_polymorphic tests
PROJECT_APPS.remove('polymorphic')
# Disable pylint becasue it seems to be causing problems
JENKINS_TASKS = (
# 'django_jenkins.tasks.run_pylint',
'django_jenkins.tasks.with_coverage',
'django_jenkins.tasks.django_tests',
)
| Disable django.contrib.sites tests in Jenkins. | Disable django.contrib.sites tests in Jenkins.
| Python | bsd-3-clause | onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site |
21a6ddca55c8b5da70d806afa18f08ac20cb04c0 | src/zsl/interface/webservice/performers/method.py | src/zsl/interface/webservice/performers/method.py | """
:mod:`zsl.interface.webservice.performers.method`
-------------------------------------------------
.. moduleauthor:: Martin Babka
"""
from __future__ import unicode_literals
import logging
from importlib import import_module, reload
import sys
from zsl.router.method import get_method_packages
def call_exposers_in_method_packages():
for package in get_method_packages():
if package in sys.modules:
module = sys.modules[package]
if hasattr(module, '__reloader__'):
getattr(module, '__reloader__')()
else:
module = import_module(package)
msg = "Calling exposers in method package {}".format(package)
logging.getLogger(__name__).debug(msg)
if hasattr(module, '__exposer__'):
getattr(module, '__exposer__')()
| """
:mod:`zsl.interface.webservice.performers.method`
-------------------------------------------------
.. moduleauthor:: Martin Babka
"""
from __future__ import unicode_literals
import logging
from importlib import import_module
import sys
from zsl.router.method import get_method_packages
def call_exposers_in_method_packages():
    """Run the ``__exposer__`` hook of every configured method package.

    Packages that are already imported are given a chance to refresh
    themselves via an optional ``__reloader__`` hook; everything else is
    imported fresh before its exposer is called.
    """
    logger = logging.getLogger(__name__)
    for pkg_name in get_method_packages():
        if pkg_name in sys.modules:
            # Already imported - let the package refresh itself if it can.
            module = sys.modules[pkg_name]
            if hasattr(module, '__reloader__'):
                module.__reloader__()
        else:
            module = import_module(pkg_name)

        logger.debug(
            "Calling exposers in method package {}".format(pkg_name))
        if hasattr(module, '__exposer__'):
            module.__exposer__()
| Remove the unused import and fix testing library | Remove the unused import and fix testing library
| Python | mit | AtteqCom/zsl,AtteqCom/zsl |
497990c526add919dc31965b0afd49d86ace49cf | models.py | models.py | import datetime
import mongoengine
from mongoengine.django import auth
from piplmesh.account import fields
class User(auth.User):
birthdate = fields.LimitedDateTimeField(upper_limit=datetime.datetime.today(), lower_limit=datetime.datetime.today() - datetime.timedelta(366 * 120))
gender = fields.GenderField()
language = fields.LanguageField()
facebook_id = mongoengine.IntField()
facebook_token = mongoengine.StringField(max_length=150)
| import datetime
import mongoengine
from mongoengine.django import auth
from piplmesh.account import fields
class User(auth.User):
    """Site user profile extending mongoengine's Django auth user."""

    # Birth date must not be in the future and must fall within roughly
    # the last 120 years (366 * 120 days).
    birthdate = fields.LimitedDateTimeField(upper_limit=datetime.date.today(), lower_limit=datetime.date.today() - datetime.timedelta(366 * 120))
    gender = fields.GenderField()
    language = fields.LanguageField()

    # Facebook account linkage; token is the OAuth access token string.
    facebook_id = mongoengine.IntField()
    facebook_token = mongoengine.StringField(max_length=150)
| Change date's limits format to datetime.date. | Change date's limits format to datetime.date.
| Python | agpl-3.0 | mitar/django-mongo-auth,mitar/django-mongo-auth,mitar/django-mongo-auth |
392bdf5845be19ece8f582f79caf2d09a0af0dfb | manage.py | manage.py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "apps.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| #!/usr/bin/env python
# manage.py script of cronos
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "apps.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| Add header, needed for the upcoming changes in the update_cronos.sh script | Add header, needed for the upcoming changes in the update_cronos.sh
script
| Python | agpl-3.0 | LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr |
6891edfe6228654940808a93fd36bfa6d24ae935 | marionette/tor_browser_tests/test_screenshots.py | marionette/tor_browser_tests/test_screenshots.py | from marionette_driver import By
from marionette_driver.errors import MarionetteException
from marionette_harness import MarionetteTestCase
import testsuite
class Test(MarionetteTestCase):
def setUp(self):
MarionetteTestCase.setUp(self)
ts = testsuite.TestSuite()
self.ts = ts
self.URLs = [
'chrome://torlauncher/content/network-settings-wizard.xul',
];
def test_check_tpo(self):
marionette = self.marionette
with marionette.using_context('content'):
marionette.navigate("http://check.torproject.org")
self.ts.screenshot(marionette, full=True)
with marionette.using_context('content'):
for url in self.URLs:
marionette.navigate(url)
self.ts.screenshot(marionette)
| from marionette_driver import By
from marionette_driver.errors import MarionetteException
from marionette_harness import MarionetteTestCase
import testsuite
class Test(MarionetteTestCase):
    """Captures reference screenshots of check.torproject.org and of the
    Tor Launcher network settings wizard chrome page."""

    def setUp(self):
        MarionetteTestCase.setUp(self)

        # Shared test-suite helper; provides the screenshot() utility.
        ts = testsuite.TestSuite()
        self.ts = ts
        # Internal chrome URLs to capture in addition to the check page.
        self.URLs = [
            'chrome://torlauncher/content/network-settings-wizard.xhtml',
        ];

    def test_check_tpo(self):
        marionette = self.marionette

        with marionette.using_context('content'):
            marionette.navigate("http://check.torproject.org")
            # Full-page screenshot of the check page.
            self.ts.screenshot(marionette, full=True)

        with marionette.using_context('content'):
            for url in self.URLs:
                marionette.navigate(url)
                self.ts.screenshot(marionette)
| Fix url for screenshot test | Fix url for screenshot test
| Python | bsd-3-clause | boklm/tbb-testsuite,boklm/tbb-testsuite,boklm/tbb-testsuite,boklm/tbb-testsuite,boklm/tbb-testsuite |
6dfd6a4ae687dc9c7567c74a6c3ef3bd0f9dc5a1 | ci_scripts/buildLinuxWheels.py | ci_scripts/buildLinuxWheels.py | from subprocess import call, check_output
import sys
import os
isPython3 = sys.version_info.major == 3
# https://stackoverflow.com/a/3357357
command = 'git log --format=%B -n 1'.split()
out = check_output(command)
if b'build wheels' not in out.lower() or not isPython3:
exit(0)
path = os.path.abspath(sys.argv[1])
call('pip install cibuildwheel==0.7.0'.split())
call('cibuildwheel --output-dir {}'.format(sys.argv[1]).split())
from dropboxUpload import uploadAll
uploadAll(path)
| from subprocess import call, check_output
import sys
import os

# Wheels are only built when running under a Python 3 interpreter.
isPython3 = sys.version_info.major == 3

# Read the message of the most recent commit.
# https://stackoverflow.com/a/3357357
command = 'git log --format=%B -n 1'.split()
out = check_output(command)
# Only proceed when the commit message opts in with 'build wheels'.
if b'build wheels' not in out.lower() or not isPython3:
    exit(0)

# Output directory for the built wheels (first CLI argument).
path = os.path.abspath(sys.argv[1])

call('pip install cibuildwheel==0.7.0'.split())
call('cibuildwheel --output-dir {}'.format(sys.argv[1]).split())
# dropbox must be installed before dropboxUpload can be imported below.
call('pip install dropbox'.split())
from dropboxUpload import uploadAll
uploadAll(path)
| Fix build wheels and upload 5. | Fix build wheels and upload 5.
| Python | bsd-3-clause | jr-garcia/AssimpCy,jr-garcia/AssimpCy |
6d291571dca59243c0a92f9955776e1acd2e87da | falmer/content/queries.py | falmer/content/queries.py | import graphene
from django.http import Http404
from graphql import GraphQLError
from wagtail.core.models import Page
from . import types
class Query(graphene.ObjectType):
page = graphene.Field(types.Page, path=graphene.String())
all_pages = graphene.List(types.Page, path=graphene.String())
def resolve_page(self, info, **kwargs):
path = kwargs.get('path')
path = path[1:] if path.startswith('/') else path
path = path[:-1] if path.endswith('/') else path
root_page = info.context.site.root_page
try:
result = root_page.route(info.context, path.split('/'))
return result.page
except Http404:
raise GraphQLError(f'404: Page not found for {path}')
def resolve_all_pages(self, info):
return Page.objects.specific().live()
| import graphene
from django.http import Http404
from graphql import GraphQLError
from wagtail.core.models import Page
from . import types
class Query(graphene.ObjectType):
    """GraphQL root query exposing Wagtail pages."""

    page = graphene.Field(types.Page, path=graphene.String())
    all_pages = graphene.List(types.Page, path=graphene.String())

    def resolve_page(self, info, **kwargs):
        # Normalize the requested path: strip a single leading and a
        # single trailing slash so it matches Wagtail's routing segments.
        path = kwargs.get('path')
        path = path[1:] if path.startswith('/') else path
        path = path[:-1] if path.endswith('/') else path

        root_page = info.context.site.root_page

        try:
            # route() walks the page tree; RouteResult.page is the match.
            result = root_page.route(info.context, path.split('/'))
            return result.page
        except Http404:
            # Unknown path: resolve to null instead of erroring the query.
            return None

    def resolve_all_pages(self, info):
        # All live pages, downcast to their most specific page subclass.
        return Page.objects.specific().live()
| Return empty result rather than graphql error | Return empty result rather than graphql error
| Python | mit | sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer |
819ed0ededbdc8ebe150e5ce9f507c9607e2b724 | greins/__init__.py | greins/__init__.py | version_info = (0, 2, 0)
__version__ = ".".join(map(str, version_info))
| version_info = (0, 1, 0)
__version__ = ".".join(map(str, version_info))
| Revert "bump version" -- Not ready to release yet | Revert "bump version" -- Not ready to release yet
This reverts commit 60e383ce1e4432c360e615598813e3b1747befb8.
| Python | mit | meebo/greins,meebo/greins,harrisonfeng/greins,harrisonfeng/greins |
af2885d2bc9d2dfefd39e5d1dab53da137c793c2 | builders/horizons_telnet.py | builders/horizons_telnet.py | #!/usr/bin/env python2.7
#import argparse
from telnetlib import Telnet
def main(in_path, out_path):
with open(in_path) as f:
lines = f.read().split('\n')
tn = Telnet('horizons.jpl.nasa.gov', 6775)
out = open(out_path, 'w')
for line in lines:
print(repr(line))
tn.write(line + '\r\n')
data = tn.read_until('DUMMY PATTERN', 2.0)
print(data)
out.write(data)
out.flush()
if __name__ == '__main__':
try:
main('horizons-input.txt', 'horizons-output.txt')
except EOFError:
print
print('EOF')
| #!/usr/bin/env python2.7
#import argparse
from telnetlib import Telnet
def main(in_path, out_path):
    """Replay each line of *in_path* to the JPL HORIZONS telnet service.

    Every line is sent to the server, and whatever the server returns
    within a two-second window is echoed to stdout and appended to
    *out_path*.

    :param in_path: file containing one HORIZONS command per line
    :param out_path: file that receives the raw server responses
    """
    with open(in_path) as f:
        lines = f.read().split('\n')

    tn = Telnet('horizons.jpl.nasa.gov', 6775)
    try:
        # 'with' guarantees the output file is closed even when the telnet
        # session raises (the original leaked the handle).
        with open(out_path, 'w') as out:
            for line in lines:
                print(repr(line))
                tn.write(line + '\r\n')
                # 'DUMMY PATTERN' never matches, so this simply collects
                # everything the server sends within the 2 second timeout.
                data = tn.read_until('DUMMY PATTERN', 2.0)
                print(data)
                out.write(data)
                out.flush()
    finally:
        # Close the telnet connection instead of leaking the socket.
        tn.close()
if __name__ == '__main__':
try:
main('horizons_input.txt', 'horizons_output.txt')
except EOFError:
print
print('EOF')
| Fix filename error in HORIZONS telnet script | Fix filename error in HORIZONS telnet script
| Python | mit | GuidoBR/python-skyfield,exoanalytic/python-skyfield,ozialien/python-skyfield,exoanalytic/python-skyfield,skyfielders/python-skyfield,skyfielders/python-skyfield,ozialien/python-skyfield,GuidoBR/python-skyfield |
bf2502fc45854db8ce7666c9fa511d487eccfb2e | pavement.py | pavement.py | from paver.easy import task, needs, path, sh, cmdopts
from paver.setuputils import setup, install_distutils_tasks, find_package_data
from distutils.extension import Extension
from optparse import make_option
from Cython.Build import cythonize
import version
pyx_files = ['si_prefix/si_prefix.pyx']
ext_modules = [Extension(f[:-4].replace('/', '.'), [f],
extra_compile_args=['-O3'],
include_dirs=['cythrust'])
for f in pyx_files]
ext_modules = cythonize(ext_modules)
setup(name='si_prefix',
version=version.getVersion(),
description='Functions for formatting numbers according to SI standards.',
keywords='si prefix format number precision',
author='Christian Fobel',
url='https://github.com/cfobel/si_prefix',
license='GPL',
packages=['si_prefix'],
package_data=find_package_data('si_prefix', package='si_prefix',
only_in_packages=False),
ext_modules=ext_modules)
@task
@needs('build_ext', 'generate_setup', 'minilib', 'setuptools.command.sdist')
def sdist():
"""Overrides sdist to make sure that our setup.py is generated."""
pass
| from paver.easy import task, needs, path, sh, cmdopts
from paver.setuputils import setup, install_distutils_tasks, find_package_data
from distutils.extension import Extension
from optparse import make_option
from Cython.Build import cythonize
import version
pyx_files = ['si_prefix/si_prefix.pyx']
ext_modules = [Extension(f[:-4].replace('/', '.'), [f],
extra_compile_args=['-O3'],
include_dirs=['cythrust'])
for f in pyx_files]
ext_modules = cythonize(ext_modules)
setup(name='si-prefix',
version=version.getVersion(),
description='Functions for formatting numbers according to SI standards.',
keywords='si prefix format number precision',
author='Christian Fobel',
url='https://github.com/cfobel/si_prefix',
license='GPL',
packages=['si_prefix'],
package_data=find_package_data('si_prefix', package='si_prefix',
only_in_packages=False),
ext_modules=ext_modules)
@task
@needs('build_ext', 'generate_setup', 'minilib', 'setuptools.command.sdist')
def sdist():
"""Overrides sdist to make sure that our setup.py is generated."""
pass
| Rename package "si_prefix" to "si-prefix" | Rename package "si_prefix" to "si-prefix"
| Python | bsd-3-clause | cfobel/si-prefix |
d63905158f5148b07534e823d271326262369d42 | pavement.py | pavement.py | import os
import re
from paver.easy import *
from paver.setuputils import setup
def get_version():
"""
Grab the version from irclib.py.
"""
here = os.path.dirname(__file__)
irclib = os.path.join(here, 'irclib.py')
with open(irclib) as f:
content = f.read()
VERSION = eval(re.search('VERSION = (.*)', content).group(1))
VERSION = '.'.join(map(str, VERSION))
return VERSION
def read_long_description():
f = open('README')
try:
data = f.read()
finally:
f.close()
return data
setup(
name="python-irclib",
description="IRC (Internet Relay Chat) protocol client library for Python",
long_description=read_long_description(),
version=get_version(),
py_modules=["irclib", "ircbot"],
author="Joel Rosdahl",
author_email="[email protected]",
maintainer="Jason R. Coombs",
maintainer_email="[email protected]",
url="http://python-irclib.sourceforge.net",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
],
)
@task
@needs('generate_setup', 'minilib', 'distutils.command.sdist')
def sdist():
"Override sdist to make sure the setup.py gets generated"
| import os
import re
from paver.easy import *
from paver.setuputils import setup
def get_version():
    """
    Grab the version from irclib.py.

    Reads the ``VERSION = (x, y, z)`` tuple out of irclib.py (which lives
    next to this file) and returns it as a dotted string like ``'x.y.z'``.
    """
    import ast  # local import: only needed by this helper

    here = os.path.dirname(__file__)
    irclib = os.path.join(here, 'irclib.py')
    with open(irclib) as f:
        content = f.read()
    # Parse the literal tuple safely instead of eval()-ing arbitrary code
    # found in the scanned source file.
    version = ast.literal_eval(re.search('VERSION = (.*)', content).group(1))
    return '.'.join(map(str, version))
def read_long_description():
    """Return the full text of the README file as the long description."""
    with open('README') as readme:
        return readme.read()
setup(
name="python-irclib",
description="IRC (Internet Relay Chat) protocol client library for Python",
long_description=read_long_description(),
version=get_version(),
py_modules=["irclib", "ircbot"],
author="Joel Rosdahl",
author_email="[email protected]",
maintainer="Jason R. Coombs",
maintainer_email="[email protected]",
url="http://python-irclib.sourceforge.net",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
],
)
@task
@needs('generate_setup', 'minilib', 'distutils.command.sdist')
def sdist():
"Override sdist to make sure the setup.py gets generated"
| Use context manager to read README | Use context manager to read README
| Python | mit | jaraco/irc |
06d1039ccbf4653c2f285528b2ab058edca2ff1f | py/test/selenium/webdriver/common/proxy_tests.py | py/test/selenium/webdriver/common/proxy_tests.py | #!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from selenium.webdriver.common.proxy import Proxy
class ProxyTests(unittest.TestCase):
def testCanAddToDesiredCapabilities(self):
desired_capabilities = {}
proxy = Proxy()
proxy.http_proxy = 'some.url:1234'
proxy.add_to_capabilities(desired_capabilities)
expected_capabilities = {
'proxy': {
'proxyType': 'manual',
'httpProxy': 'some.url:1234'
}
}
self.assertEqual(expected_capabilities, desired_capabilities)
| #!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from selenium.webdriver.common.proxy import Proxy
class ProxyTests(unittest.TestCase):
    """Unit tests for selenium.webdriver.common.proxy.Proxy."""

    def testCanAddToDesiredCapabilities(self):
        """A manual HTTP proxy must serialize itself into capabilities."""
        proxy = Proxy()
        proxy.http_proxy = 'some.url:1234'

        capabilities = {}
        proxy.add_to_capabilities(capabilities)

        expected = {
            'proxy': {
                'proxyType': 'MANUAL',
                'httpProxy': 'some.url:1234',
            }
        }
        self.assertEqual(expected, capabilities)
| Fix test as well :) | DanielWagnerHall: Fix test as well :)
r17825
| Python | apache-2.0 | misttechnologies/selenium,markodolancic/selenium,uchida/selenium,yukaReal/selenium,mestihudson/selenium,alb-i986/selenium,jabbrwcky/selenium,krmahadevan/selenium,jabbrwcky/selenium,AutomatedTester/selenium,s2oBCN/selenium,asolntsev/selenium,twalpole/selenium,o-schneider/selenium,jsakamoto/selenium,compstak/selenium,tkurnosova/selenium,carlosroh/selenium,rovner/selenium,temyers/selenium,thanhpete/selenium,chrisblock/selenium,blueyed/selenium,actmd/selenium,chrisblock/selenium,blackboarddd/selenium,MCGallaspy/selenium,krosenvold/selenium,oddui/selenium,freynaud/selenium,dcjohnson1989/selenium,carlosroh/selenium,5hawnknight/selenium,temyers/selenium,xmhubj/selenium,Herst/selenium,lummyare/lummyare-lummy,pulkitsinghal/selenium,dkentw/selenium,jerome-jacob/selenium,mach6/selenium,SevInf/IEDriver,anshumanchatterji/selenium,zenefits/selenium,asashour/selenium,juangj/selenium,blueyed/selenium,oddui/selenium,alb-i986/selenium,Herst/selenium,oddui/selenium,Ardesco/selenium,Jarob22/selenium,RamaraoDonta/ramarao-clone,aluedeke/chromedriver,lrowe/selenium,krmahadevan/selenium,Jarob22/selenium,valfirst/selenium,Ardesco/selenium,krosenvold/selenium,aluedeke/chromedriver,zenefits/selenium,jsarenik/jajomojo-selenium,AutomatedTester/selenium,krmahadevan/selenium,livioc/selenium,petruc/selenium,TikhomirovSergey/selenium,jknguyen/josephknguyen-selenium,dimacus/selenium,MCGallaspy/selenium,skurochkin/selenium,Appdynamics/selenium,jsakamoto/selenium,lmtierney/selenium,titusfortner/selenium,sevaseva/selenium,joshmgrant/selenium,valfirst/selenium,dibagga/selenium,DrMarcII/selenium,p0deje/selenium,amar-sharma/selenium,dimacus/selenium,jsarenik/jajomojo-selenium,vveliev/selenium,isaksky/selenium,gotcha/selenium,lummyare/lummyare-test,stupidnetizen/selenium,tkurnosova/selenium,temyers/selenium,oddui/selenium,5hawnknight/selenium,yukaReal/selenium,JosephCastro/selenium,arunsingh/selenium,TheBlackTuxCorp/selenium,slongwang/selenium,isaksky/selenium,MeetMe/selenium,asolnt
sev/selenium,pulkitsinghal/selenium,amikey/selenium,o-schneider/selenium,o-schneider/selenium,jsakamoto/selenium,oddui/selenium,bartolkaruza/selenium,gabrielsimas/selenium,doungni/selenium,wambat/selenium,gotcha/selenium,dandv/selenium,aluedeke/chromedriver,sebady/selenium,GorK-ChO/selenium,amar-sharma/selenium,gorlemik/selenium,xmhubj/selenium,anshumanchatterji/selenium,bmannix/selenium,knorrium/selenium,tarlabs/selenium,aluedeke/chromedriver,rovner/selenium,JosephCastro/selenium,joshbruning/selenium,sri85/selenium,i17c/selenium,Herst/selenium,kalyanjvn1/selenium,bayandin/selenium,juangj/selenium,uchida/selenium,mestihudson/selenium,Dude-X/selenium,AutomatedTester/selenium,carsonmcdonald/selenium,gurayinan/selenium,lukeis/selenium,tbeadle/selenium,jknguyen/josephknguyen-selenium,dimacus/selenium,markodolancic/selenium,pulkitsinghal/selenium,valfirst/selenium,livioc/selenium,customcommander/selenium,xsyntrex/selenium,mestihudson/selenium,markodolancic/selenium,twalpole/selenium,manuelpirez/selenium,isaksky/selenium,kalyanjvn1/selenium,compstak/selenium,dbo/selenium,sankha93/selenium,mach6/selenium,skurochkin/selenium,freynaud/selenium,dandv/selenium,alexec/selenium,alb-i986/selenium,asolntsev/selenium,DrMarcII/selenium,bayandin/selenium,sebady/selenium,chrsmithdemos/selenium,TikhomirovSergey/selenium,sri85/selenium,vveliev/selenium,lummyare/lummyare-lummy,sag-enorman/selenium,doungni/selenium,lilredindy/selenium,Sravyaksr/selenium,freynaud/selenium,anshumanchatterji/selenium,gurayinan/selenium,dimacus/selenium,lmtierney/selenium,telefonicaid/selenium,TikhomirovSergey/selenium,Dude-X/selenium,sag-enorman/selenium,blackboarddd/selenium,orange-tv-blagnac/selenium,chrsmithdemos/selenium,manuelpirez/selenium,oddui/selenium,MCGallaspy/selenium,slongwang/selenium,Jarob22/selenium,asolntsev/selenium,onedox/selenium,arunsingh/selenium,sri85/selenium,lilredindy/selenium,JosephCastro/selenium,Jarob22/selenium,amikey/selenium,TheBlackTuxCorp/selenium,dcjohnson1989/selenium,carl
osroh/selenium,clavery/selenium,Ardesco/selenium,bartolkaruza/selenium,p0deje/selenium,vveliev/selenium,AutomatedTester/selenium,denis-vilyuzhanin/selenium-fastview,5hawnknight/selenium,compstak/selenium,asolntsev/selenium,lummyare/lummyare-test,vinay-qa/vinayit-android-server-apk,orange-tv-blagnac/selenium,livioc/selenium,joshmgrant/selenium,carsonmcdonald/selenium,zenefits/selenium,houchj/selenium,carlosroh/selenium,compstak/selenium,manuelpirez/selenium,lummyare/lummyare-test,joshbruning/selenium,aluedeke/chromedriver,minhthuanit/selenium,houchj/selenium,gotcha/selenium,meksh/selenium,AutomatedTester/selenium,compstak/selenium,bartolkaruza/selenium,dibagga/selenium,mach6/selenium,chrsmithdemos/selenium,JosephCastro/selenium,dibagga/selenium,xsyntrex/selenium,mach6/selenium,sevaseva/selenium,anshumanchatterji/selenium,davehunt/selenium,blackboarddd/selenium,vinay-qa/vinayit-android-server-apk,dbo/selenium,onedox/selenium,carlosroh/selenium,tbeadle/selenium,blackboarddd/selenium,SeleniumHQ/selenium,SevInf/IEDriver,xsyntrex/selenium,dimacus/selenium,MCGallaspy/selenium,onedox/selenium,actmd/selenium,BlackSmith/selenium,tkurnosova/selenium,sag-enorman/selenium,stupidnetizen/selenium,meksh/selenium,lummyare/lummyare-lummy,gregerrag/selenium,krmahadevan/selenium,AutomatedTester/selenium,SevInf/IEDriver,sankha93/selenium,s2oBCN/selenium,5hawnknight/selenium,mojwang/selenium,RamaraoDonta/ramarao-clone,s2oBCN/selenium,mojwang/selenium,gabrielsimas/selenium,denis-vilyuzhanin/selenium-fastview,rplevka/selenium,o-schneider/selenium,blueyed/selenium,actmd/selenium,eric-stanley/selenium,joshbruning/selenium,sevaseva/selenium,bmannix/selenium,dcjohnson1989/selenium,davehunt/selenium,dbo/selenium,Appdynamics/selenium,JosephCastro/selenium,xmhubj/selenium,lukeis/selenium,rrussell39/selenium,slongwang/selenium,Dude-X/selenium,dibagga/selenium,MCGallaspy/selenium,TheBlackTuxCorp/selenium,dimacus/selenium,Herst/selenium,i17c/selenium,TheBlackTuxCorp/selenium,kalyanjvn1/selenium,lrow
e/selenium,o-schneider/selenium,krmahadevan/selenium,dkentw/selenium,amikey/selenium,chrsmithdemos/selenium,sevaseva/selenium,Sravyaksr/selenium,davehunt/selenium,uchida/selenium,lummyare/lummyare-test,MeetMe/selenium,lilredindy/selenium,5hawnknight/selenium,krosenvold/selenium,gabrielsimas/selenium,stupidnetizen/selenium,manuelpirez/selenium,SouWilliams/selenium,doungni/selenium,amikey/selenium,livioc/selenium,minhthuanit/selenium,BlackSmith/selenium,lrowe/selenium,RamaraoDonta/ramarao-clone,gemini-testing/selenium,lmtierney/selenium,onedox/selenium,Appdynamics/selenium,customcommander/selenium,xsyntrex/selenium,compstak/selenium,zenefits/selenium,titusfortner/selenium,SeleniumHQ/selenium,rovner/selenium,twalpole/selenium,orange-tv-blagnac/selenium,Sravyaksr/selenium,jknguyen/josephknguyen-selenium,SevInf/IEDriver,bartolkaruza/selenium,Dude-X/selenium,onedox/selenium,TikhomirovSergey/selenium,jsakamoto/selenium,p0deje/selenium,meksh/selenium,oddui/selenium,isaksky/selenium,stupidnetizen/selenium,stupidnetizen/selenium,titusfortner/selenium,SeleniumHQ/selenium,vveliev/selenium,dibagga/selenium,joshmgrant/selenium,lilredindy/selenium,mestihudson/selenium,blackboarddd/selenium,lrowe/selenium,tarlabs/selenium,gregerrag/selenium,blueyed/selenium,jabbrwcky/selenium,vveliev/selenium,TheBlackTuxCorp/selenium,lilredindy/selenium,valfirst/selenium,Tom-Trumper/selenium,temyers/selenium,meksh/selenium,freynaud/selenium,TikhomirovSergey/selenium,clavery/selenium,temyers/selenium,sag-enorman/selenium,Appdynamics/selenium,asashour/selenium,SeleniumHQ/selenium,gabrielsimas/selenium,mojwang/selenium,blueyed/selenium,Dude-X/selenium,alb-i986/selenium,SouWilliams/selenium,petruc/selenium,s2oBCN/selenium,bmannix/selenium,davehunt/selenium,lummyare/lummyare-lummy,jerome-jacob/selenium,p0deje/selenium,gabrielsimas/selenium,doungni/selenium,rovner/selenium,oddui/selenium,sri85/selenium,mach6/selenium,onedox/selenium,lmtierney/selenium,vinay-qa/vinayit-android-server-apk,dandv/selenium,Ti
khomirovSergey/selenium,arunsingh/selenium,valfirst/selenium,rplevka/selenium,lilredindy/selenium,gorlemik/selenium,gorlemik/selenium,rrussell39/selenium,eric-stanley/selenium,gemini-testing/selenium,bayandin/selenium,misttechnologies/selenium,gurayinan/selenium,p0deje/selenium,houchj/selenium,RamaraoDonta/ramarao-clone,tarlabs/selenium,lilredindy/selenium,Herst/selenium,gregerrag/selenium,houchj/selenium,bayandin/selenium,dandv/selenium,SouWilliams/selenium,amikey/selenium,clavery/selenium,chrsmithdemos/selenium,SeleniumHQ/selenium,thanhpete/selenium,xsyntrex/selenium,arunsingh/selenium,Tom-Trumper/selenium,tbeadle/selenium,customcommander/selenium,mach6/selenium,zenefits/selenium,pulkitsinghal/selenium,jsarenik/jajomojo-selenium,mojwang/selenium,carsonmcdonald/selenium,zenefits/selenium,sri85/selenium,dcjohnson1989/selenium,livioc/selenium,Tom-Trumper/selenium,sankha93/selenium,clavery/selenium,kalyanjvn1/selenium,gemini-testing/selenium,xmhubj/selenium,soundcloud/selenium,temyers/selenium,dcjohnson1989/selenium,tarlabs/selenium,DrMarcII/selenium,Jarob22/selenium,eric-stanley/selenium,alexec/selenium,carsonmcdonald/selenium,anshumanchatterji/selenium,quoideneuf/selenium,i17c/selenium,yukaReal/selenium,bmannix/selenium,amar-sharma/selenium,actmd/selenium,bartolkaruza/selenium,gregerrag/selenium,gemini-testing/selenium,Sravyaksr/selenium,mojwang/selenium,krmahadevan/selenium,joshmgrant/selenium,lrowe/selenium,rovner/selenium,blueyed/selenium,asashour/selenium,s2oBCN/selenium,kalyanjvn1/selenium,lummyare/lummyare-lummy,tarlabs/selenium,markodolancic/selenium,chrsmithdemos/selenium,alexec/selenium,minhthuanit/selenium,asashour/selenium,misttechnologies/selenium,titusfortner/selenium,xsyntrex/selenium,i17c/selenium,skurochkin/selenium,manuelpirez/selenium,quoideneuf/selenium,slongwang/selenium,HtmlUnit/selenium,isaksky/selenium,compstak/selenium,BlackSmith/selenium,Appdynamics/selenium,wambat/selenium,tbeadle/selenium,dimacus/selenium,SeleniumHQ/selenium,xmhubj/seleniu
m,DrMarcII/selenium,lrowe/selenium,joshbruning/selenium,joshbruning/selenium,MCGallaspy/selenium,bayandin/selenium,joshmgrant/selenium,bmannix/selenium,JosephCastro/selenium,s2oBCN/selenium,MCGallaspy/selenium,slongwang/selenium,sankha93/selenium,tkurnosova/selenium,isaksky/selenium,juangj/selenium,krmahadevan/selenium,GorK-ChO/selenium,minhthuanit/selenium,joshuaduffy/selenium,rrussell39/selenium,uchida/selenium,chrsmithdemos/selenium,davehunt/selenium,Jarob22/selenium,titusfortner/selenium,rplevka/selenium,Dude-X/selenium,minhthuanit/selenium,lummyare/lummyare-lummy,freynaud/selenium,RamaraoDonta/ramarao-clone,dandv/selenium,Herst/selenium,amar-sharma/selenium,bartolkaruza/selenium,s2oBCN/selenium,o-schneider/selenium,isaksky/selenium,knorrium/selenium,MeetMe/selenium,dibagga/selenium,onedox/selenium,bmannix/selenium,DrMarcII/selenium,alexec/selenium,HtmlUnit/selenium,rovner/selenium,mestihudson/selenium,customcommander/selenium,asashour/selenium,orange-tv-blagnac/selenium,GorK-ChO/selenium,skurochkin/selenium,telefonicaid/selenium,joshbruning/selenium,Sravyaksr/selenium,rplevka/selenium,houchj/selenium,isaksky/selenium,gemini-testing/selenium,vveliev/selenium,gabrielsimas/selenium,titusfortner/selenium,arunsingh/selenium,soundcloud/selenium,knorrium/selenium,rovner/selenium,sri85/selenium,chrisblock/selenium,titusfortner/selenium,blueyed/selenium,quoideneuf/selenium,dcjohnson1989/selenium,mestihudson/selenium,jknguyen/josephknguyen-selenium,jsakamoto/selenium,minhthuanit/selenium,Jarob22/selenium,sri85/selenium,quoideneuf/selenium,slongwang/selenium,rrussell39/selenium,GorK-ChO/selenium,Herst/selenium,Tom-Trumper/selenium,jknguyen/josephknguyen-selenium,dimacus/selenium,kalyanjvn1/selenium,gabrielsimas/selenium,thanhpete/selenium,davehunt/selenium,i17c/selenium,tkurnosova/selenium,BlackSmith/selenium,sankha93/selenium,rplevka/selenium,yukaReal/selenium,i17c/selenium,krosenvold/selenium,alb-i986/selenium,lummyare/lummyare-test,sebady/selenium,thanhpete/selenium,xm
hubj/selenium,carsonmcdonald/selenium,DrMarcII/selenium,livioc/selenium,soundcloud/selenium,wambat/selenium,vinay-qa/vinayit-android-server-apk,joshmgrant/selenium,lrowe/selenium,twalpole/selenium,markodolancic/selenium,quoideneuf/selenium,davehunt/selenium,jknguyen/josephknguyen-selenium,gorlemik/selenium,RamaraoDonta/ramarao-clone,jsarenik/jajomojo-selenium,uchida/selenium,pulkitsinghal/selenium,customcommander/selenium,joshuaduffy/selenium,mach6/selenium,dkentw/selenium,krmahadevan/selenium,houchj/selenium,blueyed/selenium,sri85/selenium,eric-stanley/selenium,dcjohnson1989/selenium,SevInf/IEDriver,gotcha/selenium,jsakamoto/selenium,amar-sharma/selenium,gemini-testing/selenium,dandv/selenium,alb-i986/selenium,rrussell39/selenium,knorrium/selenium,lmtierney/selenium,minhthuanit/selenium,anshumanchatterji/selenium,gotcha/selenium,Ardesco/selenium,vveliev/selenium,asashour/selenium,gotcha/selenium,Tom-Trumper/selenium,alexec/selenium,tbeadle/selenium,orange-tv-blagnac/selenium,manuelpirez/selenium,xsyntrex/selenium,soundcloud/selenium,JosephCastro/selenium,i17c/selenium,yukaReal/selenium,jsarenik/jajomojo-selenium,titusfortner/selenium,actmd/selenium,jerome-jacob/selenium,vinay-qa/vinayit-android-server-apk,SeleniumHQ/selenium,jknguyen/josephknguyen-selenium,dcjohnson1989/selenium,sag-enorman/selenium,twalpole/selenium,petruc/selenium,gurayinan/selenium,gurayinan/selenium,HtmlUnit/selenium,livioc/selenium,jabbrwcky/selenium,gregerrag/selenium,TheBlackTuxCorp/selenium,sevaseva/selenium,MeetMe/selenium,lummyare/lummyare-test,HtmlUnit/selenium,juangj/selenium,juangj/selenium,valfirst/selenium,o-schneider/selenium,freynaud/selenium,amar-sharma/selenium,HtmlUnit/selenium,dkentw/selenium,SouWilliams/selenium,tkurnosova/selenium,SeleniumHQ/selenium,Ardesco/selenium,arunsingh/selenium,vveliev/selenium,5hawnknight/selenium,chrsmithdemos/selenium,Appdynamics/selenium,Tom-Trumper/selenium,stupidnetizen/selenium,jerome-jacob/selenium,rrussell39/selenium,gabrielsimas/selenium,min
hthuanit/selenium,actmd/selenium,bartolkaruza/selenium,petruc/selenium,mestihudson/selenium,Sravyaksr/selenium,HtmlUnit/selenium,doungni/selenium,gorlemik/selenium,sevaseva/selenium,joshmgrant/selenium,chrisblock/selenium,sankha93/selenium,mojwang/selenium,bmannix/selenium,soundcloud/selenium,DrMarcII/selenium,HtmlUnit/selenium,lmtierney/selenium,dkentw/selenium,joshmgrant/selenium,arunsingh/selenium,meksh/selenium,titusfortner/selenium,pulkitsinghal/selenium,doungni/selenium,skurochkin/selenium,petruc/selenium,dbo/selenium,jsarenik/jajomojo-selenium,gregerrag/selenium,xmhubj/selenium,tbeadle/selenium,Dude-X/selenium,sankha93/selenium,manuelpirez/selenium,joshbruning/selenium,TheBlackTuxCorp/selenium,Appdynamics/selenium,doungni/selenium,jerome-jacob/selenium,asolntsev/selenium,sebady/selenium,5hawnknight/selenium,sri85/selenium,carlosroh/selenium,juangj/selenium,lilredindy/selenium,gorlemik/selenium,DrMarcII/selenium,telefonicaid/selenium,sebady/selenium,i17c/selenium,xsyntrex/selenium,knorrium/selenium,SouWilliams/selenium,juangj/selenium,gurayinan/selenium,chrsmithdemos/selenium,livioc/selenium,sankha93/selenium,bayandin/selenium,jsarenik/jajomojo-selenium,anshumanchatterji/selenium,customcommander/selenium,arunsingh/selenium,tbeadle/selenium,tbeadle/selenium,telefonicaid/selenium,tkurnosova/selenium,orange-tv-blagnac/selenium,xmhubj/selenium,zenefits/selenium,thanhpete/selenium,jabbrwcky/selenium,carlosroh/selenium,misttechnologies/selenium,gorlemik/selenium,lummyare/lummyare-test,markodolancic/selenium,TikhomirovSergey/selenium,eric-stanley/selenium,SouWilliams/selenium,BlackSmith/selenium,sag-enorman/selenium,krosenvold/selenium,rrussell39/selenium,dandv/selenium,Sravyaksr/selenium,joshuaduffy/selenium,aluedeke/chromedriver,gorlemik/selenium,anshumanchatterji/selenium,asolntsev/selenium,GorK-ChO/selenium,chrisblock/selenium,HtmlUnit/selenium,rovner/selenium,valfirst/selenium,slongwang/selenium,Ardesco/selenium,yukaReal/selenium,krosenvold/selenium,gotcha/selen
ium,MeetMe/selenium,quoideneuf/selenium,compstak/selenium,SevInf/IEDriver,p0deje/selenium,uchida/selenium,gabrielsimas/selenium,blackboarddd/selenium,joshuaduffy/selenium,mojwang/selenium,SeleniumHQ/selenium,p0deje/selenium,dibagga/selenium,compstak/selenium,misttechnologies/selenium,joshbruning/selenium,MCGallaspy/selenium,vinay-qa/vinayit-android-server-apk,rplevka/selenium,carsonmcdonald/selenium,dibagga/selenium,orange-tv-blagnac/selenium,sankha93/selenium,livioc/selenium,GorK-ChO/selenium,clavery/selenium,orange-tv-blagnac/selenium,clavery/selenium,tarlabs/selenium,kalyanjvn1/selenium,mestihudson/selenium,telefonicaid/selenium,tkurnosova/selenium,actmd/selenium,TheBlackTuxCorp/selenium,lilredindy/selenium,dkentw/selenium,denis-vilyuzhanin/selenium-fastview,alexec/selenium,meksh/selenium,clavery/selenium,asashour/selenium,vinay-qa/vinayit-android-server-apk,lmtierney/selenium,o-schneider/selenium,dkentw/selenium,carlosroh/selenium,telefonicaid/selenium,thanhpete/selenium,gurayinan/selenium,5hawnknight/selenium,alb-i986/selenium,jabbrwcky/selenium,arunsingh/selenium,bayandin/selenium,onedox/selenium,dkentw/selenium,petruc/selenium,petruc/selenium,freynaud/selenium,rrussell39/selenium,MCGallaspy/selenium,quoideneuf/selenium,Sravyaksr/selenium,jabbrwcky/selenium,juangj/selenium,Ardesco/selenium,rplevka/selenium,asashour/selenium,GorK-ChO/selenium,jsakamoto/selenium,markodolancic/selenium,joshuaduffy/selenium,pulkitsinghal/selenium,zenefits/selenium,s2oBCN/selenium,anshumanchatterji/selenium,BlackSmith/selenium,misttechnologies/selenium,misttechnologies/selenium,bayandin/selenium,gotcha/selenium,misttechnologies/selenium,mach6/selenium,xmhubj/selenium,chrisblock/selenium,skurochkin/selenium,RamaraoDonta/ramarao-clone,rplevka/selenium,sag-enorman/selenium,lukeis/selenium,meksh/selenium,telefonicaid/selenium,asashour/selenium,sebady/selenium,eric-stanley/selenium,petruc/selenium,Tom-Trumper/selenium,slongwang/selenium,AutomatedTester/selenium,jknguyen/josephknguyen-se
lenium,yukaReal/selenium,denis-vilyuzhanin/selenium-fastview,sag-enorman/selenium,i17c/selenium,joshmgrant/selenium,thanhpete/selenium,bartolkaruza/selenium,gregerrag/selenium,denis-vilyuzhanin/selenium-fastview,mach6/selenium,SouWilliams/selenium,lukeis/selenium,jsarenik/jajomojo-selenium,Herst/selenium,jabbrwcky/selenium,uchida/selenium,twalpole/selenium,carsonmcdonald/selenium,orange-tv-blagnac/selenium,knorrium/selenium,tkurnosova/selenium,stupidnetizen/selenium,gotcha/selenium,gregerrag/selenium,stupidnetizen/selenium,chrisblock/selenium,alexec/selenium,skurochkin/selenium,asolntsev/selenium,markodolancic/selenium,dbo/selenium,5hawnknight/selenium,telefonicaid/selenium,SeleniumHQ/selenium,davehunt/selenium,wambat/selenium,vinay-qa/vinayit-android-server-apk,alb-i986/selenium,tarlabs/selenium,asolntsev/selenium,amikey/selenium,isaksky/selenium,temyers/selenium,jknguyen/josephknguyen-selenium,jerome-jacob/selenium,dandv/selenium,lummyare/lummyare-lummy,amar-sharma/selenium,twalpole/selenium,houchj/selenium,joshmgrant/selenium,HtmlUnit/selenium,jerome-jacob/selenium,jsakamoto/selenium,Dude-X/selenium,jabbrwcky/selenium,BlackSmith/selenium,sebady/selenium,dibagga/selenium,blueyed/selenium,jsarenik/jajomojo-selenium,Ardesco/selenium,joshuaduffy/selenium,minhthuanit/selenium,pulkitsinghal/selenium,lummyare/lummyare-test,bmannix/selenium,MeetMe/selenium,MeetMe/selenium,soundcloud/selenium,aluedeke/chromedriver,thanhpete/selenium,alb-i986/selenium,gurayinan/selenium,rovner/selenium,lukeis/selenium,dbo/selenium,amikey/selenium,wambat/selenium,denis-vilyuzhanin/selenium-fastview,o-schneider/selenium,juangj/selenium,quoideneuf/selenium,twalpole/selenium,DrMarcII/selenium,vinay-qa/vinayit-android-server-apk,gemini-testing/selenium,SeleniumHQ/selenium,temyers/selenium,actmd/selenium,bartolkaruza/selenium,Appdynamics/selenium,meksh/selenium,alexec/selenium,mojwang/selenium,SouWilliams/selenium,denis-vilyuzhanin/selenium-fastview,lummyare/lummyare-lummy,valfirst/selenium,tarl
abs/selenium,wambat/selenium,bayandin/selenium,krmahadevan/selenium,AutomatedTester/selenium,GorK-ChO/selenium,skurochkin/selenium,petruc/selenium,actmd/selenium,titusfortner/selenium,vveliev/selenium,chrisblock/selenium,freynaud/selenium,knorrium/selenium,skurochkin/selenium,dcjohnson1989/selenium,RamaraoDonta/ramarao-clone,sevaseva/selenium,houchj/selenium,gurayinan/selenium,krosenvold/selenium,lummyare/lummyare-lummy,blackboarddd/selenium,freynaud/selenium,TheBlackTuxCorp/selenium,Tom-Trumper/selenium,blackboarddd/selenium,jsakamoto/selenium,Appdynamics/selenium,customcommander/selenium,rrussell39/selenium,lukeis/selenium,kalyanjvn1/selenium,amikey/selenium,manuelpirez/selenium,dandv/selenium,aluedeke/chromedriver,wambat/selenium,RamaraoDonta/ramarao-clone,SevInf/IEDriver,SevInf/IEDriver,dbo/selenium,doungni/selenium,lmtierney/selenium,SouWilliams/selenium,Jarob22/selenium,blackboarddd/selenium,soundcloud/selenium,sebady/selenium,carsonmcdonald/selenium,oddui/selenium,twalpole/selenium,knorrium/selenium,tbeadle/selenium,TikhomirovSergey/selenium,xsyntrex/selenium,p0deje/selenium,meksh/selenium,manuelpirez/selenium,knorrium/selenium,uchida/selenium,aluedeke/chromedriver,sag-enorman/selenium,davehunt/selenium,Tom-Trumper/selenium,lukeis/selenium,eric-stanley/selenium,AutomatedTester/selenium,jerome-jacob/selenium,valfirst/selenium,wambat/selenium,customcommander/selenium,joshuaduffy/selenium,Sravyaksr/selenium,clavery/selenium,Herst/selenium,dbo/selenium,p0deje/selenium,doungni/selenium,dbo/selenium,MeetMe/selenium,lukeis/selenium,clavery/selenium,BlackSmith/selenium,HtmlUnit/selenium,eric-stanley/selenium,joshmgrant/selenium,valfirst/selenium,lrowe/selenium,sevaseva/selenium,valfirst/selenium,joshuaduffy/selenium,customcommander/selenium,soundcloud/selenium,MeetMe/selenium,gregerrag/selenium,temyers/selenium,JosephCastro/selenium,pulkitsinghal/selenium,dkentw/selenium,quoideneuf/selenium,joshbruning/selenium,TikhomirovSergey/selenium,carsonmcdonald/selenium,telefo
nicaid/selenium,wambat/selenium,yukaReal/selenium,slongwang/selenium,sebady/selenium,stupidnetizen/selenium,eric-stanley/selenium,dimacus/selenium,joshuaduffy/selenium,denis-vilyuzhanin/selenium-fastview,onedox/selenium,amar-sharma/selenium,GorK-ChO/selenium,rplevka/selenium,titusfortner/selenium,soundcloud/selenium,mojwang/selenium,lrowe/selenium,lummyare/lummyare-test,denis-vilyuzhanin/selenium-fastview,krosenvold/selenium,bmannix/selenium,kalyanjvn1/selenium,misttechnologies/selenium,Jarob22/selenium,JosephCastro/selenium,gorlemik/selenium,Ardesco/selenium,thanhpete/selenium,gemini-testing/selenium,carlosroh/selenium,gemini-testing/selenium,lmtierney/selenium,alexec/selenium,BlackSmith/selenium,lukeis/selenium,markodolancic/selenium,houchj/selenium,yukaReal/selenium,jerome-jacob/selenium,krosenvold/selenium,zenefits/selenium,uchida/selenium,amar-sharma/selenium,SevInf/IEDriver,sevaseva/selenium,chrisblock/selenium,amikey/selenium,mestihudson/selenium,tarlabs/selenium,s2oBCN/selenium,Dude-X/selenium |
6453baefa8c2f6ab9841efd3961da0a65aaa688f | test/test_packages.py | test/test_packages.py | import pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("atom"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("gnupg2"),
("gnupg-agent"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("pavucontrol"),
("pinta"),
("pulseaudio"),
("pulseaudio-module-x11"),
("pulseaudio-utils"),
("python"),
("python-pip"),
("scrot"),
("software-properties-common"),
("suckless-tools"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(host, name):
pkg = host.package(name)
assert pkg.is_installed
| import pytest
@pytest.mark.parametrize("name", [
("apt-file"),
("apt-transport-https"),
("atom"),
("blktrace"),
("ca-certificates"),
("chromium-browser"),
("cron"),
("curl"),
("diod"),
("docker-ce"),
("fonts-font-awesome"),
("git"),
("gnupg"),
("gnupg2"),
("gnupg-agent"),
("handbrake"),
("handbrake-cli"),
("haveged"),
("htop"),
("i3"),
("iotop"),
("language-pack-en-base"),
("laptop-mode-tools"),
("nfs-common"),
("ntop"),
("ntp"),
("openssh-client"),
("openssh-server"),
("openssh-sftp-server"),
("openssl"),
("pavucontrol"),
("pinta"),
("pulseaudio"),
("pulseaudio-module-x11"),
("pulseaudio-utils"),
("python"),
("python-pip"),
("scrot"),
("software-properties-common"),
("suckless-tools"),
("sysdig"),
("sysstat"),
("tree"),
("vagrant"),
("vim"),
("virtualbox"),
("vlc"),
("wget"),
("whois"),
("x264"),
("xfce4-terminal"),
("xfonts-terminus"),
("xinit"),
])
def test_packages(host, name):
pkg = host.package(name)
assert pkg.is_installed
| Add a test for sysdig | Add a test for sysdig
| Python | mit | wicksy/laptop-build,wicksy/laptop-build,wicksy/laptop-build,wicksy/laptop-build |
ff50b3e43de0c083cd8c3daaa7644394daadc1a0 | test_passwd_change.py | test_passwd_change.py | #!/usr/bin/env python3
from passwd_change import passwd_change, shadow_change, mails_delete
from unittest import TestCase, TestLoader, TextTestRunner
import subprocess
class PasswdChange_Test(TestCase):
    """Exercises the passwd/shadow rewrite plus the mailbox cleanup helper."""

    def setUp(self):
        """Preconditions: a scratch ``test`` dir with one empty mailbox per user."""
        subprocess.call(['mkdir', 'test'])
        mailboxes = ['test/rvv', 'test/max', 'test/bdv', 'test/mail']
        subprocess.call(['touch'] + mailboxes)
        # TODO create passwd test file
        # TODO create shadow test file
        # TODO create keys.txt file

    def test_passwd_change(self):
        # Rewrite shadow entries from the updated passwd data, then purge mail.
        shadow_change(*passwd_change())
        mails_delete(maildir_path='test')

    def test_passwd_change_2(self):
        # Same flow, but the maildir path carries a trailing slash.
        shadow_change(*passwd_change())
        mails_delete(maildir_path='test/')
# Build and run the suite immediately: there is no ``__main__`` guard, so
# importing this module also triggers the test run.
# NOTE(review): consider wrapping these two lines in ``if __name__ == '__main__':``.
suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test)
TextTestRunner(verbosity=2).run(suite)
| #!/usr/bin/env python3
from passwd_change import passwd_change, shadow_change, mails_delete
from unittest import TestCase, TestLoader, TextTestRunner
import os
import subprocess
class PasswdChange_Test(TestCase):
    """Tests for passwd_change/shadow_change/mails_delete.

    A scratch ``test/`` maildir is created before each test and removed in
    tearDown, which also re-verifies the expected post-state so a broken
    cleanup cannot go unnoticed.
    """

    def setUp(self):
        """
        Preconditions
        """
        subprocess.call(['mkdir', 'test'])
        subprocess.call(['touch', 'test/rvv', 'test/max',
                         'test/bdv', 'test/mail'])
        #TODO create passwd test file
        #TODO create shadow test file
        #TODO create keys.txt file

    def _check_mailboxes(self):
        # Shared post-condition (previously duplicated in tearDown and
        # test_passwd_change): 'rvv' must be gone, the others untouched.
        if os.path.exists('test/rvv'):
            raise Exception('test/rvv must not exist')
        if not (os.path.exists('test/max') and
                os.path.exists('test/bdv') and
                os.path.exists('test/mail')):
            raise Exception('File max, bdv or mail must exist!')

    def tearDown(self):
        # Verify first, then drop the scratch directory.
        self._check_mailboxes()
        subprocess.call(['rm', '-r', 'test/'])

    def test_passwd_change(self):
        shadow_change(*passwd_change())
        mails_delete(maildir_path='test')
        self._check_mailboxes()

    def test_passwd_change_2(self):
        # Same flow, but with a trailing slash on the maildir path.
        shadow_change(*passwd_change())
        mails_delete(maildir_path='test/')
# Run the suite whenever this file is executed; no ``__main__`` guard, so a
# plain import runs the tests as well.
# NOTE(review): consider guarding with ``if __name__ == '__main__':``.
suite = TestLoader().loadTestsFromTestCase(PasswdChange_Test)
TextTestRunner(verbosity=2).run(suite)
| Add tearDown() - remove test dir, test files existing and not existing. | Add tearDown() - remove test dir, test files existing and not existing.
| Python | mit | maxsocl/oldmailer |
8db4213d20486a60abda1ba486438f54c3b830c0 | ci_scripts/installPandoc.py | ci_scripts/installPandoc.py | import os
from subprocess import call, check_output
import sys
from shutil import copy2
def checkAndInstall():
    """Install pandoc 2.1.3 from the GitHub release tarball unless it is already on PATH."""
    try:
        check_output(['pandoc', '-v'])
    except OSError:
        # pandoc is missing: download, unpack and copy its binaries.
        original_dir = os.path.abspath(os.curdir)
        os.chdir('downloads')
        from requests import get
        archive = 'pandoc-2.1.3-linux.tar.gz'
        url = 'https://github.com/jgm/pandoc/releases/download/2.1.3/pandoc-2.1.3-linux.tar.gz'
        with open(archive, "wb") as archive_file:
            archive_file.write(get(url).content)
        call(['tar', '-xvzf', archive])
        for binary in ('pandoc', 'pandoc-citeproc'):
            copy2('./pandoc-2.1.3/bin/' + binary, '/usr/local/bin')
        os.chdir(original_dir)
| import os
from subprocess import call, check_output
import sys
from shutil import copy2
# Cached at import time; sys.platform is 'win32' on Windows.
platform = sys.platform


def checkAndInstall():
    """Install pandoc 2.1.3 if the ``pandoc`` executable is not on PATH.

    The release asset is downloaded into the sibling ``downloads``
    directory; on Windows the MSI installer is launched, otherwise the
    tarball is unpacked and the binaries copied to /usr/local/bin. The
    working directory is restored even if a step fails (the original
    code only restored it on success).
    """
    try:
        check_output('pandoc -v'.split())
    except OSError:
        cudir = os.path.abspath(os.curdir)
        os.chdir(os.path.abspath(os.path.join(os.path.pardir, 'downloads')))
        try:
            # Pick the platform-specific asset once; the URL is derived
            # from the file name in a single place (previously duplicated
            # in both branches).
            if platform == 'win32':
                pandocFile = 'pandoc-2.1.3-windows.msi'
            else:
                pandocFile = 'pandoc-2.1.3-linux.tar.gz'
            source = ('https://github.com/jgm/pandoc/releases/download/2.1.3/'
                      + pandocFile)

            from requests import get  # deferred: only needed when installing
            with open(pandocFile, "wb") as file:
                file.write(get(source).content)

            if platform == 'win32':
                call('msiexec.exe /i "{}" /norestart'.format(pandocFile))
            else:
                call("tar -xvzf {}".format(pandocFile).split())
                copy2('./pandoc-2.1.3/bin/pandoc', '/usr/local/bin')
                copy2('./pandoc-2.1.3/bin/pandoc-citeproc', '/usr/local/bin')
        finally:
            os.chdir(cudir)
# Entry point: run the installer when executed as a script.
if __name__ == '__main__':
    checkAndInstall()
| Fix build wheels with Pandoc. | Fix build wheels with Pandoc.
| Python | bsd-3-clause | jr-garcia/AssimpCy,jr-garcia/AssimpCy |
d407f1bcd95daf4f4bd8dfe8ae3b4b9e68061cb5 | cref/sequence/fragment.py | cref/sequence/fragment.py |
def fragment(sequence, size=5):
    """
    Fragment a string sequence using a sliding window given by size

    :param sequence: String containing the sequence
    :param size: Size of the window; must be positive to yield anything
    :return: a fragment of the sequence with the given size
    """
    # Guard against non-positive sizes: without it, size=0 would yield
    # len(sequence) + 1 empty strings and negative sizes would yield
    # fragments of the wrong width.
    if size > 0:
        for i in range(len(sequence) - size + 1):
            yield sequence[i: i + size]
|
def fragment(sequence, size=5):
    """Yield every ``size``-character sliding window over ``sequence``.

    Nothing is yielded when ``size`` is not positive or exceeds the
    sequence length.
    """
    if size <= 0:
        return
    last_start = len(sequence) - size
    start = 0
    while start <= last_start:
        yield sequence[start:start + size]
        start += 1
| Handle sliding window with size 0 | Handle sliding window with size 0
| Python | mit | mchelem/cref2,mchelem/cref2,mchelem/cref2 |
471681cc25f44f31792813a968074b6813efb38c | yle/serializers.py | yle/serializers.py | from rest_framework.fields import SerializerMethodField
from rest_framework.serializers import ModelSerializer, CharField, UUIDField
from alexa.settings import RADIO_LINK_BASE
from yle.models import News
class NewsSerializer(ModelSerializer):
    """Maps a News model instance onto the camelCase field names of the feed."""

    uid = UUIDField(source='uuid')
    # Feed timestamp is taken from the model's ``modified`` field.
    updateDate = CharField(source='modified')
    titleText = CharField(source='title')
    streamUrl = CharField(source='audio_url')
    mainText = CharField(source='content')
    # Computed by get_redirection() below; the method name is passed explicitly.
    redirectionUrl = SerializerMethodField('get_redirection')

    def get_redirection(self, obj):
        """Build the redirection link from the configured base URL and the item's external id."""
        return RADIO_LINK_BASE + obj.external_id

    class Meta:
        model = News
        fields = ['uid', 'updateDate',
                  'titleText', 'streamUrl',
                  'mainText', 'redirectionUrl']
| from rest_framework.fields import SerializerMethodField
from rest_framework.serializers import ModelSerializer, CharField, UUIDField
from alexa.settings import RADIO_LINK_BASE
from yle.models import News
class NewsSerializer(ModelSerializer):
    """Maps a News model instance onto the camelCase field names of the feed."""

    uid = UUIDField(source='uuid')
    # Sourced from ``created``: the feed reports creation time, not the
    # last-modification time.
    updateDate = CharField(source='created')
    titleText = CharField(source='title')
    streamUrl = CharField(source='audio_url')
    mainText = CharField(source='content')
    # Computed by get_redirection() below; the method name is passed explicitly.
    redirectionUrl = SerializerMethodField('get_redirection')

    def get_redirection(self, obj):
        """Build the redirection link from the configured base URL and the item's external id."""
        return RADIO_LINK_BASE + obj.external_id

    class Meta:
        model = News
        fields = ['uid', 'updateDate',
                  'titleText', 'streamUrl',
                  'mainText', 'redirectionUrl']
| Update date from creation time | Update date from creation time
| Python | mit | anttipalola/alexa |
dc186adbb1b49c821911af724725df4512fbf9f5 | socialregistration/templatetags/facebook_tags.py | socialregistration/templatetags/facebook_tags.py | from django import template
from django.conf import settings
from socialregistration.utils import _https
# Template-tag registry this module's tags are attached to.
register = template.Library()

@register.inclusion_tag('socialregistration/facebook_js.html')
def facebook_js():
    """Context for the Facebook JS snippet: configured API key plus an HTTPS flag."""
    return {'facebook_api_key' : settings.FACEBOOK_API_KEY, 'is_https' : bool(_https())}
@register.inclusion_tag('socialregistration/facebook_button.html', takes_context=True)
def facebook_button(context):
if not 'request' in context:
raise AttributeError, 'Please add the ``django.core.context_processors.request`` context processors to your settings.TEMPLATE_CONTEXT_PROCESSORS set'
logged_in = context['request'].user.is_authenticated()
next = context['next'] if 'next' in context else None
return dict(next=next, logged_in=logged_in) | from django import template
from django.conf import settings
from socialregistration.utils import _https
# Library instance the template tags below are registered on.
register = template.Library()

@register.inclusion_tag('socialregistration/facebook_js.html')
def facebook_js():
    """Render context for the Facebook JS bootstrap snippet (API key + HTTPS flag)."""
    return {'facebook_api_key' : settings.FACEBOOK_API_KEY, 'is_https' : bool(_https())}
@register.inclusion_tag('socialregistration/facebook_button.html', takes_context=True)
def facebook_button(context):
if not 'request' in context:
raise AttributeError, 'Please add the ``django.core.context_processors.request`` context processors to your settings.TEMPLATE_CONTEXT_PROCESSORS set'
logged_in = context['request'].user.is_authenticated()
if 'next' in context:
next = context['next']
else:
next = None
return dict(next=next, logged_in=logged_in) | Use syntax compatible with Python 2.4 | Use syntax compatible with Python 2.4
| Python | mit | bopo/django-socialregistration,bopo/django-socialregistration,bopo/django-socialregistration,kapt/django-socialregistration,lgapontes/django-socialregistration,mark-adams/django-socialregistration,0101/django-socialregistration,praekelt/django-socialregistration,flashingpumpkin/django-socialregistration,itmustbejj/django-socialregistration,Soovox/django-socialregistration,minlex/django-socialregistration,brodie/django-socialregistration,minlex/django-socialregistration,mark-adams/django-socialregistration,aditweb/django-socialregistration,aditweb/django-socialregistration,flashingpumpkin/django-socialregistration,brodie/django-socialregistration,minlex/django-socialregistration,amakhnach/django-socialregistration,mark-adams/django-socialregistration,aditweb/django-socialregistration,lgapontes/django-socialregistration,lgapontes/django-socialregistration,kapt/django-socialregistration |
2f16eb25db856b72138f6dfb7d19e799bd460287 | tests/test_helpers.py | tests/test_helpers.py | # -*- coding: utf-8 -*-
import pytest
from os.path import basename
from helpers import utils, fixture
@pytest.mark.skipif(pytest.config.getoption("--application") is not False, reason="application passed; skipping base module tests")
class TestHelpers():
    """Base-module tests for the helpers.utils / helpers.fixture modules."""

    # Fix: the methods below were defined without ``self``. pytest binds test
    # methods to a class instance, so calling any of them raised
    # ``TypeError: ... takes 0 positional arguments but 1 was given``.

    def test_wildcards1(self):
        d = utils.get_wildcards([('"{prefix}.bam"', "medium.bam")], {})
        assert d['prefix'] == "medium"

    def test_wildcards2(self):
        d = utils.get_wildcards([('"{prefix}{ext,.bam}"', "medium.bam")], {})
        assert d['ext'] == ".bam"

    def test_wildcards3(self):
        d = utils.get_wildcards([('"{prefix}.bar"', "/foo/bar/medium.bar")], {})
        assert d['prefix'] == 'medium'

    def test_wildcards4(self):
        d = utils.get_wildcards([('config[\'foo\'] + ".bar"', "config.yaml")], {})
        assert d == {}

    def test_determine_fixture(self):
        # Non-existent filetype
        ft = fixture.determine_fixture('"{prefix}.bar"')
        assert ft is None

        ft = fixture.determine_fixture('"{prefix}.bam"')
        assert basename(ft) == "PUR.HG00731.tiny.sort.bam"

        ft = fixture.determine_fixture('config[\'foo\'] + ".dict"')
        assert basename(ft) == "scaffolds.dict"
| # -*- coding: utf-8 -*-
import pytest
from os.path import basename
from helpers import utils, fixture
pytestmark = pytest.mark.skipif(pytest.config.getoption("--application") is not False, reason="application passed; skipping base module tests")
def test_wildcards1():
    """A plain ``{prefix}`` wildcard captures the stem of the file name."""
    wildcards = utils.get_wildcards([('"{prefix}.bam"', "medium.bam")], {})
    assert wildcards['prefix'] == "medium"
def test_wildcards2():
    """A constrained wildcard like ``{ext,.bam}`` captures the extension."""
    wildcards = utils.get_wildcards([('"{prefix}{ext,.bam}"', "medium.bam")], {})
    assert wildcards['ext'] == ".bam"
def test_wildcards3():
    """Directory components are stripped before the wildcard is matched."""
    wildcards = utils.get_wildcards([('"{prefix}.bar"', "/foo/bar/medium.bar")], {})
    assert wildcards['prefix'] == 'medium'
def test_wildcards4():
d = utils.get_wildcards([('config[\'foo\'] + ".bar"', "config.yaml")], {})
assert d == {}
def test_determine_fixture():
# Non-existent filetype
ft = fixture.determine_fixture('"{prefix}.bar"')
assert ft is None
ft = fixture.determine_fixture('"{prefix}.bam"')
assert basename(ft) == "PUR.HG00731.tiny.sort.bam"
ft = fixture.determine_fixture('config[\'foo\'] + ".dict"')
assert basename(ft) == "scaffolds.dict"
| Use global pytestmark to skip tests; deprecate class | Use global pytestmark to skip tests; deprecate class
| Python | mit | percyfal/snakemake-rules,percyfal/snakemake-rules,percyfal/snakemakelib-rules,percyfal/snakemakelib-rules,percyfal/snakemakelib-rules |
723d7410b48fd4fc42ed9afe470ba3b37381599a | noxfile.py | noxfile.py | """Development automation."""
import nox
def _install_this_editable(session, *, extras=None):
if extras is None:
extras = []
session.install("flit")
session.run(
"flit",
"install",
"-s",
"--deps=production",
"--extras",
",".join(extras),
silent=True,
)
@nox.session
def lint(session):
session.install("pre-commit")
session.run("pre-commit", "run", "--all-files", *session.posargs)
@nox.session(python=["3.6", "3.7", "3.8"])
def test(session):
_install_this_editable(session, extras=["test"])
default_args = ["--cov-report", "term", "--cov", "sphinx_autobuild"]
args = session.posargs or default_args
session.run("pytest", *args)
@nox.session
def docs(session):
_install_this_editable(session, extras=["docs"])
session.run("sphinx-build", "-b", "html", "docs/", "build/docs")
| """Development automation."""
import nox
def _install_this_editable(session, *, extras=None):
if extras is None:
extras = []
session.install("flit")
session.run(
"flit",
"install",
"-s",
"--deps=production",
"--extras",
",".join(extras),
silent=True,
)
@nox.session
def lint(session):
session.install("pre-commit")
session.run("pre-commit", "run", "--all-files", *session.posargs)
@nox.session(python=["3.6", "3.7", "3.8"])
def test(session):
_install_this_editable(session, extras=["test"])
default_args = ["--cov-report", "term", "--cov", "sphinx_autobuild"]
args = session.posargs or default_args
session.run("pytest", *args)
@nox.session
def docs(session):
_install_this_editable(session, extras=["docs"])
session.run("sphinx-build", "-b", "html", "docs/", "build/docs")
@nox.session(name="docs-live")
def docs_live(session):
_install_this_editable(session, extras=["docs"])
session.run("sphinx-autobuild", "-b", "html", "docs/", "build/docs")
| Add docs-live to perform demo-runs | Add docs-live to perform demo-runs
| Python | mit | GaretJax/sphinx-autobuild |
41209aa3e27673f003ed62a46c9bfae0c19d0bf3 | il2fb/ds/airbridge/typing.py | il2fb/ds/airbridge/typing.py | # coding: utf-8
from pathlib import Path
from typing import Callable, Optional, List, Union
from il2fb.parsers.events.events import Event
EventOrNone = Optional[Event]
EventHandler = Callable[[Event], None]
IntOrNone = Optional[int]
StringProducer = Callable[[], str]
StringHandler = Callable[[str], None]
StringOrNone = Optional[str]
StringOrNoneProducer = Callable[[], StringOrNone]
StringOrPath = Union[str, Path]
StringList = List[str]
| # coding: utf-8
from pathlib import Path
from typing import Callable, Optional, List, Union
from il2fb.commons.events import Event
EventOrNone = Optional[Event]
EventHandler = Callable[[Event], None]
IntOrNone = Optional[int]
StringProducer = Callable[[], str]
StringHandler = Callable[[str], None]
StringOrNone = Optional[str]
StringOrNoneProducer = Callable[[], StringOrNone]
StringOrPath = Union[str, Path]
StringList = List[str]
| Update import of Event class | Update import of Event class
| Python | mit | IL2HorusTeam/il2fb-ds-airbridge |
462d94ddd57d2385889d2c6ef09563e38ffcccc9 | decisiontree/multitenancy/utils.py | decisiontree/multitenancy/utils.py | from django.conf import settings
from django.core.urlresolvers import reverse
from django.db.models import Q
def multitenancy_enabled():
return "decisiontree.multitenancy" in settings.INSTALLED_APPS
def get_tenants_for_user(user):
"""Return all tenants that the user can manage."""
from multitenancy.models import Tenant
tenants = Tenant.objects.all()
if not user.is_superuser:
user_is_manager = Q(tenantrole__user=user) | Q(group__tenantrole__user=user)
tenants = tenants.filter(user_is_manager)
return tenants
def get_link_class_from_model(model):
"""Get the tenant link model associated with the model class."""
model_class = model if isinstance(model, type) else type(model)
if not hasattr(model_class, 'tenantlink'):
raise TypeError("This method should only be used on tenant-enabled models.")
return model_class.tenantlink.related.model
def tenancy_reverse(request, url_name, *args, **kwargs):
"""Add tenancy information to the URL reversal if multitenancy is enabled."""
if multitenancy_enabled():
# reverse disallows mixing *args and **kwargs.
if args:
args = (request.group_slug, request.tenant_slug) + args
else:
kwargs.setdefault('group_slug', request.group_slug)
kwargs.setdefault('tenant_slug', request.tenant_slug)
return reverse(url_name, args=args, kwargs=kwargs)
| from django.conf import settings
from django.core.urlresolvers import reverse
from django.db.models import Q
def multitenancy_enabled():
return "decisiontree.multitenancy" in settings.INSTALLED_APPS
def get_tenants_for_user(user):
"""Return all tenants that the user can manage."""
from multitenancy.models import Tenant
tenants = Tenant.objects.all()
if not user.is_superuser:
user_is_manager = Q(tenantrole__user=user) | Q(group__tenantrole__user=user)
tenants = tenants.filter(user_is_manager)
return tenants
def get_link_class_from_model(model):
"""Get the tenant link model associated with the model class."""
model_class = model if isinstance(model, type) else type(model)
if not hasattr(model_class, 'tenantlink'):
raise TypeError("This method should only be used on tenant-enabled models.")
return model_class.tenantlink.related.model
def tenancy_reverse(request, url_name, *args, **kwargs):
"""Add tenancy information to the URL reversal if multitenancy is enabled."""
if multitenancy_enabled():
# reverse disallows mixing *args and **kwargs.
if args:
args = (request.group_slug, request.tenant_slug) + tuple(args)
else:
kwargs.setdefault('group_slug', request.group_slug)
kwargs.setdefault('tenant_slug', request.tenant_slug)
return reverse(url_name, args=args, kwargs=kwargs)
| Fix error if passing a list of args to tenancy_reverse | Fix error if passing a list of args to tenancy_reverse
| Python | bsd-3-clause | caktus/rapidsms-decisiontree-app,caktus/rapidsms-decisiontree-app,caktus/rapidsms-decisiontree-app |
7ee86e9b52292a8824dfa7bab632526cbb365b51 | routes.py | routes.py | # -*- coding:utf-8 -*-
from flask import request, redirect
import requests
cookiename = 'openAMUserCookieName'
amURL = 'https://openam.example.com/'
validTokenAPI = amURL + 'openam/identity/istokenvalid?tokenid='
loginURL = amURL + 'openam/UI/Login'
def session_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
usercookie = request.cookies.get(cookiename)
if usercookie:
amQuery = requests.get(validTokenAPI + usercookie)
if 'boolean=true' in amQuery.text:
return f(*args, **kwargs)
return redirect(loginURL)
return decorated_function
@app.route('/members_page')
@session_required
def members_page():
pass
| # -*- coding:utf-8 -*-
from flask import request, redirect
import requests
cookiename = 'openAMUserCookieName'
amURL = 'https://openam.example.com/'
validTokenAPI = amURL + 'openam/json/sessions/{token}?_action=validate'
loginURL = amURL + 'openam/UI/Login'
def session_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
usercookie = request.cookies.get(cookiename)
if usercookie:
amQuery = requests.post(validTokenAPI.format(token=usercookie))
if amQuery.json()['valid']:
return f(*args, **kwargs)
return redirect(loginURL)
return decorated_function
@app.route('/members_page')
@session_required
def members_page():
pass
| Use new OpenAM token validation endpoint | Use new OpenAM token validation endpoint
| Python | unlicense | timhberry/openam-flask-decorator |
463abcce738ca1c47729cc0e465da9dc399e21dd | examples/remote_download.py | examples/remote_download.py | #!/usr/bin/env python
# -*- encoding:utf-8 -*-
from xunleipy.remote import XunLeiRemote
def remote_download(username, password, rk_username, rk_password, download_links, proxy=None, path='C:/TD/', peer=0):
remote_client = XunLeiRemote(username, password, rk_username, rk_password, proxy=proxy)
remote_client.login()
peer_list = remote_client.get_remote_peer_list()
if len(peer_list) == 0:
print 'No valid remote devices'
return
pid = peer_list[peer]['pid']
return remote_client.add_urls_to_remote(pid, path, download_links)
if __name__ == '__main__':
import sys
download_link = sys.argv[1]
with open('config.json', 'r') as f:
import json
config = json.load(f)
username = config.get('username', '')
password = config.get('password', '')
rk_username = config.get('rk_username', '')
rk_password = config.get('rk_password', '')
proxy = config.get('proxy', None)
if not username or not password:
print 'Invalid username or password!'
else:
path = config.get('path', 'C:/TDDOWNLOAD/')
print remote_download(username, password, rk_username, rk_password, [download_link], proxy)
| #!/usr/bin/env python
# -*- encoding:utf-8 -*-
import sys
import os
from xunleipy.remote import XunLeiRemote
sys.path.append('/Users/gunner/workspace/xunleipy')
def remote_download(username,
password,
rk_username,
rk_password,
download_links,
proxy=None,
path='C:/TD/',
peer=0):
remote_client = XunLeiRemote(
username, password, rk_username, rk_password, proxy=proxy
)
remote_client.login()
peer_list = remote_client.get_remote_peer_list()
if len(peer_list) == 0:
print('No valid remote devices')
return
pid = peer_list[peer]['pid']
return remote_client.add_urls_to_remote(pid, path, download_links)
if __name__ == '__main__':
import sys
download_link = sys.argv[1]
with open('config.json', 'r') as f:
import json
config = json.load(f)
username = config.get('username', '')
password = config.get('password', '')
rk_username = config.get('rk_username', '')
rk_password = config.get('rk_password', '')
proxy = config.get('proxy', None)
if not username or not password:
print('Invalid username or password!')
else:
path = config.get('path', 'C:/TDDOWNLOAD/')
print(
remote_download(
username, password, rk_username,
rk_password, [download_link], proxy
)
)
| Change example style for python3 | Change example style for python3
| Python | mit | lazygunner/xunleipy |
ab47c678b37527a7b8a970b365503b65ffccda87 | populous/cli.py | populous/cli.py | import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
pass
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
| import click
from .loader import load_yaml
from .blueprint import Blueprint
from .exceptions import ValidationError, YAMLError
def get_blueprint(*files):
try:
return Blueprint.from_description(load_yaml(*files))
except (YAMLError, ValidationError) as e:
raise click.ClickException(e.message)
except Exception as e:
raise click.ClickException("Unexpected error during the blueprint "
"loading: {}".format(e.message))
@click.group()
@click.version_option()
def cli():
pass
@cli.command()
@click.argument('files', nargs=-1)
def predict(files):
"""
Predict how many objects will be created if the given files are used.
"""
blueprint = get_blueprint(*files)
for item in blueprint:
click.echo("{name}: {count} {by}".format(
name=item.name, count=item.total,
by="({} by {})".format(item.count.number, item.count.by)
if item.count.by else ""
))
| Handle unexpected errors properly in load_blueprint | Handle unexpected errors properly in load_blueprint
| Python | mit | novafloss/populous |
3b0e80a159c4544a69adf35f4871b9167335795c | examples/user_agent_test.py | examples/user_agent_test.py | import time
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_user_agent(self):
self.open('http://www.whatsmyua.info/')
user_agent = self.get_text("#custom-ua-string")
print("\n\nUser-Agent = %s\n" % user_agent)
print("Displaying User-Agent Info:")
print(self.get_text("#useragent"))
print("\nThe browser will close automatically in 7 seconds...")
time.sleep(7)
| import time
from seleniumbase import BaseCase
class MyTestClass(BaseCase):
def test_user_agent(self):
self.open('https://www.whatsmyua.info/')
user_agent = self.get_text("#custom-ua-string")
print("\n\nUser-Agent = %s\n" % user_agent)
print("Displaying User-Agent Info:")
print(self.get_text("#useragent"))
print("\nThe browser will close automatically in 7 seconds...")
time.sleep(7)
| Update the user agent test | Update the user agent test
| Python | mit | mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase |
0485e6dcaf19061812d0e571890e58b85b5dea12 | lava_results_app/utils.py | lava_results_app/utils.py | import os
import yaml
import logging
from django.utils.translation import ungettext_lazy
from django.conf import settings
def help_max_length(max_length):
return ungettext_lazy(
u"Maximum length: {0} character",
u"Maximum length: {0} characters",
max_length).format(max_length)
class StreamEcho(object):
def write(self, value):
return value
def description_filename(job_id):
logger = logging.getLogger('lava_results_app')
filename = os.path.join(settings.MEDIA_ROOT, 'job-output', 'job-%s' % job_id, 'description.yaml')
if not os.path.exists(filename):
logger.error("No description.yaml for job %s" % job_id)
return None
return filename
def description_data(job_id):
logger = logging.getLogger('lava_results_app')
filename = description_filename(job_id)
if not filename:
return {}
try:
data = yaml.load(open(filename, 'r'))
except yaml.YAMLError:
logger.error("Unable to parse description for %s" % job_id)
return {}
return data
| import os
import yaml
import logging
from django.utils.translation import ungettext_lazy
from django.conf import settings
def help_max_length(max_length):
return ungettext_lazy(
u"Maximum length: {0} character",
u"Maximum length: {0} characters",
max_length).format(max_length)
class StreamEcho(object):
def write(self, value):
return value
def description_filename(job_id):
logger = logging.getLogger('lava_results_app')
filename = os.path.join(settings.MEDIA_ROOT, 'job-output', 'job-%s' % job_id, 'description.yaml')
if not os.path.exists(filename):
logger.error("No description.yaml for job %s" % job_id)
return None
return filename
def description_data(job_id):
logger = logging.getLogger('lava_results_app')
filename = description_filename(job_id)
if not filename:
return {}
try:
data = yaml.load(open(filename, 'r'))
except yaml.YAMLError:
logger.error("Unable to parse description for %s" % job_id)
return {}
if not data:
return {}
return data
| Return an empty dict if no data | Return an empty dict if no data
Avoids a HTTP500 on slow instances where the file
may be created before data is written, causing the
YAML parser to return None.
Change-Id: I13b92941f3e368839a9665fe3197c706babd9335
| Python | agpl-3.0 | Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server |
28c6af1381a1fc38b20ce05e85f494f3ae2beeb4 | arcutils/masquerade/templatetags/masquerade.py | arcutils/masquerade/templatetags/masquerade.py | from django import template
from .. import perms
from ..settings import get_user_attr
register = template.Library()
@register.filter
def is_masquerading(user):
info = getattr(user, get_user_attr())
return info['is_masquerading']
@register.filter
def can_masquerade(user):
return perms.can_masquerade(user)
@register.filter
def can_masquerade_as(user, masquerade_user):
return perms.can_masquerade_as(user, masquerade_user)
| from django import template
from .. import perms
from ..settings import get_user_attr, is_enabled
register = template.Library()
@register.filter
def is_masquerading(user):
if not is_enabled():
return False
info = getattr(user, get_user_attr(), None)
return info['is_masquerading']
@register.filter
def can_masquerade(user):
return perms.can_masquerade(user)
@register.filter
def can_masquerade_as(user, masquerade_user):
return perms.can_masquerade_as(user, masquerade_user)
| Make is_masquerading template tag more robust | Make is_masquerading template tag more robust
When masquerading is not enabled, immediately return False to avoid
checking for a request attribute that won't be present.
| Python | mit | PSU-OIT-ARC/django-arcutils,wylee/django-arcutils,wylee/django-arcutils,PSU-OIT-ARC/django-arcutils |
98c2c311ad1a0797205da58ce4d3b7d9b4c66c57 | nova/policies/pause_server.py | nova/policies/pause_server.py | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
POLICY_ROOT = 'os_compute_api:os-pause-server:%s'
pause_server_policies = [
policy.DocumentedRuleDefault(
POLICY_ROOT % 'pause',
base.RULE_ADMIN_OR_OWNER,
"Pause a server",
[
{
'path': '/servers/{server_id}/action (pause)',
'method': 'POST'
}
]
),
policy.DocumentedRuleDefault(
POLICY_ROOT % 'unpause',
base.RULE_ADMIN_OR_OWNER,
"Unpause a paused server",
[
{
'path': '/servers/{server_id}/action (unpause)',
'method': 'POST'
}
]
),
]
def list_rules():
return pause_server_policies
| # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
POLICY_ROOT = 'os_compute_api:os-pause-server:%s'
pause_server_policies = [
policy.DocumentedRuleDefault(
name=POLICY_ROOT % 'pause',
check_str=base.RULE_ADMIN_OR_OWNER,
description="Pause a server",
operations=[
{
'path': '/servers/{server_id}/action (pause)',
'method': 'POST'
}
],
scope_types=['system', 'project']
),
policy.DocumentedRuleDefault(
name=POLICY_ROOT % 'unpause',
check_str=base.RULE_ADMIN_OR_OWNER,
description="Unpause a paused server",
operations=[
{
'path': '/servers/{server_id}/action (unpause)',
'method': 'POST'
}
],
scope_types=['system', 'project']
),
]
def list_rules():
return pause_server_policies
| Introduce scope_types in pause server policy | Introduce scope_types in pause server policy
oslo.policy introduced the scope_type feature which can
control the access level at system-level and project-level.
- https://docs.openstack.org/oslo.policy/latest/user/usage.html#setting-scope
- http://specs.openstack.org/openstack/keystone-specs/specs/keystone/queens/system-scope.html
Appropriate scope_type for nova case:
- https://specs.openstack.org/openstack/nova-specs/specs/ussuri/approved/policy-defaults-refresh.html#scope
This commit introduce scope_type for pause server API policies
as:
- ['system', 'project'] for pause/unpause policy.
Also adds the test case with scope_type enabled and verify we
pass and fail the policy check with expected context.
Partial implement blueprint policy-defaults-refresh
Change-Id: I828248ec42c71d67c8d9463d987d0afe54989c74
| Python | apache-2.0 | openstack/nova,klmitch/nova,klmitch/nova,mahak/nova,mahak/nova,mahak/nova,klmitch/nova,openstack/nova,openstack/nova,klmitch/nova |
263e517004df36938b430d8802d4fc80067fadf5 | djangoreact/urls.py | djangoreact/urls.py | from django.conf.urls import url, include
from django.contrib import admin
from rest_framework import routers
from server import views
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
router.register(r'groups', views.GroupViewSet)
urlpatterns = [
url(r'^$', views.index),
url(r'^api/auth/', include('rest_auth.urls')),
url(r'^api/', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls')),
url(r'^admin/', admin.site.urls),
]
| from django.conf.urls import url, include
from django.contrib import admin
from rest_framework import routers
from server import views
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
router.register(r'groups', views.GroupViewSet)
urlpatterns = [
url(r'^api/auth/', include('rest_auth.urls')),
url(r'^api/', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls')),
url(r'^admin/', admin.site.urls),
url(r'^', views.index),
]
| Fix to use react-router for all unmatched routes. | Fix to use react-router for all unmatched routes.
| Python | mit | willy-claes/django-react,willy-claes/django-react,willy-claes/django-react |
f83282b1747e255d35e18e9fecad1750d1564f9e | do_record/record.py | do_record/record.py | """DigitalOcean DNS Records."""
from certbot_dns_auth.printer import printer
from do_record import http
class Record(object):
"""Handle DigitalOcean DNS records."""
def __init__(self, api_key, domain, hostname):
self._number = None
self.domain = domain
self.hostname = hostname
self.api_key = api_key
def create(self, value):
"""Create this record on DigitalOcean with the supplied value."""
self._number = http.create(self, value)
return self.number
def delete(self, record_id=None):
"""Delete this record on DigitalOcean, identified by record_id."""
if record_id is None:
record_id = self.number
http.delete(self, record_id)
def printer(self):
printer(self.number)
@property
def number(self):
return self._number
@number.setter
def number(self, value):
if self.number is None:
self._number = value
else:
raise ValueError(
'Cannot externally reset a record\'s number identifier.')
| """DigitalOcean DNS Records."""
from certbot_dns_auth.printer import printer
from do_record import http
class Record(object):
"""Handle DigitalOcean DNS records."""
def __init__(self, api_key, domain, hostname):
self._number = None
self.domain = domain
self.hostname = hostname
self.api_key = api_key
def create(self, value):
"""Create this record on DigitalOcean with the supplied value."""
self._number = http.create(self, value)
return self.number
def delete(self, record_id=None):
"""Delete this record on DigitalOcean, identified by record_id."""
if record_id is None:
record_id = self.number
http.delete(self, record_id)
def printer(self):
printer(self.number)
@property
def number(self):
return self._number
@number.setter
def number(self, value):
self._number = value
| Remove Code That Doesn't Have a Test | Remove Code That Doesn't Have a Test
| Python | apache-2.0 | Jitsusama/lets-do-dns |
1633b9a1ace74a5a7cbf445ce7ceb790d0411e79 | modules/__init__.py | modules/__init__.py | #pipe2py modules package
#Author: Greg Gaughan
__all__ = ['pipefetch',
'pipefetchdata',
'pipedatebuilder',
'pipeurlbuilder',
'pipetextinput',
'pipeurlinput',
'pipefilter',
'pipeunion',
'pipeoutput',
]
| #pipe2py modules package
#Author: Greg Gaughan
#Note: each module name must match the name used internally by Yahoo, preceded by pipe
__all__ = ['pipefetch',
'pipefetchdata',
'pipedatebuilder',
'pipeurlbuilder',
'pipetextinput',
'pipeurlinput',
'pipefilter',
'pipeunion',
'pipeoutput',
]
| Add comment about module naming | Add comment about module naming
| Python | mit | nerevu/riko,nerevu/riko |
413c3e9e8a093e3f336e27a663f347f5ea9866a6 | performanceplatform/collector/ga/__init__.py | performanceplatform/collector/ga/__init__.py | from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from performanceplatform.collector.ga.core \
import create_client, query_documents_for, send_data
from performanceplatform.collector.write import DataSet
def main(credentials, data_set_config, query, options, start_at, end_at):
client = create_client(credentials)
documents = query_documents_for(
client, query, options,
data_set_config['data-type'], start_at, end_at
)
data_set = DataSet.from_config(data_set_config)
send_data(data_set, documents)
| from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from performanceplatform.collector.ga.core \
import create_client, query_documents_for, send_data
from performanceplatform.collector.write import DataSet
def main(credentials, data_set_config, query, options, start_at, end_at):
client = create_client(credentials)
documents = query_documents_for(
client, query, options,
options.get('dataType', data_set_config['data-type']),
start_at, end_at)
data_set = DataSet.from_config(data_set_config)
send_data(data_set, documents)
| Allow the 'dataType' field to be overriden | Allow the 'dataType' field to be overriden
The 'dataType' field in records predates data groups and data types. As
such they don't always match the new world order of data types. It's
fine to change in all cases other than Licensing which is run on
limelight, that we don't really want to touch.
| Python | mit | alphagov/performanceplatform-collector,alphagov/performanceplatform-collector,alphagov/performanceplatform-collector |
95542ab1b7c22a6e0160e242349c66f2cef7e390 | syntacticframes_project/syntacticframes/management/commands/check_correspondance_errors.py | syntacticframes_project/syntacticframes/management/commands/check_correspondance_errors.py | from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetClass
from parsecorrespondance import parse
from loadmapping import mapping
class Command(BaseCommand):
def handle(self, *args, **options):
for vn_class in VerbNetClass.objects.all():
try:
parse.get_ladl_list(vn_class.ladl_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(vn_class.name, e))
try:
parse.get_lvf_list(vn_class.lvf_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(vn_class.name, e))
| from django.core.management.base import BaseCommand
from syntacticframes.models import VerbNetFrameSet
from parsecorrespondance import parse
from loadmapping import mapping
class Command(BaseCommand):
def handle(self, *args, **options):
for frameset in VerbNetFrameSet.objects.all():
print("{}: {}/{}".format(frameset.name, frameset.ladl_string, frameset.lvf_string))
if frameset.ladl_string:
try:
parse.FrenchMapping('LADL', frameset.ladl_string).result()
except parse.UnknownClassException as e:
print('{:<30} {}'.format(frameset.name, e))
if frameset.lvf_string:
try:
parse.FrenchMapping('LVF', frameset.lvf_string)
except parse.UnknownClassException as e:
print('{:<30} {}'.format(frameset.name, e))
| Check correspondances in framesets now | Check correspondances in framesets now
| Python | mit | aymara/verbenet-editor,aymara/verbenet-editor,aymara/verbenet-editor |
6c54fc230e8c889a2351f20b524382a5c6e29d1c | examples/apps.py | examples/apps.py | # coding: utf-8
import os
import sys
from pysuru import TsuruClient
TSURU_TARGET = os.environ.get('TSURU_TARGET', None)
TSURU_TOKEN = os.environ.get('TSURU_TOKEN', None)
if not TSURU_TARGET or not TSURU_TOKEN:
print('You must set TSURU_TARGET and TSURU_TOKEN.')
sys.exit(1)
api = TsuruClient(TSURU_TARGET, TSURU_TOKEN)
# List all apps that this token has access to
for app in api.apps:
print(app.name)
# Update one specific app
api.apps.update('my-awesome-app', {'description': 'My awesome app'})
# Get information for one app
app = App.get('my-awesome-app')
print('%s: %s' % (app.name, app.description))
# List all services instances for app
for service in app.services:
print('Service: %s' % service.name)
| # coding: utf-8
import os
import sys
from pysuru import TsuruClient
TSURU_TARGET = os.environ.get('TSURU_TARGET', None)
TSURU_TOKEN = os.environ.get('TSURU_TOKEN', None)
if not TSURU_TARGET or not TSURU_TOKEN:
print('You must set TSURU_TARGET and TSURU_TOKEN env variables.')
sys.exit(1)
# Creating TsuruClient instance
tsuru = TsuruClient(TSURU_TARGET, TSURU_TOKEN)
# List all apps that this user has access to
for app in tsuru.apps.list():
print('App: {}'.format(app.name))
# Get information for one app
app = tsuru.apps.get('my-awesome-app')
print('{app.name}: {app.description}'.format(app=app))
# Update specific app
tsuru.apps.update('my-awesome-app', {'description': 'My new awesome description'})
| Update examples to match docs | Update examples to match docs
Use the interface defined in the docs in the examples scripts.
| Python | mit | rcmachado/pysuru |
5af4ef36ff7a56b34fc8d30df37c82a6837918e3 | pambox/speech/__init__.py | pambox/speech/__init__.py | """
The :mod:`pambox.speech` module gather speech intelligibility
models.
"""
from __future__ import absolute_import
from .binauralsepsm import BinauralSepsm
from .sepsm import Sepsm
from .mrsepsm import MrSepsm
from .sii import Sii
from .material import Material
from .experiment import Experiment
__all__ = [
'BinauralSepsm',
'Sepsm',
'MrSepsm',
'Sii',
'Material',
'Experiment'
]
| """
The :mod:`pambox.speech` module gather speech intelligibility
models.
"""
from __future__ import absolute_import
from .binauralsepsm import BinauralSepsm
from .binauralmrsepsm import BinauralMrSepsm
from .sepsm import Sepsm
from .mrsepsm import MrSepsm
from .sii import Sii
from .material import Material
from .experiment import Experiment
__all__ = [
'BinauralSepsm',
'BinauralMrSepsm',
'Sepsm',
'MrSepsm',
'Sii',
'Material',
'Experiment'
]
| Include both binaural mr-sEPSM and sEPSM | Include both binaural mr-sEPSM and sEPSM
| Python | bsd-3-clause | achabotl/pambox |
b5a8e7b6926bf7224abed6bd335d62b3f1ad1fb1 | performance_testing/command_line.py | performance_testing/command_line.py | import os
import yaml
from performance_testing.errors import ConfigFileError, ConfigKeyError
from performance_testing import web
from datetime import datetime as date
from time import time
class Tool:
def __init__(self, config='config.yml', result_directory='result'):
self.read_config(config_file=config)
self.create_result_file(directory=result_directory)
def read_config(self, config_file):
try:
config_stream = open(config_file, 'r')
config_data = yaml.load(config_stream)
config_stream.close()
self.host = config_data['host']
self.requests = config_data['requests']
self.clients = config_data['clients']
self.time = config_data['time']
self.urls = config_data['urls']
except KeyError as ex:
raise ConfigKeyError(ex.args[0])
except IOError:
raise ConfigFileError(config_file)
def create_result_file(self, directory):
datetime = date.fromtimestamp(time())
file_name = '%d-%d-%d_%d-%d-%d' % (datetime.year,
datetime.month,
datetime.day,
datetime.hour,
datetime.minute,
datetime.second)
file_path = os.path.join(directory, file_name)
if not os.path.exists(directory):
os.makedirs(directory)
open(file_path, 'a').close()
self.result_file = file_path
def start_testing(self):
pass
def run(self):
file_stream = open(self.result_file, 'w')
print('Start tests ...')
for url in self.urls:
full_url = self.host + url
file_stream.write('URL: %s\n' % url)
for i in range(0, self.requests):
file_stream.write(' %i - %.3f\n' % (i, web.request(full_url)))
print('Finished tests!')
| import os
import yaml
from performance_testing.errors import ConfigFileError, ConfigKeyError
from performance_testing import web
from performance_testing.config import Config
from performance_testing.result import Result
class Tool:
def __init__(self, config='config.yml', result_directory='result'):
self.config = Config(config_path=config)
self.result = Result(result_directory)
def start_testing(self):
pass
def run(self):
print('Start tests ...')
for url in self.config.urls:
full_url = self.config.host + url
self.result.file.write_line('URL: %s\n' % url)
for i in range(0, self.config.requests):
self.result.file.write_line(' %i - %.3f\n' % (i, web.request(full_url)))
print('Finished tests!')
| Use Config and Result class in Tool | Use Config and Result class in Tool
| Python | mit | BakeCode/performance-testing,BakeCode/performance-testing |
5fc699b89eae0c41923a813ac48281729c4d80b8 | orderable_inlines/inlines.py | orderable_inlines/inlines.py | from django.contrib.admin import StackedInline, TabularInline
from django.template.defaultfilters import slugify
class OrderableInlineMixin(object):
class Media:
js = (
'js/jquery.browser.min.js',
'js/orderable-inline-jquery-ui.js',
'js/orderable-inline.js',
)
css = {
'all': [
'css/orderable-inline.css'
]
}
def get_fieldsets(self, request, obj=None):
if self.declared_fieldsets:
return self.declared_fieldsets
form = self.get_formset(request, obj, fields=None).form
fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
return [
(None, {
'fields': fields,
'classes': self.fieldset_css_classes + ['orderable-field-%s' % self.orderable_field]
})
]
class OrderableStackedInline(OrderableInlineMixin, StackedInline):
fieldset_css_classes = ['orderable-stacked']
class OrderableTabularInline(OrderableInlineMixin, TabularInline):
fieldset_css_classes = ['orderable-tabular']
template = 'orderable_inlines/edit_inline/tabular.html'
| from django.contrib.admin import StackedInline, TabularInline
from django.template.defaultfilters import slugify
class OrderableInlineMixin(object):
class Media:
js = (
'js/jquery.browser.min.js',
'js/orderable-inline-jquery-ui.js',
'js/orderable-inline.js',
)
css = {
'all': [
'css/orderable-inline.css'
]
}
def get_fieldsets(self, request, obj=None):
form = self.get_formset(request, obj, fields=None).form
fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
return [
(None, {
'fields': fields,
'classes': self.fieldset_css_classes + ['orderable-field-%s' % self.orderable_field]
})
]
class OrderableStackedInline(OrderableInlineMixin, StackedInline):
fieldset_css_classes = ['orderable-stacked']
class OrderableTabularInline(OrderableInlineMixin, TabularInline):
fieldset_css_classes = ['orderable-tabular']
template = 'orderable_inlines/edit_inline/tabular.html'
| Make this hack compatible with Django 1.9 | Make this hack compatible with Django 1.9
| Python | bsd-2-clause | frx0119/django-orderable-inlines,frx0119/django-orderable-inlines |
533fb21586322c26fd9696213108d6a9e45ada64 | lib/ansible/cache/base.py | lib/ansible/cache/base.py | import exceptions
class BaseCacheModule(object):
def get(self, key):
raise exceptions.NotImplementedError
def set(self, key, value):
raise exceptions.NotImplementedError
def keys(self):
raise exceptions.NotImplementedError
def contains(self, key):
raise exceptions.NotImplementedError
def delete(self, key):
raise exceptions.NotImplementedError
def flush(self):
raise exceptions.NotImplementedError
| # (c) 2014, Brian Coca, Josh Drake, et al
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import exceptions
class BaseCacheModule(object):
def get(self, key):
raise exceptions.NotImplementedError
def set(self, key, value):
raise exceptions.NotImplementedError
def keys(self):
raise exceptions.NotImplementedError
def contains(self, key):
raise exceptions.NotImplementedError
def delete(self, key):
raise exceptions.NotImplementedError
def flush(self):
raise exceptions.NotImplementedError
| Add copyright header, let me know if corrections are needed. | Add copyright header, let me know if corrections are needed.
| Python | mit | thaim/ansible,thaim/ansible |
aff0eba2c0f7f5a0c9bebbfc9402f04c2c9d6d11 | preference/miExecPref.py | preference/miExecPref.py | import os
import json
SCRIPT_PATH = os.path.dirname(__file__)
def getPreference():
""" Load pref json data nad return as dict"""
prefFile = open(os.path.join(SCRIPT_PATH, "miExecPref.json"), 'r')
prefDict = json.load(prefFile)
prefFile.close()
return prefDict
def getWindowSetting():
""" Load window setting json data and return as dict"""
prefDict = getPreference()
pardir = os.path.join(SCRIPT_PATH, os.pardir)
windowFilePath = os.path.join(
pardir,
"style",
prefDict['style'],
"window.json")
windowFile = open(windowFilePath, 'r')
windowDict = json.load(windowFile)
windowFile.close()
return windowDict
| import os
import json
import maya.cmds as cmds
SCRIPT_PATH = os.path.dirname(__file__)
MAYA_SCRIPT_DIR = cmds.internalVar(userScriptDir=True)
def getPreference():
""" Load pref json data nad return as dict"""
for root, dirs, files in os.walk(MAYA_SCRIPT_DIR):
if 'miExecPref.json' in files:
# Load pref json file from user script dir if exists.
abspath = os.path.join(root, 'miExecPref.json')
prefFile = open(abspath, 'r')
else:
# Load pref json file from miExec package directory.
prefFile = open(os.path.join(SCRIPT_PATH, "miExecPref.json"), 'r')
prefDict = json.load(prefFile)
prefFile.close()
return prefDict
def getWindowSetting():
""" Load window setting json data and return as dict"""
prefDict = getPreference()
pardir = os.path.join(SCRIPT_PATH, os.pardir)
windowFilePath = os.path.join(
pardir,
"style",
prefDict['style'],
"window.json")
windowFile = open(windowFilePath, 'r')
windowDict = json.load(windowFile)
windowFile.close()
return windowDict
| Load user pref file if exists in the maya user script directory | Load user pref file if exists in the maya user script directory
| Python | mit | minoue/miExecutor |
64ed32aa5e2e36ce58209b0e356f7482137a81f2 | getMesosStats.py | getMesosStats.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
import json
import argparse
def get_metric(host, port, metric):
response = urllib2.urlopen(
'http://' + host + ':' + port + '/metrics/snapshot')
data = json.load(response)
# print json.dumps(data, indent=4, sort_keys=True)
print data[metric]
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser(
description='Mesos metrics')
arg_parser.add_argument(
'-H', '--host', help="Specify host or ip address", required=True)
arg_parser.add_argument(
'-p', '--port', help="Specify mesos api port", required=True)
arg_parser.add_argument(
'-m', '--metric', help="Specify metric's name", required=True)
arguments = arg_parser.parse_args()
get_metric(arguments.host, arguments.port, arguments.metric)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
import json
import argparse
def get_metric(host, port, metric):
response = urllib2.urlopen(
'http://' + host + ':' + port + '/metrics/snapshot')
data = json.load(response)
# print json.dumps(data, indent=4, sort_keys=True)
try:
print data[metric]
except KeyError:
print "ZBX_NOT_SUPPORTED"
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser(
description='Mesos metrics')
arg_parser.add_argument(
'-H', '--host', help="Specify host or ip address", required=True)
arg_parser.add_argument(
'-p', '--port', help="Specify mesos api port", required=True)
arg_parser.add_argument(
'-m', '--metric', help="Specify metric's name", required=True)
arguments = arg_parser.parse_args()
get_metric(arguments.host, arguments.port, arguments.metric)
| Add KeyError exception and ZBX_NOT_SUPPORTED message. | Add KeyError exception and ZBX_NOT_SUPPORTED message.
| Python | mit | zolech/zabbix-mesos-template |
7b5850d1b89d34ff9a60c3862d18691961c86656 | poisson/tests/test_irf.py | poisson/tests/test_irf.py | #!/usr/bin/env python
from numpy.testing import assert_almost_equal, assert_array_less
import numpy as np
from poisson import BmiPoisson
def test_grid_initialize():
model = BmiPoisson()
model.initialize()
assert_almost_equal(model.get_current_time(), 0.)
assert_array_less(model.get_value('land_surface__elevation'), 1.)
assert_array_less(0., model.get_value('land_surface__elevation'))
def test_update():
model = BmiPoisson()
model.initialize()
for time in xrange(10):
model.update()
assert_almost_equal(model.get_current_time(), time + 1.)
def test_update_until():
model = BmiPoisson()
model.initialize()
model.update_until(10.1)
assert_almost_equal(model.get_current_time(), 10.1)
def test_finalize():
model = BmiPoisson()
model.initialize()
model.update()
model.finalize()
| #!/usr/bin/env python
from nose.tools import assert_equal
from numpy.testing import assert_almost_equal, assert_array_less
import numpy as np
from poisson import BmiPoisson
def test_initialize_defaults():
model = BmiPoisson()
model.initialize()
assert_almost_equal(model.get_current_time(), 0.)
assert_array_less(model.get_value('land_surface__elevation'), 1.)
assert_array_less(0., model.get_value('land_surface__elevation'))
def test_initialize_from_file_like():
from StringIO import StringIO
import yaml
config = StringIO(yaml.dump({'shape': (7, 5)}))
model = BmiPoisson()
model.initialize(config)
assert_equal(model.get_grid_shape('land_surface__elevation'), (7, 5))
def test_initialize_from_file():
import os
import yaml
import tempfile
with tempfile.NamedTemporaryFile('w', delete=False) as fp:
fp.write(yaml.dump({'shape': (7, 5)}))
name = fp.name
model = BmiPoisson()
model.initialize(name)
os.remove(name)
assert_equal(model.get_grid_shape('land_surface__elevation'), (7, 5))
def test_update():
model = BmiPoisson()
model.initialize()
for time in xrange(10):
model.update()
assert_almost_equal(model.get_current_time(), time + 1.)
def test_update_until():
model = BmiPoisson()
model.initialize()
model.update_until(10.1)
assert_almost_equal(model.get_current_time(), 10.1)
def test_finalize():
model = BmiPoisson()
model.initialize()
model.update()
model.finalize()
| Test initialize with filename and file-like. | Test initialize with filename and file-like.
| Python | mit | mperignon/bmi-delta,mperignon/bmi-STM,mperignon/bmi-STM,mperignon/bmi-delta |
fa824cd22d47dd85d7ea067ff9063214e5517f94 | gem/context_processors.py | gem/context_processors.py | from molo.profiles.forms import RegistrationForm
from molo.profiles.forms import EditProfileForm, ProfilePasswordChangeForm
def default_forms(request):
return {
'registration_form': RegistrationForm(),
'edit_profile_form': EditProfileForm(),
'password_change_form': ProfilePasswordChangeForm()
}
# TODO: remove this context processor
def detect_freebasics(request):
return {
'is_via_freebasics':
'Internet.org' in request.META.get('HTTP_VIA', '') or
'InternetOrgApp' in request.META.get('HTTP_USER_AGENT', '') or
'true' in request.META.get('HTTP_X_IORG_FBS', '')
}
| from molo.profiles.forms import ProfilePasswordChangeForm
def default_forms(request):
return {
'password_change_form': ProfilePasswordChangeForm()
}
# TODO: remove this context processor
def detect_freebasics(request):
return {
'is_via_freebasics':
'Internet.org' in request.META.get('HTTP_VIA', '') or
'InternetOrgApp' in request.META.get('HTTP_USER_AGENT', '') or
'true' in request.META.get('HTTP_X_IORG_FBS', '')
}
| Remove unused forms from context processor | Remove unused forms from context processor
These forms are not used by any view or template. On every request
Django is generating the entire HTML form for these 2 forms which
is a whole load of unnecessary CPU.
| Python | bsd-2-clause | praekelt/molo-gem,praekelt/molo-gem,praekelt/molo-gem |
8ebe0966c64f344b79c450782661f4c6cab6a312 | modish/__init__.py | modish/__init__.py | # -*- coding: utf-8 -*-
from .model import ModalityModel
from .estimator import ModalityEstimator
from .visualize import MODALITY_TO_COLOR, MODALITY_ORDER, MODALITY_PALETTE,\
MODALITY_TO_CMAP, ModalitiesViz, violinplot, barplot
__author__ = 'Olga Botvinnik'
__email__ = '[email protected]'
__version__ = '0.1.0'
__all__ = ['ModalityModel', 'ModalityEstimator', 'MODALITY_ORDER',
'MODALITY_PALETTE', 'MODALITY_TO_COLOR', 'ModalitiesViz',
'violinplot', 'MODALITY_TO_CMAP']
class ModishTestResult(object):
def __init__(self, original_data, estimator, modality_assignments,
bayesian_fit, data_with_noise, waypoint_transformer,
waypoints):
self.original_data = original_data
self.estimator = estimator
self.modality_assignments = modality_assignments
self.bayesian_fit = bayesian_fit
self.data_with_noise = data_with_noise
self.waypoint_transformer = waypoint_transformer
self.waypoints = waypoints
| # -*- coding: utf-8 -*-
from .model import ModalityModel
from .estimator import ModalityEstimator, ModalityPredictor
from .visualize import MODALITY_TO_COLOR, MODALITY_ORDER, MODALITY_PALETTE,\
MODALITY_TO_CMAP, ModalitiesViz, violinplot, barplot
__author__ = 'Olga Botvinnik'
__email__ = '[email protected]'
__version__ = '0.1.0'
__all__ = ['ModalityModel', 'ModalityEstimator', 'MODALITY_ORDER',
'MODALITY_PALETTE', 'MODALITY_TO_COLOR', 'ModalitiesViz',
'violinplot', 'MODALITY_TO_CMAP', 'ModalityPredictor']
class ModishTestResult(object):
def __init__(self, original_data, estimator, modality_assignments,
bayesian_fit, data_with_noise, waypoint_transformer,
waypoints):
self.original_data = original_data
self.estimator = estimator
self.modality_assignments = modality_assignments
self.bayesian_fit = bayesian_fit
self.data_with_noise = data_with_noise
self.waypoint_transformer = waypoint_transformer
self.waypoints = waypoints
| Add modality predictor to output | Add modality predictor to output
| Python | bsd-3-clause | YeoLab/anchor,olgabot/modish,olgabot/anchor |
7c68e3b00e7c66c0223617447e16a7159118d284 | goldstone/addons/utils.py | goldstone/addons/utils.py | """Addon utilities."""
# Copyright 2015 Solinea, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def update_addon_node():
"""Update the persistent resource graph's Addon node.
This is much simpler than the update_xxxxx_nodes functions that update
nodes for cloud entities. There will be only one Addon node in the table,
and all add-ons will be owned by it. If we're running for the first time,
the Addon node needs to be created. If it's already there, we leave it
alone.
"""
from goldstone.core.models import Addon
Addon.objects.get_or_create(native_id="Add-on", native_name="Add-on")
| """Addon utilities."""
# Copyright 2015 Solinea, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def update_addon_node():
"""Update the persistent resource graph's Addon node.
This is much simpler than the update_xxxxx_nodes functions that update
nodes for cloud entities. There will be only one Addon node in the table,
and all add-ons will be owned by it. If we're running for the first time,
the Addon node needs to be created. If it's already there, we leave it
alone.
This also differs from update_xxxxx_nodes by returning the Addon node that
is found or created.
"""
from goldstone.core.models import Addon
result, _ = Addon.objects.get_or_create(native_id="Add-on",
native_name="Add-on")
return result
| Change update_addon_node() to return the Addon node, whether created or found. | Change update_addon_node() to return the Addon node, whether created or found.
| Python | apache-2.0 | slashk/goldstone-server,slashk/goldstone-server,Solinea/goldstone-server,slashk/goldstone-server,slashk/goldstone-server,Solinea/goldstone-server,Solinea/goldstone-server,Solinea/goldstone-server,Solinea/goldstone-server,slashk/goldstone-server |