commit (stringlengths 40–40) | old_file (stringlengths 4–118) | new_file (stringlengths 4–118) | old_contents (stringlengths 1–2.94k, ⌀) | new_contents (stringlengths 1–4.43k) | subject (stringlengths 15–444) | message (stringlengths 16–3.45k) | lang (stringclasses, 1 value) | license (stringclasses, 13 values) | repos (stringlengths 5–43.2k) |
---|---|---|---|---|---|---|---|---|---|
2a3d62e4edfd33857feec6fbf20122d2c1a113f8 | add_labels/label_data.py | add_labels/label_data.py | import pandas as pd
import subprocess
import sys
import os
source = sys.argv[1]
dest = sys.argv[2]
labels = sys.argv[3]
df = pd.read_csv(labels)
df = df.fillna('EMPTY')
subprocess.call(['mkdir', '-p', dest])
for subjects in list(set(df.Subject)):
    subject_list = subjects.split(', ')
    for subject in subject_list:
        print(dest)
        print(subject)
        subprocess.call(['mkdir', '-p', os.path.join(dest, subject)])
folders = [file.split('/')[-2] for file in df.SourceFile]
filenames = [file.split('/')[-1] for file in df.SourceFile]
for folder, filename, subjects in zip(folders, filenames, df.Subject):
    subject_list = subjects.split(', ')
    for subject in subject_list:
        subprocess.call(['cp', os.path.join(source, folder, filename), os.path.join(dest, subject, filename)]) | import pandas as pd
import subprocess
import sys
import os
source = sys.argv[1]
dest = sys.argv[2]
labels = sys.argv[3]
df = pd.read_csv(labels)
df = df.fillna('EMPTY')
subprocess.call(['mkdir', '-p', dest])
for subjects in list(set(df.Subject)):
    subject_list = subjects.split(', ')
    for subject in subject_list:
        print(dest)
        print(subject)
        subprocess.call(['mkdir', '-p', os.path.join(dest, subject)])
folders = [file.split('/')[-2] for file in df.SourceFile]
filenames = [file.split('/')[-1] for file in df.SourceFile]
for folder, filename, subjects in zip(folders, filenames, df.Subject):
    subject_list = subjects.split(', ')
    for subject in subject_list:
        subprocess.call(['mv', os.path.join(source, folder, filename), os.path.join(dest, subject, filename)]) | Update label data to point at correct spots | Update label data to point at correct spots
| Python | mit | matthew-sochor/trail-cam-detector,matthew-sochor/trail-cam-detector |
aa77e74c02ec7276c233454806d55fdb32899a13 | __init__.py | __init__.py |
# import subpackages
from . import advection
from . import cascade
from . import io
from . import noise
from . import nowcasts
from . import optflow
from . import postprocessing
from . import timeseries
from . import utils
from . import verification
from . import visualization
|
# import subpackages
from . import advection
from . import cascade
from . import io
from . import noise
from . import nowcasts
from . import optflow
from . import postprocessing
from . import timeseries
from . import utils
from . import verification as vf
from . import visualization as plt
| Use namespaces plt and vf for visualization and verification modules | Use namespaces plt and vf for visualization and verification modules
| Python | bsd-3-clause | pySTEPS/pysteps |
b1153bc6e8b8b132c146076aeeb6b86ec4f54365 | __init__.py | __init__.py | if 'loaded' in locals():
    import imp
    imp.reload(blendergltf)
    from .blendergltf import *
else:
    loaded = True
    from .blendergltf import * | bl_info = {
    "name": "glTF format",
    "author": "Daniel Stokes",
    "version": (0, 1, 0),
    "blender": (2, 76, 0),
    "location": "File > Import-Export",
    "description": "Export glTF",
    "warning": "",
    "wiki_url": ""
                "",
    "support": 'TESTING',
    "category": "Import-Export"}
# Treat as module
if '.' in __name__:
    if 'loaded' in locals():
        import imp
        imp.reload(blendergltf)
        from .blendergltf import *
    else:
        loaded = True
        from .blendergltf import *
# Treat as addon
else:
    if "bpy" in locals():
        import importlib
        importlib.reload(blendergltf)
    import json
    import bpy
    from bpy.props import (
        StringProperty,
    )
    from bpy_extras.io_utils import (
        ExportHelper,
    )
    from . import blendergltf
    class ExportGLTF(bpy.types.Operator, ExportHelper):
        """Save a Khronos glTF File"""
        bl_idname = "export_scene.gltf"
        bl_label = 'Export glTF'
        filename_ext = ".gltf"
        filter_glob = StringProperty(
            default="*.gltf",
            options={'HIDDEN'},
        )
        check_extension = True
        def execute(self, context):
            scene = {
                'camera': bpy.data.cameras,
                'lamps': bpy.data.lamps,
                'images': bpy.data.images,
                'materials': bpy.data.materials,
                'meshes': bpy.data.meshes,
                'objects': bpy.data.objects,
                'scenes': bpy.data.scenes,
                'textures': bpy.data.textures,
            }
            gltf = blendergltf.export_gltf(scene)
            with open(self.filepath, 'w') as fout:
                json.dump(gltf, fout, indent=4)
            return {'FINISHED'}
    def menu_func_export(self, context):
        self.layout.operator(ExportGLTF.bl_idname, text="glTF (.gltf)")
    def register():
        bpy.utils.register_module(__name__)
        bpy.types.INFO_MT_file_export.append(menu_func_export)
    def unregister():
        bpy.utils.unregister_module(__name__)
        bpy.types.INFO_MT_file_export.remove(menu_func_export) | Add experimental support to run module as Blender addon | Add experimental support to run module as Blender addon
| Python | apache-2.0 | Kupoman/blendergltf,lukesanantonio/blendergltf |
8c81f606499ebadddaf2a362bc8845eb69a21e8d | lds-gen.py | lds-gen.py | #!/usr/bin/env python
#
#
# Generate linker script to only expose symbols of the public API
#
import sys
import re
if __name__ == '__main__':
    funcs = list()
    last_line = ''
    for line in sys.stdin:
        m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
        if m:
            sym = m.group(2)
            m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
            if not m2:
                funcs.append(sym)
            last_line = ''
        else:
            last_line = line
    print('# Automatically generated by lds-gen.py - DO NOT EDIT')
    print('{\n global:')
    if len(funcs) == 0:
        print(' *;')
    else:
        for f in sorted(funcs):
            print(' %s;' % f)
    print('};')
| #!/usr/bin/env python
#
#
# Generate linker script to only expose symbols of the public API
#
import sys
import re
if __name__ == '__main__':
    funcs = list()
    last_line = ''
    for line in sys.stdin:
        m = re.match(r'^(\S+.*\s+\**)?(rd_kafka_\S+)\s*\(', line)
        if m:
            sym = m.group(2)
            m2 = re.match(r'(RD_UNUSED|__attribute__\(\(unused\)\))', line)
            if not m2:
                funcs.append(sym)
            last_line = ''
        else:
            last_line = line
    print('# Automatically generated by lds-gen.py - DO NOT EDIT')
    print('{\n global:')
    if len(funcs) == 0:
        print(' *;')
    else:
        for f in sorted(funcs):
            print(' %s;' % f)
    print('local:\n *;')
    print('};')
| Stop exporting internal symbols from the shared libraries. | Stop exporting internal symbols from the shared libraries.
| Python | bsd-2-clause | orthrus/librdkafka,klonikar/librdkafka,klonikar/librdkafka,senior7515/librdkafka,janmejay/librdkafka,senior7515/librdkafka,orthrus/librdkafka,klonikar/librdkafka,janmejay/librdkafka,orthrus/librdkafka,janmejay/librdkafka,senior7515/librdkafka,senior7515/librdkafka,klonikar/librdkafka,orthrus/librdkafka,janmejay/librdkafka |
b07d74f99338165f8bb83ac0599452b021b96a8f | django_boolean_sum.py | django_boolean_sum.py | from django.conf import settings
from django.db.models.aggregates import Sum
from django.db.models.sql.aggregates import Sum as BaseSQLSum
class SQLSum(BaseSQLSum):
    @property
    def sql_template(self):
        if settings.DATABASES['default']['ENGINE'] == \
                'django.db.backends.postgresql_psycopg2':
            return '%(function)s(%(field)s::int)'
        return '%(function)s(%(field)s)'
class BooleanSum(Sum):
    function = None
    def add_to_query(self, query, alias, col, source, is_summary):
        aggregate = SQLSum(col, source=source, is_summary=is_summary,
                           **self.extra)
        query.aggregates[alias] = aggregate
| from django.conf import settings
from django.db.models.aggregates import Sum
class SQLSum(Sum):
    @property
    def sql_template(self):
        if settings.DATABASES['default']['ENGINE'] == \
                'django.db.backends.postgresql_psycopg2':
            return '%(function)s(%(field)s::int)'
        return '%(function)s(%(field)s)'
class BooleanSum(Sum):
    def add_to_query(self, query, alias, col, source, is_summary):
        aggregate = SQLSum(col, source=source, is_summary=is_summary,
                           **self.extra)
        query.aggregates[alias] = aggregate
| Add support for Django 1.10+ | Add support for Django 1.10+
| Python | bsd-2-clause | Mibou/django-boolean-sum |
fdf559007b9596e8d075d3de7f6e9f27e8a24ed6 | rippl/legislature/api.py | rippl/legislature/api.py | from django.http import JsonResponse, HttpResponseBadRequest
from legislature.sunlight.district import DistrictMatcher
def find_district(request):
    try:
        latitude = request.GET['lat']
        longitude = request.GET['lng']
    except KeyError:
        return HttpResponseBadRequest('Need both "lat" and "lng" query params')
    matcher = DistrictMatcher()
    district = matcher.find_district(latitude, longitude)
    return JsonResponse({
        'state': district.state.abbr,
        'state_name': district.state.name,
        'district': district.number,
        'str': str(district)
    })
| from django.http import JsonResponse, HttpResponseBadRequest
from legislature.sunlight.district import DistrictMatcher
def find_district(request):
    try:
        latitude = request.GET['lat']
        longitude = request.GET['lng']
    except KeyError:
        return HttpResponseBadRequest('Need both "lat" and "lng" query params')
    matcher = DistrictMatcher()
    district = matcher.find_district(latitude, longitude)
    return JsonResponse({
        'state': district.state.abbr,
        'state_name': district.state.name,
        'district': district.number,
        'district_id': district.id,
        'str': str(district)
    })
| Add district id to find_district response | Add district id to find_district response
| Python | mit | gnmerritt/dailyrippl,gnmerritt/dailyrippl,gnmerritt/dailyrippl,gnmerritt/dailyrippl |
c8068a60d4e0a2e4f3f272f5db19ced24fdd9b2a | glumpy/graphics/collection/__init__.py | glumpy/graphics/collection/__init__.py | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2014, Nicolas P. Rougier. All rights reserved.
# Distributed under the terms of the new BSD License.
# -----------------------------------------------------------------------------
from . base_collection import BaseCollection
from . glyph_collection import GlyphCollection
from . path_collection import PathCollection
from . point_collection import PointCollection
from . agg_path_collection import AggPathCollection
from . marker_collection import MarkerCollection
from . unimarker_collection import UnimarkerCollection
from . agg_solid_segment_collection import AggSolidSegmentCollection
| # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2014, Nicolas P. Rougier. All rights reserved.
# Distributed under the terms of the new BSD License.
# -----------------------------------------------------------------------------
from . base_collection import BaseCollection
from . glyph_collection import GlyphCollection
from . path_collection import PathCollection
from . point_collection import PointCollection
# from . agg_path_collection import AggPathCollection
# from . marker_collection import MarkerCollection
from . unimarker_collection import UnimarkerCollection
from . agg_solid_segment_collection import AggSolidSegmentCollection
| Fix wrong import in collection | Fix wrong import in collection
| Python | bsd-3-clause | glumpy/glumpy,glumpy/glumpy,duyuan11/glumpy,duyuan11/glumpy |
1849a7ce4e706c8f81a6f3f5b01e0f16c3beb35d | sahgutils/io/__init__.py | sahgutils/io/__init__.py | """Provides convenience utilities for assorted data files.
This package provides a means of organizing the code developed
at UKZN for handling the dataflow and processing of information
for the WRC funded research project K5-1683 "Soil Moisture from
Space".
The interface isn't stable yet so be prepared to update your code
on a regular basis...
"""
| """Provides convenience utilities for assorted data files.
This package provides a means of organizing the code developed
at UKZN for handling the dataflow and processing of information
for the WRC funded research project K5-1683 "Soil Moisture from
Space".
The interface isn't stable yet so be prepared to update your code
on a regular basis...
"""
| Change to Unix line endings | REF: Change to Unix line endings
| Python | bsd-3-clause | sahg/SAHGutils |
eaff795bddb0e07f4ad4e4c9277c5c0f6f199380 | salt/beacons/__init__.py | salt/beacons/__init__.py | # -*- coding: utf-8 -*-
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
    '''
    This class is used to eveluate and execute on the beacon system
    '''
    def __init__(self, opts):
        self.opts = opts
        self.beacons = salt.loader.beacons(opts)
    def process(self, config):
        '''
        Process the configured beacons
        The config must be a dict and looks like this in yaml
        code_block:: yaml
            beacons:
                inotify:
                    - /etc/fstab
                    - /var/cache/foo/*
        '''
        ret = []
        for mod in config:
            fun_str = '{0}.beacon'.format(mod)
            if fun_str in self.beacons:
                tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
                raw = self.beacons[fun_str](config[mod])
                for data in raw:
                    if 'tag' in data:
                        tag += data.pop('tag')
                    ret.append({'tag': tag, 'data': data})
        return ret
| # -*- coding: utf-8 -*-
'''
This package contains the loader modules for the salt streams system
'''
# Import salt libs
import salt.loader
class Beacon(object):
    '''
    This class is used to eveluate and execute on the beacon system
    '''
    def __init__(self, opts):
        self.opts = opts
        self.beacons = salt.loader.beacons(opts)
    def process(self, config):
        '''
        Process the configured beacons
        The config must be a dict and looks like this in yaml
        code_block:: yaml
            beacons:
                inotify:
                    - /etc/fstab
                    - /var/cache/foo/*
        '''
        ret = []
        for mod in config:
            fun_str = '{0}.beacon'.format(mod)
            if fun_str in self.beacons:
                tag = 'salt/beacon/{0}/{1}/'.format(self.opts['id'], mod)
                raw = self.beacons[fun_str](config[mod])
                for data in raw:
                    if 'tag' in data:
                        tag += data.pop('tag')
                    if not 'id' in data:
                        data['id'] = self.opts['id']
                    ret.append({'tag': tag, 'data': data})
        return ret
| Add id tot he beacon event dataset | Add id tot he beacon event dataset
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt |
309439f65bb668aba85a31a46b2633a46ee55777 | apps/careeropportunity/migrations/0001_initial.py | apps/careeropportunity/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    dependencies = [
        ('companyprofile', '0001_squashed_0003_company_image'),
    ]
    operations = [
        migrations.CreateModel(
            name='CareerOpportunity',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=100, verbose_name='tittel')),
                ('ingress', models.CharField(max_length=250, verbose_name='ingress')),
                ('description', models.TextField(verbose_name='beskrivelse')),
                ('start', models.DateTimeField(verbose_name='aktiv fra')),
                ('end', models.DateTimeField(verbose_name='aktiv til')),
                ('featured', models.BooleanField(default=False, verbose_name='fremhevet')),
                ('company', models.ForeignKey(related_name='company', to='companyprofile.Company')),
            ],
            options={
                'verbose_name': 'karrieremulighet',
                'verbose_name_plural': 'karrieremuligheter',
                'permissions': (('view_careeropportunity', 'View CareerOpportunity'),),
            },
            bases=(models.Model,),
        ),
    ]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    dependencies = [
        ('companyprofile', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='CareerOpportunity',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=100, verbose_name='tittel')),
                ('ingress', models.CharField(max_length=250, verbose_name='ingress')),
                ('description', models.TextField(verbose_name='beskrivelse')),
                ('start', models.DateTimeField(verbose_name='aktiv fra')),
                ('end', models.DateTimeField(verbose_name='aktiv til')),
                ('featured', models.BooleanField(default=False, verbose_name='fremhevet')),
                ('company', models.ForeignKey(related_name='company', to='companyprofile.Company')),
            ],
            options={
                'verbose_name': 'karrieremulighet',
                'verbose_name_plural': 'karrieremuligheter',
                'permissions': (('view_careeropportunity', 'View CareerOpportunity'),),
            },
            bases=(models.Model,),
        ),
    ]
| Revert "Change careeropportunity migration dep" | Revert "Change careeropportunity migration dep"
This reverts commit 60fdfab7e3b557e46276c225ff159f5773930525.
| Python | mit | dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4 |
437623aee55fd68683126bd6852df52379837eaa | bash_command.py | bash_command.py | import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
    def run(self):
        global last_command
        window = self.window
        view = window.active_view()
        if view.file_name() is not None:
            path = os.path.join(os.path.dirname(view.file_name()), '')
            window.show_input_panel(
                'Bash:',
                last_command,
                lambda command: (
                    self.run_bash(path, command)
                ),
                None,
                None
            )
    def run_bash(self, path, command):
        global last_command
        last_command = command
        if command.startswith('$'):
            command = command[1:]
            path = git_path_for_window(self.window)
        final_command = "cd '{0}'; {1}".format(path, command)
        output, _ = run_bash_for_output(final_command)
        print(final_command, " ", output)
        results_view = self.window.new_file()
        results_view.set_scratch(True)
        results_view.set_name("BashOutput")
        # deps: this is from utilities.py
        results_view.run_command('replace_content', {"new_content": output})
        results_view.sel().clear()
        results_view.sel().add(sublime.Region(0, 0))
        self.window.focus_view(results_view)
| import sublime, sublime_plugin
import os
from .common.utils import run_bash_for_output
from .common.utils import git_path_for_window
last_command = ""
class RunBash(sublime_plugin.WindowCommand):
    def run(self):
        global last_command
        window = self.window
        view = window.active_view()
        if view.file_name() is not None:
            path = os.path.join(os.path.dirname(view.file_name()), '')
            window.show_input_panel(
                'Bash:',
                last_command,
                lambda command: (
                    self.run_bash(path, command)
                ),
                None,
                None
            )
    def run_bash(self, path, command):
        global last_command
        last_command = command
        if command.startswith('$'):
            command = command[1:]
            path = git_path_for_window(self.window)
        final_command = "cd '{0}'; {1}".format(path, command)
        output, err = run_bash_for_output(final_command)
        new_content = output + '\n' + (100 * '=') + '\n' + err
        results_view = self.window.new_file()
        results_view.set_scratch(True)
        results_view.set_name("BashOutput")
        # deps: this is from utilities.py
        results_view.run_command('replace_content', {"new_content": new_content})
        results_view.sel().clear()
        results_view.sel().add(sublime.Region(0, 0))
        self.window.focus_view(results_view)
| Print both output + error for bash command | Print both output + error for bash command
| Python | mit | ktuan89/sublimeplugins |
f070883acc64699c1673f1c1e3f81029f6dea4c2 | Python/Product/PythonTools/ptvsd/setup.py | Python/Product/PythonTools/ptvsd/setup.py | #!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
      version='2.1.0a1',
      description='Python Tools for Visual Studio remote debugging server',
      license='Apache License 2.0',
      author='Microsoft Corporation',
      author_email='[email protected]',
      url='https://pytools.codeplex.com/',
      classifiers=[
          'Development Status :: 3 - Alpha',
          'Programming Language :: Python',
          'Programming Language :: Python :: 2',
          'Programming Language :: Python :: 3',
          'License :: OSI Approved :: Apache Software License'],
      packages=['ptvsd']
      )
| #!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
setup(name='ptvsd',
      version='2.1.0b1',
      description='Python Tools for Visual Studio remote debugging server',
      license='Apache License 2.0',
      author='Microsoft Corporation',
      author_email='[email protected]',
      url='https://pytools.codeplex.com/',
      classifiers=[
          'Development Status :: 4 - Beta',
          'Programming Language :: Python',
          'Programming Language :: Python :: 2',
          'Programming Language :: Python :: 3',
          'License :: OSI Approved :: Apache Software License'],
      packages=['ptvsd']
      )
| Update ptvsd package metadata for 2.1 beta. | Update ptvsd package metadata for 2.1 beta.
| Python | apache-2.0 | crwilcox/PTVS,bolabola/PTVS,modulexcite/PTVS,modulexcite/PTVS,juanyaw/PTVS,xNUTs/PTVS,denfromufa/PTVS,DEVSENSE/PTVS,modulexcite/PTVS,bolabola/PTVS,int19h/PTVS,fjxhkj/PTVS,msunardi/PTVS,DEVSENSE/PTVS,mlorbetske/PTVS,MetSystem/PTVS,huguesv/PTVS,christer155/PTVS,xNUTs/PTVS,gomiero/PTVS,christer155/PTVS,fivejjs/PTVS,MetSystem/PTVS,crwilcox/PTVS,juanyaw/PTVS,ChinaQuants/PTVS,xNUTs/PTVS,gomiero/PTVS,huguesv/PTVS,xNUTs/PTVS,crwilcox/PTVS,fjxhkj/PTVS,DinoV/PTVS,bolabola/PTVS,jkorell/PTVS,int19h/PTVS,juanyaw/PTVS,juanyaw/PTVS,jkorell/PTVS,zooba/PTVS,msunardi/PTVS,fivejjs/PTVS,alanch-ms/PTVS,dut3062796s/PTVS,gomiero/PTVS,Habatchii/PTVS,int19h/PTVS,MetSystem/PTVS,dut3062796s/PTVS,crwilcox/PTVS,DinoV/PTVS,fivejjs/PTVS,christer155/PTVS,Habatchii/PTVS,xNUTs/PTVS,jkorell/PTVS,Microsoft/PTVS,Microsoft/PTVS,juanyaw/PTVS,crwilcox/PTVS,gilbertw/PTVS,Microsoft/PTVS,crwilcox/PTVS,fjxhkj/PTVS,jkorell/PTVS,msunardi/PTVS,modulexcite/PTVS,DEVSENSE/PTVS,fjxhkj/PTVS,MetSystem/PTVS,huguesv/PTVS,msunardi/PTVS,MetSystem/PTVS,dut3062796s/PTVS,alanch-ms/PTVS,dut3062796s/PTVS,christer155/PTVS,fivejjs/PTVS,mlorbetske/PTVS,DEVSENSE/PTVS,alanch-ms/PTVS,DEVSENSE/PTVS,DinoV/PTVS,bolabola/PTVS,xNUTs/PTVS,int19h/PTVS,mlorbetske/PTVS,juanyaw/PTVS,int19h/PTVS,ChinaQuants/PTVS,zooba/PTVS,huguesv/PTVS,DinoV/PTVS,ChinaQuants/PTVS,fjxhkj/PTVS,Microsoft/PTVS,denfromufa/PTVS,christer155/PTVS,zooba/PTVS,gilbertw/PTVS,DinoV/PTVS,fivejjs/PTVS,denfromufa/PTVS,Habatchii/PTVS,gomiero/PTVS,mlorbetske/PTVS,MetSystem/PTVS,DinoV/PTVS,Habatchii/PTVS,bolabola/PTVS,alanch-ms/PTVS,denfromufa/PTVS,alanch-ms/PTVS,modulexcite/PTVS,mlorbetske/PTVS,jkorell/PTVS,gilbertw/PTVS,Habatchii/PTVS,mlorbetske/PTVS,ChinaQuants/PTVS,Habatchii/PTVS,msunardi/PTVS,christer155/PTVS,zooba/PTVS,huguesv/PTVS,alanch-ms/PTVS,gilbertw/PTVS,jkorell/PTVS,fivejjs/PTVS,bolabola/PTVS,denfromufa/PTVS,huguesv/PTVS,msunardi/PTVS,fjxhkj/PTVS,denfromufa/PTVS,ChinaQuants/PTVS,Microsoft/PTVS,dut3062796s/PTVS,dut3062796s/PTVS,Microsoft/PTVS,zooba/PTVS,modulexcite/PTVS,zooba/PTVS,ChinaQuants/PTVS,gilbertw/PTVS,DEVSENSE/PTVS,gomiero/PTVS,gomiero/PTVS,int19h/PTVS,gilbertw/PTVS |
6a0c619d743d57a1ff1684144c148c9b8cc9a0be | day-10/solution.py | day-10/solution.py | def lookandsay(line):
    p = None
    n = 0
    result = []
    for c in line:
        if n > 0 and p is not c:
            result.append(str(n))
            result.append(p)
            n = 0
        p = c
        n += 1
    result.append(str(n))
    result.append(p)
    return ''.join(result)
line = "1321131112"
for x in range(40):
    line = lookandsay(line)
print "40:", len(line)
for x in range(10):
    line = lookandsay(line)
print "50:", len(line)
| import itertools
def lookandsay(line):
    return ''.join([str(len(list(it))) + c for c, it in itertools.groupby(line)])
line = "1321131112"
for x in range(40):
    line = lookandsay(line)
print "40:", len(line)
for x in range(10):
    line = lookandsay(line)
print "50:", len(line)
| Reimplement day 10 using itertools combine. | Reimplement day 10 using itertools combine.
Now it DOES run using pypy, and is faster again.
| Python | mit | bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode,bertptrs/adventofcode |
53681ae30bdaccce2321601f1ebab09b4c572cc9 | sqlalchemy_mptt/__init__.py | sqlalchemy_mptt/__init__.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 uralbash <[email protected]>
#
# Distributed under terms of the MIT license.
from .mixins import BaseNestedSets
__version__ = "0.0.8"
__mixins__ = [BaseNestedSets]
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 uralbash <[email protected]>
#
# Distributed under terms of the MIT license.
from sqlalchemy.orm import mapper
from .mixins import BaseNestedSets
from .events import TreesManager
__version__ = "0.0.8"
__mixins__ = [BaseNestedSets]
__all__ = ['BaseNestedSets', 'mptt_sessionmaker']
tree_manager = TreesManager(BaseNestedSets)
tree_manager.register_mapper(mapper)
mptt_sessionmaker = tree_manager.register_factory
| Make a default tree manager importable from the package. | Make a default tree manager importable from the package.
| Python | mit | uralbash/sqlalchemy_mptt,ITCase/sqlalchemy_mptt,ITCase/sqlalchemy_mptt,uralbash/sqlalchemy_mptt |
f032501126e7bb6d86441e38112c6bdf5035c62e | icekit/search_indexes.py | icekit/search_indexes.py | from fluent_pages.pagetypes.flatpage.models import FlatPage
from fluent_pages.pagetypes.fluentpage.models import FluentPage
from haystack import indexes
class FluentPageIndex(indexes.SearchIndex, indexes.Indexable):
"""
Search index for a fluent page.
"""
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author')
publication_date = indexes.DateTimeField(model_attr='publication_date', null=True)
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FluentPage
def index_queryset(self, using=None):
"""
Queryset appropriate for this object to allow search for.
"""
return self.get_model().objects.published()
class FlatPageIndex(FluentPageIndex):
"""
Search index for a flat page.
As everything except the model is the same as for a FluentPageIndex
we shall subclass it and overwrite the one part we need.
"""
@staticmethod
def get_model():
"""
Get the model for the search index.
"""
return FlatPage
| from fluent_pages.pagetypes.flatpage.models import FlatPage
from fluent_pages.pagetypes.fluentpage.models import FluentPage
from haystack import indexes
from django.conf import settings
# Optional search indexes which can be used with the default FluentPage and FlatPage models.
if getattr(settings, 'ICEKIT_USE_SEARCH_INDEXES', True):
    class FluentPageIndex(indexes.SearchIndex, indexes.Indexable):
        """
        Search index for a fluent page.
        """
        text = indexes.CharField(document=True, use_template=True)
        author = indexes.CharField(model_attr='author')
        publication_date = indexes.DateTimeField(model_attr='publication_date', null=True)
        @staticmethod
        def get_model():
            """
            Get the model for the search index.
            """
            return FluentPage
        def index_queryset(self, using=None):
            """
            Queryset appropriate for this object to allow search for.
            """
            return self.get_model().objects.published()
    class FlatPageIndex(FluentPageIndex):
        """
        Search index for a flat page.
        As everything except the model is the same as for a FluentPageIndex
        we shall subclass it and overwrite the one part we need.
        """
        @staticmethod
        def get_model():
            """
            Get the model for the search index.
            """
            return FlatPage
| Add setting to turn of search indexes. | Add setting to turn of search indexes.
| Python | mit | ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit |
44843e5c719b18a9f45a60799d889a4a51dac91d | site/cgi-bin/csv-columns.py | site/cgi-bin/csv-columns.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Give back the columns of a CSV and the in
# http://www.tutorialspoint.com/python/python_cgi_programming.htm
import cgi
import csv
import sys
import codecs
import cgitb
CSV_DIR = '../csv/' # CSV upload directory
# UTF-8 hack
# from http://stackoverflow.com/a/11764727
reload(sys)
sys.setdefaultencoding('utf-8')
sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
# If you need input too, read from char_stream as you would sys.stdin
#char_stream = codecs.getreader('utf-8')(sys.stdin)
# python 2.x sys.stdout.encoding by default is None
# better option would be setting os.environ.get('PYTHONIOENCODING') to UTF-8
cgitb.enable() # pretty debugging
form = cgi.FieldStorage()
filename = form.getvalue('dataset')
f = open(CSV_DIR + filename, 'r')
r = csv.reader(f, dialect=csv.excel) # Create CSV row reader
col_names = next(r)
print '''\
Status: 200\r
Content-Type: application/json;charset=UTF-8\r
\r
{ "columns" : [%s] }\r
''' % ( '"' + '","'.join(col_names).encode('utf-8') + '"', )
| #!/usr/bin/python
# -*- coding: utf-8 -*-
# Give back the columns of a CSV and the in
# http://www.tutorialspoint.com/python/python_cgi_programming.htm
import cgi
import csv
import sys
import codecs
import cgitb
CSV_DIR = '../csv/' # CSV upload directory
# UTF-8 hack
# from http://stackoverflow.com/a/11764727
reload(sys)
sys.setdefaultencoding('utf-8')
sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
# If you need input too, read from char_stream as you would sys.stdin
#char_stream = codecs.getreader('utf-8')(sys.stdin)
# python 2.x sys.stdout.encoding by default is None
# better option would be setting os.environ.get('PYTHONIOENCODING') to UTF-8
cgitb.enable() # pretty debugging
form = cgi.FieldStorage()
filename = form.getvalue('dataset')
f = open(CSV_DIR + filename, 'r')
r = csv.reader(f, dialect=csv.excel) # Create CSV row reader
col_names = next(r)
print '''\
Status: 200\r
Content-Type: application/json;charset=UTF-8\r
Cache-Control: public, max-age=3600\r
\r
{ "columns" : [%s] }\r
''' % ( '"' + '","'.join(col_names).encode('utf-8') + '"', )
| Add cache header for columns | Add cache header for columns
| Python | agpl-3.0 | alejosanchez/CSVBenford,alejosanchez/CSVBenford |
cca4c42e07ad7fc0c3e96284a6bfbf67d59860cb | sites/cozylan/config_dev.py | sites/cozylan/config_dev.py | # Examplary development configuration for the "CozyLAN" demo site
DEBUG = True
SECRET_KEY = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
SESSION_COOKIE_SECURE = False
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://byceps:[email protected]/byceps'
REDIS_URL = 'redis://127.0.0.1:6379/0'
APP_MODE = 'site'
SITE_ID = 'cozylan'
MAIL_DEBUG = True
MAIL_DEFAULT_SENDER = 'CozyLAN <[email protected]>'
MAIL_SUPPRESS_SEND = False
MAIL_TRANSPORT = 'logging'
DEBUG_TOOLBAR_ENABLED = True
STYLE_GUIDE_ENABLED = True
| # Examplary development configuration for the "CozyLAN" demo site
DEBUG = True
SECRET_KEY = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
SESSION_COOKIE_SECURE = False
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://byceps:[email protected]/byceps'
REDIS_URL = 'redis://127.0.0.1:6379/0'
APP_MODE = 'site'
SITE_ID = 'cozylan'
MAIL_DEBUG = True
MAIL_SUPPRESS_SEND = False
MAIL_TRANSPORT = 'logging'
DEBUG_TOOLBAR_ENABLED = True
STYLE_GUIDE_ENABLED = True
| Remove default email sender from CozyLAN config | Remove default email sender from CozyLAN config
Brand-specific emails should always use the sender from the
brand-specific email configuration.
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps |
08dc0ce7c44d0149b443261ff6d3708e28a928e7 | src/meshparser/__init__.py | src/meshparser/__init__.py | from pkg_resources import resource_string
version = resource_string(__name__, 'version.txt').strip()
__version__ = version
| from pkg_resources import resource_string
version = resource_string(__name__, 'version.txt').strip()
__version__ = version.decode('utf-8')
| Add decode to version read from pkg_resources. | Add decode to version read from pkg_resources.
| Python | apache-2.0 | ABI-Software/MeshParser |
c67ea7029a8c8b9748c401dc4852f98f8bfc96a1 | opencontrail_netns/vrouter_control.py | opencontrail_netns/vrouter_control.py | import sys
import getopt
import logging
import socket
from contrail_lib import rpc_client_instance, uuid_from_string
import nova_contrail_vif.gen_py.instance_service
def add_interface(interface_name, vmi, vm, mac):
    from nova_contrail_vif.gen_py.instance_service import ttypes
    data = ttypes.Port(
        uuid_from_string(vmi),
        uuid_from_string(vm),
        interface_name,
        '0.0.0.0',
        [0] * 16,
        mac)
    logging.debug(data)
    rpc = rpc_client_instance()
    if not rpc:
        return
    try:
        rpc.AddPort([data])
    except socket.error:
        logging.error('RPC failure')
def del_interface(vmi):
    rpc = rpc_client_instance()
    if not rpc:
        return
    try:
        rpc.DeletePort(uuid_from_string(vmi))
    except socket.error:
        logging.error('RPC failure')
    logging.info('Deleted virtual-machine-interface uuid = ' + vmi)
def interface_register(vm, vmi, iface_name):
    mac = vmi.virtual_machine_interface_mac_addresses.mac_address[0]
    add_interface(iface_name, vmi.uuid, vm.uuid, mac)
def interface_unregister(vmi_uuid):
    del_interface(vmi_uuid)
| import sys
import getopt
import logging
import socket
from contrail_lib import rpc_client_instance, uuid_from_string
import contrail_vrouter_api.gen_py.instance_service
def add_interface(interface_name, vmi, vm, mac):
    from contrail_vrouter_api.gen_py.instance_service import ttypes
    data = ttypes.Port(
        uuid_from_string(vmi),
        uuid_from_string(vm),
        interface_name,
        '0.0.0.0',
        [0] * 16,
        mac)
    logging.debug(data)
    rpc = rpc_client_instance()
    if not rpc:
        return
    try:
        rpc.AddPort([data])
    except socket.error:
        logging.error('RPC failure')
def del_interface(vmi):
    rpc = rpc_client_instance()
    if not rpc:
        return
    try:
        rpc.DeletePort(uuid_from_string(vmi))
    except socket.error:
        logging.error('RPC failure')
    logging.info('Deleted virtual-machine-interface uuid = ' + vmi)
def interface_register(vm, vmi, iface_name):
    mac = vmi.virtual_machine_interface_mac_addresses.mac_address[0]
    add_interface(iface_name, vmi.uuid, vm.uuid, mac)
def interface_unregister(vmi_uuid):
    del_interface(vmi_uuid)
| Use contrail_vrouter_api instead of nova_contrail_vif. | Use contrail_vrouter_api instead of nova_contrail_vif.
| Python | apache-2.0 | tonyliu0592/opencontrail-netns,DreamLab/opencontrail-netns,pedro-r-marques/opencontrail-netns |
670bbf8758e63cfeafc1de6f9330403dec2517c2 | astrobin_apps_platesolving/utils.py | astrobin_apps_platesolving/utils.py | # Python
import urllib2
# Django
from django.conf import settings
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
def getFromStorage(image, alias):
    def encoded(path):
        return urllib2.quote(path.encode('utf-8'))
    url = image.thumbnail(alias)
    if "://" in url:
        # We are getting the full path and must only encode the part after the protocol
        # (we assume that the hostname is ASCII)
        protocol, path = url.split("://")
        url = protocol + encoded(path)
    else:
        url = settings.BASE_URL + encoded(url)
    headers = { 'User-Agent' : 'Mozilla/5.0' }
    req = urllib2.Request(url, None, headers)
    img = NamedTemporaryFile(delete = True)
    img.write(urllib2.urlopen(req).read())
    img.flush()
    img.seek(0)
    return File(img)
| # Python
import urllib2
# Django
from django.conf import settings
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
def getFromStorage(image, alias):
    url = image.thumbnail(alias)
    if "://" in url:
        url = url.split('://')[1]
    else:
        url = settings.BASE_URL + url
    url = 'http://' + urllib2.quote(url.encode('utf-8'))
    headers = { 'User-Agent' : 'Mozilla/5.0' }
    req = urllib2.Request(url, None, headers)
    img = NamedTemporaryFile(delete = True)
    img.write(urllib2.urlopen(req).read())
    img.flush()
    img.seek(0)
    return File(img)
| Revert "Fix plate-solving on local development mode" | Revert "Fix plate-solving on local development mode"
This reverts commit 40897be402bd05ed5fb53e116f03d2d954720245.
| Python | agpl-3.0 | astrobin/astrobin,astrobin/astrobin,astrobin/astrobin,astrobin/astrobin |
aaa6b6683e4ce46ec672899802c035c592d50b0e | app/initial_tables.py | app/initial_tables.py | from tables import engine
def create_tables():
"""
Create tables the lazy way... with raw SQL.
"""
conn = engine.raw_connection()
cur = conn.cursor()
cur.execute(
"""
CREATE TABLE file_upload(
document_name TEXT
, time_uploaded TEXT DEFAULT now()
, filename TEXT NOT NULL
, word_counts JSON NOT NULL
, PRIMARY KEY(document_name, time_uploaded)
);
"""
)
conn.commit()
if __name__ == '__main__':
create_tables()
| from tables import engine
def create_tables():
"""
Create tables the lazy way... with raw SQL.
"""
conn = engine.raw_connection()
cur = conn.cursor()
cur.execute(
"""
CREATE TABLE file_upload_meta(
document_name TEXT NOT NULL
, document_slug TEXT NOT NULL
, time_uploaded TEXT NOT NULL DEFAULT now()
, filename TEXT NOT NULL
, word_counts JSON NOT NULL
, PRIMARY KEY(document_slug, time_uploaded)
);
"""
)
conn.commit()
if __name__ == '__main__':
create_tables()
| Add slug field to file upload meta table, rename table | Add slug field to file upload meta table, rename table
| Python | mit | sprin/heroku-tut |
5559e9f429e9019959f1c79fbc2a7f82c12f91c4 | src/hpp/utils.py | src/hpp/utils.py | # Copyright (c) 2020, CNRS
# Authors: Guilhem Saurel <[email protected]>
import os
import subprocess
import time
try:
    from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
    DEVNULL = os.open(os.devnull, os.O_RDWR)
    def run(*args):
        subprocess.Popen(*args).wait()
class ServerManager:
    """A context to ensure a server is running."""
    def __init__(self, server="hppcorbaserver"):
        self.server = server
        run(["killall", self.server])
    def __enter__(self):
        """Run the server in background
        stdout and stderr outputs of the child process are redirected to devnull.
        preexec_fn is used to ignore ctrl-c signal send to the main script
        (otherwise they are forwarded to the child process)
        """
        self.process = subprocess.Popen(
            self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
        )
        # give it some time to start
        time.sleep(3)
    def __exit__(self, exc_type, exc_value, exc_traceback):
        self.process.kill()
| # Copyright (c) 2020, CNRS
# Authors: Guilhem Saurel <[email protected]>
import os
import subprocess
import time
import hpp.corbaserver
try:
    from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
    DEVNULL = os.open(os.devnull, os.O_RDWR)
    def run(*args):
        subprocess.Popen(*args).wait()
class ServerManager:
    """A context to ensure a server is running."""
    def __init__(self, server="hppcorbaserver"):
        self.server = server
        run(["killall", self.server])
    def __enter__(self):
        """Run the server in background
        stdout and stderr outputs of the child process are redirected to devnull.
        preexec_fn is used to ignore ctrl-c signal send to the main script
        (otherwise they are forwarded to the child process)
        """
        self.process = subprocess.Popen(
            self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
        )
        # give it some time to start
        time.sleep(3)
    def __exit__(self, exc_type, exc_value, exc_traceback):
        tool = hpp.corbaserver.tools.Tools()
        tool.shutdown()
        self.process.communicate()
| Fix how hppcorbaserver is killed in ServerManager | Fix how hppcorbaserver is killed in ServerManager
| Python | bsd-2-clause | humanoid-path-planner/hpp-corbaserver,humanoid-path-planner/hpp-corbaserver |
503cc29b03694d2b8daaf3427f2129312cfaf328 | beer/models.py | beer/models.py | from django.db import models
BEER_CHOICES = (
    (1, 'Domestic'),
    (2, 'Import'),
)
class Beer(models.Model):
    name = models.CharField(max_length=200)
    slug = models.SlugField(unique=True)
    brewery = models.ForeignKey('Brewery')
    locality = models.IntegerField(choices=BEER_CHOICES)
    description = models.TextField(blank=True)
    def __unicode__(self):
        return self.name
class Brewery(models.Model):
    name = models.CharField(max_length=200)
    slug = models.SlugField(unique=True)
    description = models.TextField(blank=True)
    def __unicode__(self):
        return self.name | from django.db import models
BEER_CHOICES = (
    (1, 'Domestic'),
    (2, 'Import'),
)
class Beer(models.Model):
    name = models.CharField(max_length=200)
    slug = models.SlugField(unique=True)
    brewery = models.ForeignKey('Brewery')
    locality = models.IntegerField(choices=BEER_CHOICES)
    description = models.TextField(blank=True)
    def __unicode__(self):
        return self.name
class Brewery(models.Model):
    name = models.CharField(max_length=200)
    slug = models.SlugField(unique=True)
    description = models.TextField(blank=True)
    def __str__(self):
        return self.name | Change the __unicode__ to __str__ | Change the __unicode__ to __str__
| Python | mit | OckiFals/crud-django,OckiFals/crud-django |
c0a74ce4110d295b3662066e4d08c4ab65fb0905 | bills/views.py | bills/views.py |
from django.shortcuts import render, redirect
from bills.utils import get_all_subjects, get_all_locations
from opencivicdata.models import Bill
def bill_list(request):
    subjects = get_all_subjects()
    if request.POST.getlist('bill_subjects'):
        filter_subjects = request.POST.getlist('bill_subjects')
        all_bills = Bill.objects.filter(subject__in=filter_subjects)
    else:
        all_bills = Bill.objects.all()
    details = []
    for bill in all_bills:
        bill_detail = {}
        bill_detail['title'] = bill.title
        bill_detail['from_organization'] = bill.from_organization.name
        bill_detail['actions'] = []
        bill_detail['sponsorships'] = []
        for action in bill.actions.all():
            bill_detail['actions'].append({'description': action.description, 'date': action.date})
        for sponsorship in bill.sponsorships.all():
            bill_detail['sponsorships'].append({
                'sponsor': sponsorship.name,
                'id': sponsorship.id,
                'primary': sponsorship.primary
            })
        details.append(bill_detail)
    if request.method == 'POST':
        with transaction.atomic():
            filter_subjects = request.POST.getlist('bill_subjects')
        return redirect('.')
    return render(
        request,
        'bills/all.html',
        {'bills': details, 'subjects': subjects}
    )
|
from django.db import transaction
from django.shortcuts import render, redirect
from preferences.views import _mark_selected
from bills.utils import get_all_subjects, get_all_locations
from opencivicdata.models import Bill
def bill_list(request):
    subjects = get_all_subjects()
    if request.POST.getlist('bill_subjects'):
        filter_subjects = request.POST.getlist('bill_subjects')
        all_bills = Bill.objects.filter(subject__contains=filter_subjects)
    else:
        filter_subjects = []
        all_bills = Bill.objects.all()
    subjects = _mark_selected(subjects, filter_subjects)
    details = []
    for bill in all_bills:
        bill_detail = {}
        bill_detail['title'] = bill.title
        bill_detail['from_organization'] = bill.from_organization.name
        bill_detail['actions'] = []
        bill_detail['sponsorships'] = []
        for action in bill.actions.all():
            bill_detail['actions'].append({'description': action.description, 'date': action.date})
        for sponsorship in bill.sponsorships.all():
            bill_detail['sponsorships'].append({
                'sponsor': sponsorship.name,
                'id': sponsorship.id,
                'primary': sponsorship.primary
            })
        details.append(bill_detail)
    return render(
        request,
        'bills/all.html',
        {'bills': details, 'subjects': subjects}
    )
| Mark pre-selected topics on form | Mark pre-selected topics on form
| Python | mit | jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot |
d78f1edd09b0d8c097d7fe79eb322bdc310e5604 | bsuite/baselines/jax/boot_dqn/run_test.py | bsuite/baselines/jax/boot_dqn/run_test.py | # python3
# pylint: disable=g-bad-file-header
# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Basic test coverage for agent training."""
from absl import flags
from absl.testing import absltest
from absl.testing import parameterized
from bsuite import sweep
from bsuite.baselines.jax.boot_dqn import run
FLAGS = flags.FLAGS
class RunTest(parameterized.TestCase):
  @classmethod
  def setUpClass(cls):
    super().setUpClass()
    FLAGS.num_ensemble = 8
    FLAGS.num_episodes = 2
    FLAGS.logging_mode = 'terminal'
  @parameterized.parameters(*sweep.TESTING)
  def test_run(self, bsuite_id: str):
    run.run(bsuite_id)
if __name__ == '__main__':
  absltest.main()
| # python3
# pylint: disable=g-bad-file-header
# Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Basic test coverage for agent training."""
from absl import flags
from absl.testing import absltest
from absl.testing import parameterized
from bsuite import sweep
from bsuite.baselines.jax.boot_dqn import run
FLAGS = flags.FLAGS
class RunTest(parameterized.TestCase):
  @classmethod
  def setUpClass(cls):
    super().setUpClass()
    FLAGS.num_ensemble = 2
    FLAGS.num_episodes = 5
    FLAGS.logging_mode = 'terminal'
  @parameterized.parameters(*sweep.TESTING)
  def test_run(self, bsuite_id: str):
    run.run(bsuite_id)
if __name__ == '__main__':
  absltest.main()
| Tweak unit tests parameters for boot_dqn. | Tweak unit tests parameters for boot_dqn.
We can speed this up further by setting num_ensemble = 2 (anything >1 gives full coverage).
Setting num_episodes = 5 for consistency with other baseline unit tests.
PiperOrigin-RevId: 307000268
Change-Id: Id8c757c884860561e92c6d325264c75c51d0e0c4
| Python | apache-2.0 | deepmind/bsuite,deepmind/bsuite |
c5496fddccffd2f16c0b4a140506b9d577d50b61 | eventlog/models.py | eventlog/models.py | from django.conf import settings
from django.db import models
from django.utils import timezone
import jsonfield
from .signals import event_logged
class Log(models.Model):
    user = models.ForeignKey(
        getattr(settings, "AUTH_USER_MODEL", "auth.User"),
        null=True,
        on_delete=models.SET_NULL
    )
    timestamp = models.DateTimeField(default=timezone.now, db_index=True)
    action = models.CharField(max_length=50, db_index=True)
    extra = jsonfield.JSONField()
    class Meta:
        ordering = ["-timestamp"]
def log(user, action, extra=None):
    if (user is not None and not user.is_authenticated()):
        user = None
    if extra is None:
        extra = {}
    event = Log.objects.create(user=user, action=action, extra=extra)
    event_logged.send(sender=Log, event=event)
    return event
| from django.conf import settings
from django.db import models
from django.utils import timezone
import jsonfield
from .signals import event_logged
class Log(models.Model):
    user = models.ForeignKey(
        getattr(settings, "AUTH_USER_MODEL", "auth.User"),
        null=True,
        on_delete=models.SET_NULL
    )
    timestamp = models.DateTimeField(default=timezone.now, db_index=True)
    action = models.CharField(max_length=50, db_index=True)
    extra = jsonfield.JSONField()
    @property
    def template_fragment_name(self):
        return "eventlog/{}.html".format(self.action.lower())
    class Meta:
        ordering = ["-timestamp"]
def log(user, action, extra=None):
    if (user is not None and not user.is_authenticated()):
        user = None
    if extra is None:
        extra = {}
    event = Log.objects.create(user=user, action=action, extra=extra)
    event_logged.send(sender=Log, event=event)
    return event
| Add property to provide template fragment name | Add property to provide template fragment name
| Python | mit | jawed123/pinax-eventlog,pinax/pinax-eventlog,KleeTaurus/pinax-eventlog,rosscdh/pinax-eventlog |
5a45840e81d612e1f743ad063fd32da4d19354d4 | cacheops/signals.py | cacheops/signals.py | import django.dispatch
cache_read = django.dispatch.Signal(providing_args=["func", "hit"])
cache_invalidated = django.dispatch.Signal(providing_args=["obj_dict"])
| import django.dispatch
cache_read = django.dispatch.Signal() # args: func, hit
cache_invalidated = django.dispatch.Signal() # args: obj_dict
| Stop using Signal(providing_args) deprected in Django 4.0 | Stop using Signal(providing_args) deprected in Django 4.0
Closes #393
| Python | bsd-3-clause | Suor/django-cacheops |
b3b67fe0e68423fc2f85bccf1f20acdb779a38ba | pylxd/deprecated/tests/utils.py | pylxd/deprecated/tests/utils.py | # Copyright (c) 2015 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pylxd import api
from pylxd import exceptions as lxd_exceptions
def upload_image(image):
alias = "{}/{}/{}/{}".format(
image["os"], image["release"], image["arch"], image["variant"]
)
lxd = api.API()
imgs = api.API(host="images.linuxcontainers.org")
d = imgs.alias_show(alias)
meta = d[1]["metadata"]
tgt = meta["target"]
try:
lxd.alias_update(meta)
except lxd_exceptions.APIError as ex:
if ex.status_code == 404:
lxd.alias_create(meta)
return tgt
def delete_image(image):
lxd = api.API()
lxd.image_delete(image)
| # Copyright (c) 2015 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pylxd import api
def delete_image(image):
    lxd = api.API()
    lxd.image_delete(image)
| Remove unused testing utility function | Remove unused testing utility function
Signed-off-by: Dougal Matthews <[email protected]>
| Python | apache-2.0 | lxc/pylxd,lxc/pylxd |
57bb37d7579620005a49613ff90f0a2eec55a77e | backend/offers_web.py | backend/offers_web.py | import falcon
import json
import rethinkdb as r
MAX_OFFERS = 100
class OfferListResource:
    def __init__(self):
        self._db = r.connect('localhost', 28015)
    def on_get(self, req, resp):
        """Returns all offers available"""
        try:
            limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1)))
        except ValueError as e:
            raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page")
        if page < 1:
            raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page")
        elif limit < 1:
            raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page")
        else:
            cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db)
            count = r.db('voyageavecmoi').table('offers').count()
            resp.body = json.dumps(list(cursor))
            resp.append_header('X-Max-Elements', count)
app = falcon.API()
app.add_route('/api/offers', OfferListResource())
| import falcon
import json
import rethinkdb as r
MAX_OFFERS = 100
class OfferListResource:
    def __init__(self):
        self._db = r.connect('localhost', 28015)
    def on_get(self, req, resp):
        """Returns all offers available"""
        try:
            limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1)))
        except ValueError as e:
            raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page")
        if page < 1:
            raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page")
        elif limit < 1:
            raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page")
        else:
            cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db)
            count = r.db('voyageavecmoi').table('offers').count().run(self._db)
            resp.body = json.dumps(list(cursor))
            resp.append_header('X-Max-Elements', count)
app = falcon.API()
app.add_route('/api/offers', OfferListResource())
| Fix max elements in header | Fix max elements in header
| Python | agpl-3.0 | jilljenn/voyageavecmoi,jilljenn/voyageavecmoi,jilljenn/voyageavecmoi |
cd3b686204e48412765c633f61f02a6166141125 | citools/__init__.py | citools/__init__.py | """
CI tools is a collection of small configurations aimed to ease setting up
complete CI system, targettet on django apps.
"""
VERSION = (0, 0, 1, 0)
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))
| """
CI tools is a collection of small configurations aimed to ease setting up
complete CI system, targettet on django apps.
"""
VERSION = (0, 1, 0)
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))
| Use standard three-digits for now | Use standard three-digits for now
| Python | bsd-3-clause | ella/citools,ella/citools |
fac7e7d8759aab7e2bea666e55d71e35da45c334 | groundstation/gref.py | groundstation/gref.py | import os
class Gref(object):
    def __init__(self, store, channel, identifier):
        self.store = store
        self.channel = channel.replace("/", "_")
        self.identifier = identifier
        self._node_path = os.path.join(self.store.gref_path(),
                                       self.channel,
                                       self.identifier)
    def __str__(self):
        return "%s/%s" % (self.channel, self.identifier)
    def exists(self):
        return os.path.exists(self._node_path)
    def node_path(self):
        if not self.exists():
            os.makedirs(self._node_path)
        return self._node_path
    def write_tip(self, tip, signature):
        tip_path = self.tip_path(tip)
        open(tip_path, 'a').close()
        fh = open(tip_path, 'r+')
        fh.seek(0)
        fh.write(signature)
        fh.truncate()
        fh.close()
    def tip_path(self, tip):
        return os.path.join(self.node_path(), tip)
    def __iter__(self):
        return os.listdir(self.node_path()).__iter__()
    def remove_tip(self, tip):
        try:
            os.unlink(os.path.join(self.tip_path(tip)))
        except:
            raise
    def as_dict(self):
        return {
            "channel": self.channel,
            "identifier": self.identifier,
            "node_path": self._node_path
        }
| import os
class Gref(object):
    def __init__(self, store, channel, identifier):
        self.store = store
        self.channel = channel.replace("/", "_")
        self.identifier = identifier
        self._node_path = os.path.join(self.store.gref_path(),
                                       self.channel,
                                       self.identifier)
    def __str__(self):
        return "%s/%s" % (self.channel, self.identifier)
    def exists(self):
        return os.path.exists(self._node_path)
    def tips(self):
        return os.listdir(self._node_path)
    def node_path(self):
        if not self.exists():
            os.makedirs(self._node_path)
        return self._node_path
    def write_tip(self, tip, signature):
        tip_path = self.tip_path(tip)
        open(tip_path, 'a').close()
        fh = open(tip_path, 'r+')
        fh.seek(0)
        fh.write(signature)
        fh.truncate()
        fh.close()
    def tip_path(self, tip):
        return os.path.join(self.node_path(), tip)
    def __iter__(self):
        return os.listdir(self.node_path()).__iter__()
    def remove_tip(self, tip):
        try:
            os.unlink(os.path.join(self.tip_path(tip)))
        except:
            raise
    def as_dict(self):
        return {
            "channel": self.channel,
            "identifier": self.identifier,
            "node_path": self._node_path
        }
| Implement Gref.tips() to fetch it's tips. | Implement Gref.tips() to fetch it's tips.
| Python | mit | richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation |
6e2cbb2da770d73e12dfae7d36fd6f1ef00c4ed7 | jpa/eclipselink.jpa.test/resource/weblogic/wls_start.py | jpa/eclipselink.jpa.test/resource/weblogic/wls_start.py | ############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_start.py
# Properties : weblogic.properties
# Author : Kevin Yuan
############################################################################
#===========================================================================
# Start server using wlst command
#===========================================================================
startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-Xms256m -Xmx960m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Using the following instead of above "jvmarg" setting when using SUN jdk
# because jrockit doesn't support PermSize when the server run on SUN jdk
#===========================================================================
#startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-XX:PermSize=128m -XX:MaxPermSize=256m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Add the following jvmarg(s) into wlst command when you try to debug
#===========================================================================
#-Xdebug
#-Xnoagent
#-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=4000
| ############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_start.py
# Properties : weblogic.properties
# Author : Kevin Yuan
############################################################################
#===========================================================================
# Start server using wlst command
#===========================================================================
#startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-Xms256m -Xmx960m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Using the following instead of above "jvmarg" setting when using SUN jdk
# because jrockit doesn't support PermSize when the server run on SUN jdk
#===========================================================================
startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-XX:PermSize=128m -XX:MaxPermSize=256m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Add the following jvmarg(s) into wlst command when you try to debug
#===========================================================================
#-Xdebug
#-Xnoagent
#-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=4000
| Change start-weblogic-server using jdk as default. | Change start-weblogic-server using jdk as default.
Code reviewed by Edwin Tang
| Python | epl-1.0 | gameduell/eclipselink.runtime,gameduell/eclipselink.runtime,gameduell/eclipselink.runtime,gameduell/eclipselink.runtime |
06a19b5c693b2b5b808f8e7b00136bef6b1a04c3 | base/relengapi/app.py | base/relengapi/app.py | from flask import current_app
from flask import Flask
from flask import g
from flask import jsonify
from flask import redirect
from flask import url_for
from relengapi import celery
from relengapi import db
import pkg_resources
def create_app(cmdline=False):
app = Flask('relengapi')
app.config.from_envvar('RELENG_API_SETTINGS')
# get blueprints from pkg_resources
for ep in pkg_resources.iter_entry_points('relengapi_blueprints'):
if cmdline:
print " * registering blueprint", ep.name
app.register_blueprint(ep.load(), url_prefix='/%s' % ep.name)
# add the necessary components to the app
app.db = db.make_db(app)
app.celery = celery.make_celery(app)
@app.before_request
def add_db():
g.db = app.db
@app.route('/')
def root():
return redirect(url_for('docs.root'))
@app.route('/meta')
def meta():
"API: Metadata about this RelengAPI instance"
meta = {}
meta['blueprints'] = current_app.blueprints.keys()
return jsonify(meta)
return app
| import os
from flask import current_app
from flask import Flask
from flask import g
from flask import jsonify
from flask import redirect
from flask import url_for
from relengapi import celery
from relengapi import db
import pkg_resources
def create_app(cmdline=False):
app = Flask('relengapi')
app.config.from_envvar('RELENG_API_SETTINGS')
# get blueprints from pkg_resources
for ep in pkg_resources.iter_entry_points('relengapi_blueprints'):
if cmdline:
print " * registering blueprint", ep.name
app.register_blueprint(ep.load(), url_prefix='/%s' % ep.name)
# set up a random session key if none is specified
if not app.config.get('SECRET_KEY'):
print " * WARNING: setting per-process session key"
app.secret_key = os.urandom(24)
# add the necessary components to the app
app.db = db.make_db(app)
app.celery = celery.make_celery(app)
@app.before_request
def add_db():
g.db = app.db
@app.route('/')
def root():
return redirect(url_for('docs.root'))
@app.route('/meta')
def meta():
"API: Metadata about this RelengAPI instance"
meta = {}
meta['blueprints'] = current_app.blueprints.keys()
return jsonify(meta)
return app
| Set a random session key if none is configured | Set a random session key if none is configured
| Python | mpl-2.0 | mozilla-releng/services,andrei987/services,srfraser/services,andrei987/services,srfraser/services,lundjordan/services,mozilla/build-relengapi,garbas/mozilla-releng-services,garbas/mozilla-releng-services,lundjordan/services,andrei987/services,andrei987/services,garbas/mozilla-releng-services,srfraser/services,hwine/build-relengapi,djmitche/build-relengapi,mozilla-releng/services,Callek/build-relengapi,garbas/mozilla-releng-services,lundjordan/build-relengapi,La0/mozilla-relengapi,lundjordan/build-relengapi,Callek/build-relengapi,mozilla-releng/services,mozilla/build-relengapi,lundjordan/services,La0/mozilla-relengapi,Callek/build-relengapi,hwine/build-relengapi,hwine/build-relengapi,mozilla-releng/services,mozilla/build-relengapi,lundjordan/build-relengapi,lundjordan/services,mozilla/build-relengapi,djmitche/build-relengapi,srfraser/services,Callek/build-relengapi,La0/mozilla-relengapi,lundjordan/build-relengapi,hwine/build-relengapi,La0/mozilla-relengapi,djmitche/build-relengapi,djmitche/build-relengapi |
1af37551cd8e68e84a25f77dc57e5c94b10d3b87 | btcx/common.py | btcx/common.py | from twisted.words.xish.utility import EventDispatcher
USER_AGENT = 'btcx-bot'
class ExchangeEvent(EventDispatcher):
def __init__(self, **kwargs):
EventDispatcher.__init__(self, **kwargs)
def listen(self, msg, cb):
event = "%s/%s" % (self.prefix, msg)
self.addObserver(event, cb)
def emit(self, msg, data=None):
event = "%s/%s" % (self.prefix, msg)
ret = self.dispatch(data, event)
def listen_once(self, msg, cb):
event = "%s/%s" % (self.prefix, msg)
self.addOnetimeObserver(event, cb)
def remove_listener(self, msg, cb):
event = "%s/%s" % (self.prefix, msg)
self.removeObserver(event, cb)
| import os
from twisted.words.xish.utility import EventDispatcher
USER_AGENT = 'btcx-bot'
class ExchangeEvent(EventDispatcher):
def __init__(self, **kwargs):
EventDispatcher.__init__(self, **kwargs)
self.listener = {}
def listen(self, msg, cb):
event = "%s/%s" % (self.prefix, msg)
self.addObserver(event, cb)
lid = self._gen_lid()
self.listener[lid] = (msg, cb)
return lid
def listen_once(self, msg, cb):
event = "%s/%s" % (self.prefix, msg)
self.addOnetimeObserver(event, cb)
lid = self._gen_lid()
self.listener[lid] = (msg, cb)
return lid
def emit(self, msg, data=None):
event = "%s/%s" % (self.prefix, msg)
ret = self.dispatch(data, event)
def remove(self, lid):
if lid in self.listener:
msg, cb = self.listener.pop(lid)
self._remove_listener(msg, cb)
else:
print "Listener %s not found." % lid
def _remove_listener(self, msg, cb):
event = "%s/%s" % (self.prefix, msg)
self.removeObserver(event, cb)
def _gen_lid(self):
return os.urandom(16)
| Support for removing listeners by means of a listener id. | Support for removing listeners by means of a listener id.
| Python | mit | knowitnothing/btcx,knowitnothing/btcx |
a419f6dcb7968d6af1e3ef8eae29b723d96b5fd2 | stayput/jinja2/__init__.py | stayput/jinja2/__init__.py | from jinja2 import Environment, FileSystemLoader
from stayput import Templater
class Jinja2Templater(Templater):
def __init__(self, site, *args, **kwargs):
self.site = site
self.env = Environment(loader=FileSystemLoader(site.templates_path))
def template(self, item):
return self.env.from_string(item.contents).render(site=self.site, item=item)
| from jinja2 import Environment, FileSystemLoader
from stayput import Templater
class Jinja2Templater(Templater):
def __init__(self, site, *args, **kwargs):
self.site = site
self.env = Environment(loader=FileSystemLoader(site.templates_path))
def template(self, item, site, *args, **kwargs):
return self.env.from_string(item.contents).render(site=self.site, item=item)
| Update for stayput master and ensure forward compatibility | Update for stayput master and ensure forward compatibility
| Python | mit | veeti/stayput_jinja2 |
df690e4c2f19e30c619db90b8b2dfd77dab54159 | sympy/printing/__init__.py | sympy/printing/__init__.py | """Printing subsystem"""
from pretty import *
from latex import latex, print_latex
from mathml import mathml, print_mathml
from python import python, print_python
from ccode import ccode, print_ccode
from fcode import fcode, print_fcode
from jscode import jscode, print_jscode
from gtk import *
from preview import preview
from repr import srepr
from tree import print_tree
from str import StrPrinter, sstr, sstrrepr
from tableform import TableForm
| """Printing subsystem"""
from pretty import pager_print, pretty, pretty_print, pprint, \
pprint_use_unicode, pprint_try_use_unicode
from latex import latex, print_latex
from mathml import mathml, print_mathml
from python import python, print_python
from ccode import ccode, print_ccode
from fcode import fcode, print_fcode
from jscode import jscode, print_jscode
from gtk import print_gtk
from preview import preview
from repr import srepr
from tree import print_tree
from str import StrPrinter, sstr, sstrrepr
from tableform import TableForm
| Remove glob imports from sympy.printing. | Remove glob imports from sympy.printing.
| Python | bsd-3-clause | Designist/sympy,emon10005/sympy,farhaanbukhsh/sympy,mafiya69/sympy,kaushik94/sympy,atreyv/sympy,kmacinnis/sympy,Mitchkoens/sympy,aktech/sympy,sunny94/temp,grevutiu-gabriel/sympy,wanglongqi/sympy,AunShiLord/sympy,jamesblunt/sympy,emon10005/sympy,shikil/sympy,rahuldan/sympy,diofant/diofant,yashsharan/sympy,kmacinnis/sympy,kaushik94/sympy,atsao72/sympy,AkademieOlympia/sympy,hrashk/sympy,Arafatk/sympy,souravsingh/sympy,drufat/sympy,lindsayad/sympy,skidzo/sympy,grevutiu-gabriel/sympy,kevalds51/sympy,MridulS/sympy,Curious72/sympy,Shaswat27/sympy,abloomston/sympy,lidavidm/sympy,Sumith1896/sympy,postvakje/sympy,madan96/sympy,shipci/sympy,Davidjohnwilson/sympy,maniteja123/sympy,jaimahajan1997/sympy,cswiercz/sympy,wyom/sympy,mafiya69/sympy,MechCoder/sympy,amitjamadagni/sympy,abloomston/sympy,iamutkarshtiwari/sympy,rahuldan/sympy,Arafatk/sympy,pbrady/sympy,souravsingh/sympy,cccfran/sympy,Shaswat27/sympy,beni55/sympy,cccfran/sympy,madan96/sympy,Titan-C/sympy,grevutiu-gabriel/sympy,Vishluck/sympy,aktech/sympy,dqnykamp/sympy,chaffra/sympy,ga7g08/sympy,sahmed95/sympy,kevalds51/sympy,lidavidm/sympy,skidzo/sympy,amitjamadagni/sympy,bukzor/sympy,ahhda/sympy,atsao72/sympy,yashsharan/sympy,ChristinaZografou/sympy,Designist/sympy,Designist/sympy,jerli/sympy,kumarkrishna/sympy,rahuldan/sympy,MridulS/sympy,hargup/sympy,meghana1995/sympy,moble/sympy,madan96/sympy,yashsharan/sympy,jerli/sympy,shipci/sympy,flacjacket/sympy,kmacinnis/sympy,AkademieOlympia/sympy,atsao72/sympy,Arafatk/sympy,hargup/sympy,abhiii5459/sympy,hargup/sympy,saurabhjn76/sympy,garvitr/sympy,asm666/sympy,abhiii5459/sympy,farhaanbukhsh/sympy,yukoba/sympy,vipulroxx/sympy,wyom/sympy,kumarkrishna/sympy,toolforger/sympy,oliverlee/sympy,AkademieOlympia/sympy,postvakje/sympy,bukzor/sympy,sunny94/temp,hrashk/sympy,ga7g08/sympy,ahhda/sympy,ChristinaZografou/sympy,Titan-C/sympy,ahhda/sympy,Gadal/sympy,atreyv/sympy,abhiii5459/sympy,MridulS/sympy,iamutkarshtiwari/sympy,Vishluck/sympy,MechCoder/sympy,saurabhjn76/sympy,abloomston/sympy,toolforger/sympy,mcdaniel67/sympy,Davidjohnwilson/sympy,atreyv/sympy,jerli/sympy,Titan-C/sympy,cswiercz/sympy,souravsingh/sympy,wanglongqi/sympy,mafiya69/sympy,sampadsaha5/sympy,drufat/sympy,skidzo/sympy,sahmed95/sympy,jaimahajan1997/sympy,jbbskinny/sympy,chaffra/sympy,kevalds51/sympy,Vishluck/sympy,sahilshekhawat/sympy,sahilshekhawat/sympy,Mitchkoens/sympy,Curious72/sympy,jamesblunt/sympy,sunny94/temp,AunShiLord/sympy,beni55/sympy,wyom/sympy,bukzor/sympy,debugger22/sympy,pandeyadarsh/sympy,sahilshekhawat/sympy,postvakje/sympy,wanglongqi/sympy,Sumith1896/sympy,jbbskinny/sympy,liangjiaxing/sympy,moble/sympy,AunShiLord/sympy,shikil/sympy,jaimahajan1997/sympy,pbrady/sympy,shikil/sympy,VaibhavAgarwalVA/sympy,iamutkarshtiwari/sympy,drufat/sympy,vipulroxx/sympy,farhaanbukhsh/sympy,sampadsaha5/sympy,saurabhjn76/sympy,chaffra/sympy,skirpichev/omg,kaichogami/sympy,Sumith1896/sympy,vipulroxx/sympy,mcdaniel67/sympy,liangjiaxing/sympy,asm666/sympy,jbbskinny/sympy,pandeyadarsh/sympy,VaibhavAgarwalVA/sympy,dqnykamp/sympy,lindsayad/sympy,Mitchkoens/sympy,aktech/sympy,pandeyadarsh/sympy,yukoba/sympy,maniteja123/sympy,VaibhavAgarwalVA/sympy,Shaswat27/sympy,hrashk/sympy,jamesblunt/sympy,maniteja123/sympy,dqnykamp/sympy,oliverlee/sympy,lidavidm/sympy,MechCoder/sympy,beni55/sympy,cswiercz/sympy,yukoba/sympy,debugger22/sympy,debugger22/sympy,sampadsaha5/sympy,pbrady/sympy,Gadal/sympy,kaichogami/sympy,kaichogami/sympy,ga7g08/sympy,garvitr/sympy,meghana1995/sympy,cccfran/sympy,garvitr/sympy,moble/sympy,kaushik94/sympy,Davidjo
hnwilson/sympy,toolforger/sympy,asm666/sympy,lindsayad/sympy,oliverlee/sympy,shipci/sympy,Gadal/sympy,ChristinaZografou/sympy,mcdaniel67/sympy,sahmed95/sympy,kumarkrishna/sympy,Curious72/sympy,emon10005/sympy,liangjiaxing/sympy,meghana1995/sympy |
c0dac1383d8ea4847f08a06f997bbf80f1cafca1 | netbox/utilities/api.py | netbox/utilities/api.py | from rest_framework.exceptions import APIException
from rest_framework.serializers import ModelSerializer
WRITE_OPERATIONS = ['create', 'update', 'partial_update', 'delete']
class ServiceUnavailable(APIException):
status_code = 503
default_detail = "Service temporarily unavailable, please try again later."
class WritableSerializerMixin(object):
"""
Returns a flat Serializer from the given model suitable for write operations (POST, PUT, PATCH). This is necessary
to allow write operations on objects which utilize nested serializers.
"""
def get_serializer_class(self):
class WritableSerializer(ModelSerializer):
class Meta:
model = self.queryset.model
fields = '__all__'
if self.action in WRITE_OPERATIONS:
return WritableSerializer
return self.serializer_class
| from rest_framework.exceptions import APIException
from rest_framework.serializers import ModelSerializer
WRITE_OPERATIONS = ['create', 'update', 'partial_update', 'delete']
class ServiceUnavailable(APIException):
status_code = 503
default_detail = "Service temporarily unavailable, please try again later."
class WritableSerializerMixin(object):
"""
Returns a flat Serializer from the given model suitable for write operations (POST, PUT, PATCH). This is necessary
to allow write operations on objects which utilize nested serializers.
"""
def get_serializer_class(self):
class WritableSerializer(ModelSerializer):
class Meta:
model = self.get_queryset().model
fields = '__all__'
if self.action in WRITE_OPERATIONS:
return WritableSerializer
return self.serializer_class
| Fix retrieval of model under viewsets without a statically defined queryset | Fix retrieval of model under viewsets without a statically defined queryset
| Python | apache-2.0 | Alphalink/netbox,Alphalink/netbox,snazy2000/netbox,digitalocean/netbox,Alphalink/netbox,lampwins/netbox,lampwins/netbox,lampwins/netbox,snazy2000/netbox,digitalocean/netbox,Alphalink/netbox,lampwins/netbox,snazy2000/netbox,digitalocean/netbox,digitalocean/netbox,snazy2000/netbox |
ab889e99e78b16cacc2f1fbb86494b8a784c154a | icekit/plugins/image/admin.py | icekit/plugins/image/admin.py | from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['description', 'title', 'thumbnail']
list_display_links = ['description', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
def title(self, image):
return image.title
def description(self, image):
return unicode(image)
admin.site.register(models.Image, ImageAdmin)
| from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['description', 'title', 'thumbnail']
list_display_links = ['description', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
def title(self, image):
return image.title
def description(self, image):
return str(image)
admin.site.register(models.Image, ImageAdmin)
| Fix use of unicode() incompatible with Python 3 | Fix use of unicode() incompatible with Python 3
| Python | mit | ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit |
5d01c58aef7f101531ecc7a44a83d225fa2fdcc8 | npc/linters/__init__.py | npc/linters/__init__.py | from . import changeling
| """
Linters for verifying the correctness of certain character types
The `commands.lint` function can lint all basic files, but special character
types sometimes need extra checks. The linters in this package encapsulate that
logic.
All linter packages have a single main entry point `lint` which accepts a
character information dict. Various keyword arguments are used for options and
supporting data.
"""
from . import changeling
| Add docstring to linters package | Add docstring to linters package
| Python | mit | aurule/npc,aurule/npc |
0228128b2878c4d5f6942e751b48c8e52192245b | exporters/writers/console_writer.py | exporters/writers/console_writer.py | import json
from exporters.writers.base_writer import BaseWriter, ItemsLimitReached
class ConsoleWriter(BaseWriter):
"""
It is just a writer with testing purposes. It prints every item in console.
"""
def __init__(self, options):
super(ConsoleWriter, self).__init__(options)
self.logger.info('ConsoleWriter has been initiated')
self.pretty_print = self.options.get('pretty_print', False)
def write_batch(self, batch):
for item in batch:
formatted_item = item.formatted
if self.pretty_print:
formatted_item = self._format(formatted_item)
print formatted_item
self._increment_written_items()
if self.items_limit and self.items_limit == self.stats['items_count']:
raise ItemsLimitReached('Finishing job after items_limit reached: {} items written.'.format(self.stats['items_count']))
self.logger.debug('Wrote items')
def _format(self, item):
try:
return json.dumps(json.loads(item), indent=2)
except:
return item
| import json
from exporters.writers.base_writer import BaseWriter, ItemsLimitReached
class ConsoleWriter(BaseWriter):
"""
It is just a writer with testing purposes. It prints every item in console.
"""
def __init__(self, options):
super(ConsoleWriter, self).__init__(options)
self.logger.info('ConsoleWriter has been initiated')
self.pretty_print = self.options.get('pretty_print', False)
def write_batch(self, batch):
for item in batch:
formatted_item = item.formatted
if self.pretty_print:
formatted_item = self._format(formatted_item)
print formatted_item
self._increment_written_items()
if self.items_limit and self.items_limit == self.stats['items_count']:
raise ItemsLimitReached('Finishing job after items_limit reached: {} items written.'.format(self.stats['items_count']))
self.logger.debug('Wrote items')
def _format(self, item):
try:
return json.dumps(json.loads(item), indent=2, sort_keys=True)
except:
return item
| Use sort_keys=True for the ConsoleWritter pretty printing | Use sort_keys=True for the ConsoleWritter pretty printing
| Python | bsd-3-clause | scrapinghub/exporters |
42389e796acba99fe12e30e6ca08672b889bd5f2 | infrastructure/serializers.py | infrastructure/serializers.py | from rest_framework import serializers
from . import models
from scorecard.serializers import GeographySerializer
class FinancialYearSerializer(serializers.ModelSerializer):
class Meta:
model = models.FinancialYear
fields = ["budget_year"]
class BudgetPhaseSerializer(serializers.ModelSerializer):
class Meta:
model = models.BudgetPhase
fields = ["code", "name"]
class ExpenditureSerializer(serializers.ModelSerializer):
financial_year = FinancialYearSerializer(read_only=True)
budget_phase = BudgetPhaseSerializer(read_only=True)
class Meta:
model = models.Expenditure
fields = ["amount", "budget_phase", "financial_year"]
class ProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
geography = GeographySerializer(read_only=True)
class Meta:
model = models.Project
fields = "__all__"
class GeoProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
class Meta:
model = models.Project
fields = ("project_description", "latitude", "longitude", "expenditure")
| from rest_framework import serializers
from . import models
from scorecard.serializers import GeographySerializer
class FinancialYearSerializer(serializers.ModelSerializer):
class Meta:
model = models.FinancialYear
fields = ["budget_year"]
read_only_fields = ["budget_year"]
class BudgetPhaseSerializer(serializers.ModelSerializer):
class Meta:
model = models.BudgetPhase
fields = ["code", "name"]
read_only_fields = ["code", "name"]
class ExpenditureSerializer(serializers.ModelSerializer):
financial_year = FinancialYearSerializer(read_only=True)
budget_phase = BudgetPhaseSerializer(read_only=True)
class Meta:
model = models.Expenditure
fields = ["amount", "budget_phase", "financial_year"]
read_only_fields = ["amount", "budget_phase", "financial_year"]
class ProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
geography = GeographySerializer(read_only=True)
class Meta:
model = models.Project
fields = "__all__"
read_only_fields = [
"function",
"project_description",
"project_number",
"project_type",
"mtsf_service_outcome",
"iudf",
"own_strategic_objectives",
"asset_class",
"asset_subclass",
"ward_location",
"longitude",
"latitude",
]
class GeoProjectSerializer(serializers.ModelSerializer):
expenditure = ExpenditureSerializer(many=True, read_only=True)
class Meta:
model = models.Project
fields = ("project_description", "latitude", "longitude", "expenditure")
read_only_fields = ["project_description", "latitude", "longitude"]
| Make fields readonly, skips rest_framework, validation, speeds up queries | Make fields readonly, skips rest_framework, validation, speeds up queries
| Python | mit | Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data |
53e961870da3aeebf62b39605dc2aab74639c0c7 | internode_ssl_test.py | internode_ssl_test.py | from dtest import Tester, debug
from tools import generate_ssl_stores, putget
class TestInternodeSSL(Tester):
def __init__(self, *args, **kwargs):
Tester.__init__(self, *args, **kwargs)
def putget_with_internode_ssl_test(self):
"""
Simple putget test with internode ssl enabled
@jira_ticket CASSANDRA-9884
"""
cluster = self.cluster
debug("***using internode ssl***")
generate_ssl_stores(self.test_path)
self.cluster.enable_internode_ssl(self.test_path)
cluster.populate(3).start()
session = self.patient_cql_connection(cluster.nodelist()[0])
self.create_ks(session, 'ks', 3)
self.create_cf(session, 'cf', compression=None)
putget(cluster, session)
| from dtest import Tester, debug
from tools import generate_ssl_stores, putget
class TestInternodeSSL(Tester):
def __init__(self, *args, **kwargs):
Tester.__init__(self, *args, **kwargs)
def putget_with_internode_ssl_test(self):
"""
Simple putget test with internode ssl enabled
with default 'all' internode compression
@jira_ticket CASSANDRA-9884
"""
self.__putget_with_internode_ssl_test('all')
def putget_with_internode_ssl_without_compression_test(self):
"""
Simple putget test with internode ssl enabled
without internode compression
@jira_ticket CASSANDRA-9884
"""
self.__putget_with_internode_ssl_test('none')
def __putget_with_internode_ssl_test(self, internode_compression):
cluster = self.cluster
debug("***using internode ssl***")
generate_ssl_stores(self.test_path)
cluster.set_configuration_options({'internode_compression': internode_compression})
cluster.enable_internode_ssl(self.test_path)
cluster.populate(3).start()
session = self.patient_cql_connection(cluster.nodelist()[0])
self.create_ks(session, 'ks', 3)
self.create_cf(session, 'cf', compression=None)
putget(cluster, session)
| Add test for internode_compression: 'none' | Add test for internode_compression: 'none'
| Python | apache-2.0 | blerer/cassandra-dtest,carlyeks/cassandra-dtest,bdeggleston/cassandra-dtest,stef1927/cassandra-dtest,blerer/cassandra-dtest,riptano/cassandra-dtest,snazy/cassandra-dtest,stef1927/cassandra-dtest,beobal/cassandra-dtest,thobbs/cassandra-dtest,pauloricardomg/cassandra-dtest,spodkowinski/cassandra-dtest,krummas/cassandra-dtest,carlyeks/cassandra-dtest,mambocab/cassandra-dtest,iamaleksey/cassandra-dtest,krummas/cassandra-dtest,pcmanus/cassandra-dtest,pauloricardomg/cassandra-dtest,aweisberg/cassandra-dtest,beobal/cassandra-dtest,snazy/cassandra-dtest,aweisberg/cassandra-dtest,iamaleksey/cassandra-dtest,mambocab/cassandra-dtest,riptano/cassandra-dtest,spodkowinski/cassandra-dtest,bdeggleston/cassandra-dtest,thobbs/cassandra-dtest |
0f12f4a2e8b68cf48b9768a6b18a1a560068eac2 | app/timetables/models.py | app/timetables/models.py | from __future__ import unicode_literals
from django.db import models
class Weekday(models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
def clean(self):
"""
Capitalize the first letter of the first word to avoid case
insensitive duplicates for name field.
"""
self.name = self.name.capitalize()
def save(self, *args, **kwargs):
self.clean()
return super(Weekday, self).save(*args, **kwargs)
class Meal(models.Model):
name = models.TextField()
start_time = models.TimeField()
end_time = models.TimeField()
def __str__(self):
return self.name
| from __future__ import unicode_literals
from django.db import models
class Weekday(models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
def clean(self):
"""
Capitalize the first letter of the first word to avoid case
insensitive duplicates for name field.
"""
self.name = self.name.capitalize()
def save(self, *args, **kwargs):
self.clean()
return super(Weekday, self).save(*args, **kwargs)
class Meal(models.Model):
name = models.CharField(max_length=60)
start_time = models.TimeField()
end_time = models.TimeField()
def __str__(self):
return self.name
| Change meal name to charfield | Change meal name to charfield
| Python | mit | teamtaverna/core |
ead78c44dbf3d180ba4ea99a6e107539166025f2 | genes/java/main.py | genes/java/main.py | from genes.apt import commands as apt
from genes.brew import commands as brew
from genes.debconf import commands as debconf
from genes.debian.traits import is_debian
from genes.mac.traits import is_osx
from genes.ubuntu.traits import is_ubuntu
def main(config):
if is_debian() or is_ubuntu():
if config.is_oracle():
# FIXME: debian needs ppa software
apt.add_ppa('webupd8team/java')
apt.update()
debconf.set_selections(config.version + '-installer',
'shared/accepted-oracle-license-v1-1',
'select', 'true')
apt.install(config.version + '-installer')
else:
apt.update()
apt.install(config.version)
elif is_osx():
brew.update()
brew.cask_install('java')
else:
# FIXME: print failure, handle windows
pass
| from typing import Callable, Dict
from genes.apt import commands as apt
from genes.brew import commands as brew
from genes.debconf import commands as debconf
from genes.debian.traits import is_debian
from genes.mac.traits import is_osx
from genes.ubuntu.traits import is_ubuntu
def main(config: Callable[[], Dict]):
if is_debian() or is_ubuntu():
if config.is_oracle():
# FIXME: debian needs ppa software
apt.add_ppa('webupd8team/java')
apt.update()
debconf.set_selections(config.version + '-installer',
'shared/accepted-oracle-license-v1-1',
'select', 'true')
apt.install(config.version + '-installer')
else:
apt.update()
apt.install(config.version)
elif is_osx():
brew.update()
brew.cask_install('java')
else:
# FIXME: print failure, handle windows
pass
| Add typing to java. It's not right yet though :) | Add typing to java. It's not right yet though :)
| Python | mit | hatchery/Genepool2,hatchery/genepool |
8883f1a45595219ae843b3400df1f56ab07aa4fe | corehq/apps/userreports/document_stores.py | corehq/apps/userreports/document_stores.py | from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
| from corehq.apps.locations.models import SQLLocation
from corehq.form_processor.document_stores import ReadonlyFormDocumentStore, ReadonlyCaseDocumentStore
from corehq.form_processor.utils import should_use_sql_backend
from corehq.util.couch import get_db_by_doc_type
from pillowtop.dao.couch import CouchDocumentStore
from pillowtop.dao.exceptions import DocumentNotFoundError
from pillowtop.dao.interface import ReadOnlyDocumentStore
class ReadonlyLocationDocumentStore(ReadOnlyDocumentStore):
def __init__(self, domain):
self.domain = domain
self.queryset = SQLLocation.objects.filter(domain=domain)
def get_document(self, doc_id):
try:
return self.queryset.get(location_id=doc_id).to_json()
except SQLLocation.DoesNotExist as e:
raise DocumentNotFoundError(e)
def iter_document_ids(self, last_id=None):
return iter(self.queryset.location_ids())
def iter_documents(self, ids):
for location in self.queryset.filter(location_id__in=ids):
yield location.to_json()
def get_document_store(domain, doc_type):
use_sql = should_use_sql_backend(domain)
if use_sql and doc_type == 'XFormInstance':
return ReadonlyFormDocumentStore(domain)
elif use_sql and doc_type == 'CommCareCase':
return ReadonlyCaseDocumentStore(domain)
elif doc_type == 'Location':
return ReadonlyLocationDocumentStore(domain)
else:
# all other types still live in couchdb
return CouchDocumentStore(
couch_db=get_db_by_doc_type(doc_type),
domain=domain,
doc_type=doc_type
)
| Add document store for locations | Add document store for locations
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq |
7fd3e82c449ebf46e369d2a8c2bf534cb6b17607 | notebook/lib/pos_tags.py | notebook/lib/pos_tags.py | import nltk
class PosTags:
def tag(self, t):
'''
With a list of tokens, mark their part of speech and return
a list dicts (no native tuple type in dataframes it seems).
'''
pos = nltk.pos_tag(t)
retval = []
for p in pos:
retval.append({"word": p[0], "tag": p[1]})
return retval
if __name__ == "__main__":
from tokens import Tokens
import sys
fn = sys.argv[1]
t = Tokens()
pos_tags = PosTags()
with open(fn) as f:
for l in f:
tokens = t.tokenize(l)
pos = pos_tags.tag(tokens)
s = ""
for p in pos:
s = s + p["word"] + " (" + p["tag"] + ") | "
print(s + "\n")
| import nltk
class PosTags:
def tag(self, t, as_dicts=True):
'''
With a list of tokens, mark their part of speech and return
a list dicts (no native tuple type in dataframes it seems).
'''
pos = nltk.pos_tag(t)
if as_dicts:
return self.to_dicts(pos)
else:
return pos
def to_dicts(self, pos):
'''
With a list of POS tag tuples, convert the tuples to dicts
because Spark can't store tuples.
'''
retval = []
for p in pos:
retval.append({"word": p[0], "tag": p[1]})
return retval
if __name__ == "__main__":
from tokens import Tokens
import sys
fn = sys.argv[1]
t = Tokens()
pos_tags = PosTags()
with open(fn) as f:
for l in f:
tokens = t.tokenize(l)
pos = pos_tags.tag(tokens)
s = ""
for p in pos:
s = s + p["word"] + " (" + p["tag"] + ") | "
print(s + "\n")
| Change return to allow for original tuples to come out since we'll need them for chunking | Change return to allow for original tuples to come out since we'll need them for chunking
| Python | mit | mjcollin/2016spr,mjcollin/2016spr,mjcollin/2016spr |
6b515e671c33458d736c6639a8b3391f94dfee73 | buildPy2app.py | buildPy2app.py | """
This is a setup.py script generated by py2applet
Usage:
python setup.py py2app
"""
from setuptools import setup
from glob import glob
import syncplay
APP = ['syncplayClient.py']
DATA_FILES = [
('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')),
]
OPTIONS = {
'iconfile':'resources/icon.icns',
'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'},
'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'},
'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'],
'plist': {
'CFBundleName':'Syncplay',
'CFBundleShortVersionString':syncplay.version,
'CFBundleIdentifier':'pl.syncplay.Syncplay',
'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved'
}
}
setup(
app=APP,
name='Syncplay',
data_files=DATA_FILES,
options={'py2app': OPTIONS},
setup_requires=['py2app'],
)
| """
This is a setup.py script generated by py2applet
Usage:
python setup.py py2app
"""
from setuptools import setup
from glob import glob
import syncplay
APP = ['syncplayClient.py']
DATA_FILES = [
('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')),
]
OPTIONS = {
'iconfile':'resources/icon.icns',
'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'},
'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'},
'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'],
'plist': {
'CFBundleName':'Syncplay',
'CFBundleShortVersionString':syncplay.version,
'CFBundleIdentifier':'pl.syncplay.Syncplay',
'LSMinimumSystemVersion':'10.11.0',
'NSHumanReadableCopyright': '@ 2018 Syncplay All Rights Reserved'
}
}
setup(
app=APP,
name='Syncplay',
data_files=DATA_FILES,
options={'py2app': OPTIONS},
setup_requires=['py2app'],
)
| Set 10.11.0 as minimum macOS version in the .app bundle | Set 10.11.0 as minimum macOS version in the .app bundle
| Python | apache-2.0 | Syncplay/syncplay,Syncplay/syncplay,NeverDecaf/syncplay,alby128/syncplay,alby128/syncplay,NeverDecaf/syncplay |
5ac75f5cf37b86a2b512c4fb9d64c122ccbfe557 | admin_interface/migrations/0025_theme_language_chooser_control.py | admin_interface/migrations/0025_theme_language_chooser_control.py | from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("admin_interface", "0024_remove_theme_css"),
]
operations = [
migrations.AddField(
model_name="theme",
name="language_chooser_control",
field=models.CharField(
choices=[
("default-select", "Default Select"),
("minimal-select", "Minimal Select"),
],
default="select",
max_length=20,
verbose_name="control",
),
),
]
| from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("admin_interface", "0024_remove_theme_css"),
]
operations = [
migrations.AddField(
model_name="theme",
name="language_chooser_control",
field=models.CharField(
choices=[
("default-select", "Default Select"),
("minimal-select", "Minimal Select"),
],
default="default-select",
max_length=20,
verbose_name="control",
),
),
]
| Fix `language_choose_control` migration default value. | Fix `language_choose_control` migration default value.
Co-Authored-By: Éric <34afff4eac2f9b94bd269db558876db6be315161@users.noreply.github.com>
| Python | mit | fabiocaccamo/django-admin-interface,fabiocaccamo/django-admin-interface,fabiocaccamo/django-admin-interface |
8fb8a77dcad4aa657b7ddb637459a6279a21755e | alexandria/drivers.py | alexandria/drivers.py | # coding=utf-8
import types
import config
class Driver(object):
def __init__(self):
self.driver_type = self.__class__.__name__
# Get credentials from conf files for CMDB
pass
def get_driver_type(self):
return self.driver_type
def get_ci(self,ci):
pass
def push_ci(self,ci):
pass
class Itop(Driver):
def get_ci(self,ci):
print "Get from itop"
return True
def push_ci(self):
pass
class Redfish(Driver):
def get_ci(self,ci):
print "Get from redfish"
return True
class Ironic(Driver):
pass
class Mondorescue(Driver):
pass
class Fakecmdb(Driver):
pass
class Fakeprovider(Driver):
def get_ci(self,ci):
import app
# Simulate a driver that will provide Manager data.
# TODO a connect method must be implemented as
# Assuming the connection is ok.
# Now create a manager model from reference model.
ci.ci_type = "Manager"
ci.data = config.alexandria.model.Manager
class DriverCollection(list):
pass
| # coding=utf-8
import types
import pprint
import config
class Driver(object):
def __init__(self):
self.driver_type = self.__class__.__name__
# Get credentials from conf files for CMDB
pass
def get_driver_type(self):
return self.driver_type
def get_ci(self,ci):
pass
def push_ci(self,ci):
pass
class Itop(Driver):
def get_ci(self,ci):
print "Get from itop"
return True
def push_ci(self):
pass
class Redfish(Driver):
def get_ci(self,ci):
print "Get from redfish"
return True
class Ironic(Driver):
pass
class Mondorescue(Driver):
pass
class Fakecmdb(Driver):
pass
class Fakeprovider(Driver):
def get_ci(self,ci):
import app
# Simulate a driver that will provide Manager data.
# TODO a connect method must be implemented as
# Assuming the connection is ok.
# Now create a manager model from reference model.
ci.ci_type = "Manager"
ci.data = config.alexandria.model.Manager.copy()
if ci.data is config.alexandria.model.Manager:
print "identical"
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(ci.data)
class DriverCollection(list):
pass
| Copy reference object example. This commit is just implemented to validate the medhod. But this is not conveniant, because reference models must not be changed. So next step is to make them private attributes. | Copy reference object example.
This commit is just implemented to validate the medhod.
But this is not conveniant, because reference models must not be
changed.
So next step is to make them private attributes.
| Python | apache-2.0 | sl4shme/alexandria,sl4shme/alexandria,sl4shme/alexandria,uggla/alexandria |
138aa351b3dbe95f3cdebf01dbd3c75f1ce3fac2 | src/ggrc/fulltext/sql.py | src/ggrc/fulltext/sql.py | # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
from ggrc import db
from . import Indexer
class SqlIndexer(Indexer):
def create_record(self, record, commit=True):
for k,v in record.properties.items():
db.session.add(self.record_type(
key=record.key,
type=record.type,
context_id=record.context_id,
tags=record.tags,
property=k,
content=v,
))
if commit:
db.session.commit()
def update_record(self, record, commit=True):
self.delete_record(record.key, commit=False)
self.create_record(record, commit=commit)
def delete_record(self, key, type, commit=True):
db.session.query(self.record_type).filter(\
self.record_type.key == key,
self.record_type.type == type).delete()
if commit:
db.session.commit()
def delete_all_records(self, commit=True):
db.session.query(self.record_type).delete()
if commit:
db.session.commit()
| # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
from ggrc import db
from . import Indexer
class SqlIndexer(Indexer):
def create_record(self, record, commit=True):
for k,v in record.properties.items():
db.session.add(self.record_type(
key=record.key,
type=record.type,
context_id=record.context_id,
tags=record.tags,
property=k,
content=v,
))
if commit:
db.session.commit()
def update_record(self, record, commit=True):
self.delete_record(record.key, record.type, commit=False)
self.create_record(record, commit=commit)
def delete_record(self, key, type, commit=True):
db.session.query(self.record_type).filter(\
self.record_type.key == key,
self.record_type.type == type).delete()
if commit:
db.session.commit()
def delete_all_records(self, commit=True):
db.session.query(self.record_type).delete()
if commit:
db.session.commit()
| Fix test broken due to delete_record change | Fix test broken due to delete_record change
| Python | apache-2.0 | kr41/ggrc-core,uskudnik/ggrc-core,vladan-m/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,NejcZupec/ggrc-core,hyperNURb/ggrc-core,andrei-karalionak/ggrc-core,hasanalom/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,edofic/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,uskudnik/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,vladan-m/ggrc-core,hasanalom/ggrc-core,VinnieJohns/ggrc-core,uskudnik/ggrc-core,NejcZupec/ggrc-core,uskudnik/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core,jmakov/ggrc-core,plamut/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,hasanalom/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,edofic/ggrc-core,hyperNURb/ggrc-core |
756c9ae9487ac5c35f069b79e792043bca0af27e | panoptes_client/utils.py | panoptes_client/utils.py | import functools
ITERABLE_TYPES = (
list,
set,
tuple,
)
try:
from numpy import ndarray
ITERABLE_TYPES = ITERABLE_TYPES + (ndarray,)
except ImportError:
pass
def isiterable(v):
return isinstance(v, ITERABLE_TYPES)
def batchable(func=None, batch_size=100):
def do_batch(*args, **kwargs):
_batch_size = kwargs.pop('batch_size', batch_size)
if isiterable(args[0]):
_self = None
to_batch = args[0]
args = args[1:]
else:
_self = args[0]
to_batch = args[1]
args = args[2:]
if not isiterable(to_batch):
to_batch = [to_batch]
for _batch in [
to_batch[i:i+_batch_size]
for i in xrange(0, len(to_batch), _batch_size)
]:
if _self is None:
func(_batch, *args, **kwargs)
else:
func(_self, _batch, *args, **kwargs)
if func is None:
return functools.partial(batchable, batch_size=batch_size)
return do_batch
| import functools
ITERABLE_TYPES = (
list,
set,
tuple,
)
try:
from numpy import ndarray
ITERABLE_TYPES = ITERABLE_TYPES + (ndarray,)
except ImportError:
pass
def isiterable(v):
return isinstance(v, ITERABLE_TYPES)
def batchable(func=None, batch_size=100):
def do_batch(*args, **kwargs):
_batch_size = kwargs.pop('batch_size', batch_size)
if isiterable(args[0]):
_self = None
to_batch = args[0]
args = args[1:]
else:
_self = args[0]
to_batch = args[1]
args = args[2:]
if not isiterable(to_batch):
to_batch = [to_batch]
if isinstance(to_batch, set):
to_batch = list(to_batch)
for _batch in [
to_batch[i:i+_batch_size]
for i in xrange(0, len(to_batch), _batch_size)
]:
if _self is None:
func(_batch, *args, **kwargs)
else:
func(_self, _batch, *args, **kwargs)
if func is None:
return functools.partial(batchable, batch_size=batch_size)
return do_batch
| Fix passing sets to batchable methods | Fix passing sets to batchable methods
Sets don't support indexing, so convert them to lists.
| Python | apache-2.0 | zooniverse/panoptes-python-client |
a8904432a53aa82f8c1f0efbdd88114663120802 | containers/containers.py | containers/containers.py | null | try:
from urllib import urlretrieve
except ImportError:
from urllib.request import urlretrieve
def simple_discovery(path, secure=True):
if secure:
protocol = 'https'
else:
protocol = 'http'
urlretrieve('{protocol}://{path}.aci'.format(path=path,
protocol=protocol),
'image.aci'.format(path=path))
| Add some sort of simple image discovery | Add some sort of simple image discovery
| Python | mit | kragniz/containers |
e170666cbbc1f2a61c0ffa077c66da4556a6c5bb | app/packages/views.py | app/packages/views.py | import requests
from . import packages
from models import Package, Downloads
from flask import jsonify
from datetime import timedelta
from app import cache
from utils import cache_timeout
@packages.route('/stats', methods=['GET'])
@cache_timeout
@cache.cached()
def stats():
resp = dict()
resp["count"] = Package.get_count()
resp["day"] = Downloads.get_overall_downloads_count(timedelta(days=1))
resp["week"] = Downloads.get_overall_downloads_count(timedelta(days=7))
resp["month"] = Downloads.get_overall_downloads_count(timedelta(days=30))
return jsonify(resp)
@packages.route('/featured', methods=['GET'])
@cache_timeout
@cache.cached()
def featured():
package_list = requests.get("https://atom.io/api/packages/featured")
theme_list = requests.get("https://atom.io/api/themes/featured")
featured_list = package_list.json() + theme_list.json()
# limit data to multiples of three
length = (len(featured_list) / 3) * 3
featured_list = featured_list[:length]
json_data = []
for item in featured_list:
obj = Package.get_package(item['name'])
if obj is not None:
json_data.append(obj.get_json())
return jsonify(results=json_data)
| import requests
from . import packages
from models import Package, Downloads
from flask import jsonify
from datetime import timedelta
from app import cache
from utils import cache_timeout
@packages.route('/stats', methods=['GET'])
@cache_timeout
@cache.cached()
def stats():
resp = dict()
resp["count"] = Package.get_count()
resp["day"] = Downloads.get_overall_downloads_count(timedelta(days=1))
resp["week"] = Downloads.get_overall_downloads_count(timedelta(days=7))
resp["month"] = Downloads.get_overall_downloads_count(timedelta(days=30))
return jsonify(resp)
@packages.route('/featured', methods=['GET'])
@cache_timeout
@cache.cached()
def featured():
package_list = requests.get("https://atom.io/api/packages/featured")
theme_list = requests.get("https://atom.io/api/themes/featured")
featured_list = package_list.json() + theme_list.json()
# limit data to multiples of three
length = ((len(featured_list) + 2) / 3) * 3
featured_list = featured_list[:(length - 2)]
json_data = []
for item in featured_list:
obj = Package.get_package(item['name'])
if obj is not None:
json_data.append(obj.get_json())
for item in ["docblockr", "git-log"]:
obj = Package.get_package(item)
json_data.append(obj.get_json())
return jsonify(results=json_data)
| Add my packages to featured list | Add my packages to featured list
| Python | bsd-2-clause | NikhilKalige/atom-website,NikhilKalige/atom-website,NikhilKalige/atom-website |
33f2636e1de536a633cec9332362252b0b614817 | serpent/templates/SerpentGamePlugin/files/serpent_game.py | serpent/templates/SerpentGamePlugin/files/serpent_game.py | from serpent.game import Game
from .api.api import MyGameAPI
from serpent.utilities import Singleton
from serpent.input_controller import InputControllers
from serpent.game_launchers.web_browser_game_launcher import WebBrowser
class SerpentGame(Game, metaclass=Singleton):
def __init__(self, **kwargs):
kwargs["platform"] = "PLATFORM"
kwargs["input_controller"] = InputControllers.PYAUTOGUI
kwargs["window_name"] = "WINDOW_NAME"
kwargs["app_id"] = "APP_ID"
kwargs["app_args"] = None
kwargs["executable_path"] = "EXECUTABLE_PATH"
kwargs["url"] = "URL"
kwargs["browser"] = WebBrowser.DEFAULT
super().__init__(**kwargs)
self.api_class = MyGameAPI
self.api_instance = None
@property
def screen_regions(self):
regions = {
"SAMPLE_REGION": (0, 0, 0, 0)
}
return regions
@property
def ocr_presets(self):
presets = {
"SAMPLE_PRESET": {
"extract": {
"gradient_size": 1,
"closing_size": 1
},
"perform": {
"scale": 10,
"order": 1,
"horizontal_closing": 1,
"vertical_closing": 1
}
}
}
return presets
| from serpent.game import Game
from .api.api import MyGameAPI
from serpent.utilities import Singleton
from serpent.game_launchers.web_browser_game_launcher import WebBrowser
class SerpentGame(Game, metaclass=Singleton):
def __init__(self, **kwargs):
kwargs["platform"] = "PLATFORM"
kwargs["window_name"] = "WINDOW_NAME"
kwargs["app_id"] = "APP_ID"
kwargs["app_args"] = None
kwargs["executable_path"] = "EXECUTABLE_PATH"
kwargs["url"] = "URL"
kwargs["browser"] = WebBrowser.DEFAULT
super().__init__(**kwargs)
self.api_class = MyGameAPI
self.api_instance = None
@property
def screen_regions(self):
regions = {
"SAMPLE_REGION": (0, 0, 0, 0)
}
return regions
@property
def ocr_presets(self):
presets = {
"SAMPLE_PRESET": {
"extract": {
"gradient_size": 1,
"closing_size": 1
},
"perform": {
"scale": 10,
"order": 1,
"horizontal_closing": 1,
"vertical_closing": 1
}
}
}
return presets
| Remove kwargs["input_controller"] from the Game plugin template | Remove kwargs["input_controller"] from the Game plugin template
| Python | mit | SerpentAI/SerpentAI |
d6433001f3660c9c4506fe5e1f62c0a52edd02f7 | project/djenerator/tests.py | project/djenerator/tests.py | #!/usr/bin/env python
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
| #!/usr/bin/env python
"""
This module contains tests for djenerator app.
"""
from django.test import TestCase
from model_reader import is_instance_of_model
from models import ExtendingModel
from models import NotExtendingModel
from models import TestModel0
from models import TestModel1
from models import TestModelA
from models import TestModelB
from models import TestModelC
from models import TestModelD
from models import TestModelE
from models import TestModelX
from models import TestModelY
class TestInstanceOfModel(TestCase):
def test(self):
models = [TestModel0, TestModel1, TestModelA, TestModelB, TestModelC,
TestModelD, TestModelE, TestModelX, TestModelY, ExtendingModel]
for model in models:
self.assertTrue(is_instance_of_model(model))
self.assertFalse(is_instance_of_model(NotExtendingModel))
def not_extending_model_function():
pass
self.assertFalse(is_instance_of_model(not_extending_model_function))
| Test Cases for is instance of Model function | Test Cases for is instance of Model function
| Python | mit | mostafa-mahmoud/djenerator,aelguindy/djenerator,mostafa-mahmoud/djenerator |
6631906fc126eadc114a7ee673194da4880dc960 | flask_admin/contrib/geoa/typefmt.py | flask_admin/contrib/geoa/typefmt.py | from flask_admin.contrib.sqla.typefmt import DEFAULT_FORMATTERS as BASE_FORMATTERS
import json
from jinja2 import Markup
from wtforms.widgets import html_params
from geoalchemy2.shape import to_shape
from geoalchemy2.elements import WKBElement
from sqlalchemy import func
from flask import current_app
def geom_formatter(view, value):
params = html_params(**{
"data-role": "leaflet",
"disabled": "disabled",
"data-width": 100,
"data-height": 70,
"data-geometry-type": to_shape(value).geom_type,
"data-zoom": 15,
})
if value.srid is -1:
geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value))
else:
geojson = current_app.extensions['sqlalchemy'].db.session.scalar(func.ST_AsGeoJson(value.ST_Transform( 4326)))
return Markup('<textarea %s>%s</textarea>' % (params, geojson))
DEFAULT_FORMATTERS = BASE_FORMATTERS.copy()
DEFAULT_FORMATTERS[WKBElement] = geom_formatter
| from flask_admin.contrib.sqla.typefmt import DEFAULT_FORMATTERS as BASE_FORMATTERS
from jinja2 import Markup
from wtforms.widgets import html_params
from geoalchemy2.shape import to_shape
from geoalchemy2.elements import WKBElement
from sqlalchemy import func
def geom_formatter(view, value):
params = html_params(**{
"data-role": "leaflet",
"disabled": "disabled",
"data-width": 100,
"data-height": 70,
"data-geometry-type": to_shape(value).geom_type,
"data-zoom": 15,
})
if value.srid is -1:
value.srid = 4326
geojson = view.model.query.with_entities(func.ST_AsGeoJSON(value)).scalar()
return Markup('<textarea %s>%s</textarea>' % (params, geojson))
DEFAULT_FORMATTERS = BASE_FORMATTERS.copy()
DEFAULT_FORMATTERS[WKBElement] = geom_formatter
| Remove Flask-SQLAlchemy dependency It should be noted that the declarative base still has to be configured like this: | Remove Flask-SQLAlchemy dependency
It should be noted that the declarative base still has to be configured
like this:
```python
MyBase:
query = session.query_property()
```
Also decreased code duplication and removed unused imports.
| Python | bsd-3-clause | torotil/flask-admin,likaiguo/flask-admin,iurisilvio/flask-admin,toddetzel/flask-admin,mikelambert/flask-admin,lifei/flask-admin,likaiguo/flask-admin,rochacbruno/flask-admin,ArtemSerga/flask-admin,closeio/flask-admin,betterlife/flask-admin,jschneier/flask-admin,torotil/flask-admin,toddetzel/flask-admin,closeio/flask-admin,jmagnusson/flask-admin,toddetzel/flask-admin,iurisilvio/flask-admin,quokkaproject/flask-admin,torotil/flask-admin,flask-admin/flask-admin,quokkaproject/flask-admin,betterlife/flask-admin,jschneier/flask-admin,quokkaproject/flask-admin,mikelambert/flask-admin,jmagnusson/flask-admin,flask-admin/flask-admin,mikelambert/flask-admin,iurisilvio/flask-admin,likaiguo/flask-admin,jschneier/flask-admin,flask-admin/flask-admin,rochacbruno/flask-admin,mikelambert/flask-admin,lifei/flask-admin,jschneier/flask-admin,betterlife/flask-admin,rochacbruno/flask-admin,jmagnusson/flask-admin,rochacbruno/flask-admin,jmagnusson/flask-admin,toddetzel/flask-admin,ArtemSerga/flask-admin,flask-admin/flask-admin,iurisilvio/flask-admin,betterlife/flask-admin,ArtemSerga/flask-admin,quokkaproject/flask-admin,closeio/flask-admin,likaiguo/flask-admin,closeio/flask-admin,lifei/flask-admin,lifei/flask-admin,torotil/flask-admin,ArtemSerga/flask-admin |
e35ff2f0e45289c40a57c9488156829c60f9d3a0 | vumi_http_proxy/clickme.py | vumi_http_proxy/clickme.py | #!/usr/bin/env python
import click
from vumi_http_proxy import http_proxy
@click.command()
@click.option('--interface', default="0.0.0.0", help='eg 0.0.0.0')
@click.option('--port', default=8080, help='eg 80')
def cli(interface, port):
cli.interface = str(interface)
cli.port = port
"""This script runs vumi-http-proxy on <interface>:<port>"""
click.echo("Starting connection to %s:%d" % (interface, port))
i = http_proxy.Initialize(["asdf.com"], interface, port)
i.main()
if __name__ == '__main__':
cli()
| #!/usr/bin/env python
import click
from vumi_http_proxy import http_proxy
@click.command()
@click.option('--interface', default="0.0.0.0", help='eg 0.0.0.0')
@click.option('--port', default=8080, help='eg 80')
def cli(interface, port):
"""This script runs vumi-http-proxy on <interface>:<port>"""
interface = str(interface)
click.echo("Starting connection to %s:%d" % (interface, port))
i = http_proxy.Initialize(["asdf.com"], interface, port)
i.main()
if __name__ == '__main__':
cli()
| Change unicode ip to string | Change unicode ip to string
| Python | bsd-3-clause | praekelt/vumi-http-proxy,praekelt/vumi-http-proxy |
435b989d75b9e57cf2fe5fec6892c481a278a102 | examples/capabilities/selenoid_cap_file.py | examples/capabilities/selenoid_cap_file.py | # Desired capabilities example file for Selenoid Grid
#
# The same result can be achieved on the command-line with:
# --cap-string='{"selenoid:options": {"enableVNC": true}}'
capabilities = {
"screenResolution": "1280x1024x24",
"selenoid:options": {
"enableVNC": True,
"enableVideo": False,
},
}
| # Desired capabilities example file for Selenoid Grid
#
# The same result can be achieved on the command-line. Eg:
# --cap-string='{"selenoid:options": {"enableVNC": true}}'
capabilities = {
"acceptSslCerts": True,
"acceptInsecureCerts": True,
"screenResolution": "1920x1080x24",
"selenoid:options": {
"enableVNC": True,
"enableVideo": False,
},
}
| Update an example capabilities file | Update an example capabilities file
| Python | mit | mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase |
a26f3ee3df1f70302bc524e3a8decb1a1266aadd | devito/data/meta.py | devito/data/meta.py | from devito.tools import Tag
__all__ = ['DOMAIN', 'OWNED', 'HALO', 'NOPAD', 'FULL',
'LEFT', 'RIGHT', 'CENTER']
class DataRegion(Tag):
pass
DOMAIN = DataRegion('domain')
OWNED = DataRegion('owned') # within DOMAIN
HALO = DataRegion('halo')
NOPAD = DataRegion('nopad') # == DOMAIN+HALO
FULL = DataRegion('full') # == DOMAIN+HALO+PADDING
class DataSide(Tag):
pass
LEFT = DataSide('left')
RIGHT = DataSide('right')
CENTER = DataSide('center')
| from devito.tools import Tag
__all__ = ['DOMAIN', 'OWNED', 'HALO', 'NOPAD', 'FULL',
'LEFT', 'RIGHT', 'CENTER']
class DataRegion(Tag):
pass
DOMAIN = DataRegion('domain')
OWNED = DataRegion('owned') # within DOMAIN
HALO = DataRegion('halo')
NOPAD = DataRegion('nopad') # == DOMAIN+HALO
FULL = DataRegion('full') # == DOMAIN+HALO+PADDING
class DataSide(Tag):
pass
LEFT = DataSide('left', -1)
CENTER = DataSide('center', 0)
RIGHT = DataSide('right', 1)
| Add static value to LEFT, CENTER, RIGHT | data: Add static value to LEFT, CENTER, RIGHT
| Python | mit | opesci/devito,opesci/devito |
220748a5cc481b8df76af6a1301af94def603ee2 | paci/helpers/display_helper.py | paci/helpers/display_helper.py | """Helper to output stuff"""
from tabulate import tabulate
def print_list(header, entries):
"""Prints out a list"""
print(tabulate(entries, header, tablefmt="grid"))
def print_table(entries):
"""Prints out a table"""
print(tabulate(entries, tablefmt="plain"))
def std_input(text, default):
"""Get input or return default if none is given."""
return input(text.format(default)) or default
| """Helper to output stuff"""
from tabulate import tabulate
import os
def print_list(header, entries):
"""Prints out a list"""
print(tabulate(fix_descriptions(entries), header, tablefmt="presto"))
def print_table(entries):
"""Prints out a table"""
print(tabulate(cleanup_entries(entries), tablefmt="plain"))
def std_input(text, default):
"""Get input or return default if none is given."""
return input(text.format(default)) or default
def fix_descriptions(entries):
"""Fixes the description to fit into the terminal"""
clean_entries = []
ml = get_max_desc_width(get_longest_list(entries))
for entry in entries:
clean_entry = entry
max_value = max(entry, key=len)
for idx, val in enumerate(entry):
if val is max_value:
clean_entry[idx] = entry[idx][:ml] + (entry[idx][ml:] and ' [..]')
clean_entries.append(clean_entry)
return clean_entries
def get_longest_list(entries):
max_list = ['']*len(entries[0])
for entry in entries:
for idx, val in enumerate(entry):
if len(val) > len(max_list[idx]):
max_list[idx] = val
return max_list
def get_max_desc_width(lst):
_, columns = os.popen('stty size', 'r').read().split()
length = int(columns)
max_value = max(lst, key=len)
for val in lst:
if val is not max_value:
length -= len(val)
return length - 15
| Fix how tables are printed on smaller screens | Fix how tables are printed on smaller screens
| Python | mit | tradebyte/paci,tradebyte/paci |
e42f77d374bab66fb1a90322c3b36c8f75f2499c | pft/errors.py | pft/errors.py | """Module that contains error handlers."""
from flask import render_template, Blueprint
error = Blueprint('error', __name__)
@error.app_errorhandler(404)
def page_not_found(e):
"""Return page not found HTML page."""
return render_template('404.html'), 404
@error.app_errorhandler(500)
def internal_server_error(e):
"""Return internal server error HTML page."""
return render_template('500.html'), 500
| """Module that contains error handlers."""
from flask import render_template, Blueprint
from .database import db
error = Blueprint('error', __name__)
@error.app_errorhandler(404)
def page_not_found(e):
"""Return page not found HTML page."""
return render_template('404.html'), 404
@error.app_errorhandler(500)
def internal_server_error(e):
"""Return internal server error HTML page."""
db.session.rollback()
return render_template('500.html'), 500
| Add database rollback to error handler | Add database rollback to error handler
| Python | unknown | gregcowell/PFT,gregcowell/BAM,gregcowell/BAM,gregcowell/PFT |
b728253a668c7ff2fba12678d77344bfc645e40b | dusty/daemon.py | dusty/daemon.py | import os
import atexit
import logging
import socket
from .preflight import preflight_check
from .log import configure_logging
from .notifier import notify
from .constants import SOCKET_PATH, SOCKET_TERMINATOR
def _clean_up_existing_socket():
try:
os.unlink(SOCKET_PATH)
except OSError:
if os.path.exists(SOCKET_PATH):
raise
def _listen_on_socket():
_clean_up_existing_socket()
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(SOCKET_PATH)
sock.listen(1)
logging.info('Listening on socket at {}'.format(SOCKET_PATH))
notify('Dusty is listening for commands')
atexit.register(notify, 'Dusty daemon has terminated')
while True:
try:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
logging.info('Received command: {}'.format(data))
connection.sendall('Received: {}\n'.format(data))
connection.sendall(SOCKET_TERMINATOR)
finally:
connection.close()
except KeyboardInterrupt:
break
except:
logging.exception('Exception on socket listen')
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
_listen_on_socket()
if __name__ == '__main__':
main()
| import os
import atexit
import logging
import socket
from .preflight import preflight_check
from .log import configure_logging
from .notifier import notify
from .constants import SOCKET_PATH, SOCKET_TERMINATOR
def _clean_up_existing_socket(socket_path):
try:
os.unlink(socket_path)
except OSError:
if os.path.exists(socket_path):
raise
def _listen_on_socket(socket_path):
_clean_up_existing_socket(socket_path)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(socket_path)
sock.listen(1)
logging.info('Listening on socket at {}'.format(socket_path))
notify('Dusty is listening for commands')
atexit.register(notify, 'Dusty daemon has terminated')
while True:
try:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
logging.info('Received command: {}'.format(data))
connection.sendall('Received: {}\n'.format(data))
connection.sendall(SOCKET_TERMINATOR)
finally:
connection.close()
except KeyboardInterrupt:
break
except:
logging.exception('Exception on socket listen')
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
_listen_on_socket(SOCKET_PATH)
if __name__ == '__main__':
main()
| Make this easier to test, which we'll get to a bit later | Make this easier to test, which we'll get to a bit later
| Python | mit | gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty |
fca363dec1ff73e34e25084322d5a31dd6fbc1ee | simplestatistics/statistics/coefficient_of_variation.py | simplestatistics/statistics/coefficient_of_variation.py | from .standard_deviation import standard_deviation
from .mean import mean
def coefficient_of_variation(data):
"""
The `coefficient_of_variation`_ is the ratio of the standard deviation to the mean
.. _`coefficient of variation`: https://en.wikipedia.org/wiki/Coefficient_of_variation
Args:
data: A list of numerical objects.
Returns:
A float object.
Examples:
>>> coefficient_of_variation([1, 2, 3])
0.5
>>> coefficient_of_variation([1, 2, 3, 4])
0.5163977794943222
>>> coefficient_of_variation([-1, 0, 1, 2, 3, 4])
1.247219128924647
"""
return standard_deviation(data) / mean(data)
| from .standard_deviation import standard_deviation
from .mean import mean
def coefficient_of_variation(data, sample = True):
"""
The `coefficient of variation`_ is the ratio of the standard deviation to the mean.
.. _`coefficient of variation`: https://en.wikipedia.org/wiki/Coefficient_of_variation
Args:
data: A list of numerical objects.
Returns:
A float object.
Examples:
>>> coefficient_of_variation([1, 2, 3])
0.5
>>> ss.coefficient_of_variation([1, 2, 3], False)
0.408248290463863
>>> coefficient_of_variation([1, 2, 3, 4])
0.5163977794943222
>>> coefficient_of_variation([-1, 0, 1, 2, 3, 4])
1.247219128924647
"""
return standard_deviation(data, sample) / mean(data)
| Add sample param to CV function | Add sample param to CV function
Boolean param to make possible to calculate coefficient of variation
for population (default is sample).
| Python | unknown | tmcw/simple-statistics-py,sheriferson/simplestatistics,sheriferson/simple-statistics-py |
b62415c19459d9e5819b82f464731b166157811d | gym/envs/tests/test_registration.py | gym/envs/tests/test_registration.py | # -*- coding: utf-8 -*-
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in e.message, 'Unexpected message: {}'.format(e)
else:
assert False
| # -*- coding: utf-8 -*-
from gym import error, envs
from gym.envs import registration
from gym.envs.classic_control import cartpole
def test_make():
env = envs.make('CartPole-v0')
assert env.spec.id == 'CartPole-v0'
assert isinstance(env, cartpole.CartPoleEnv)
def test_spec():
spec = envs.spec('CartPole-v0')
assert spec.id == 'CartPole-v0'
def test_missing_lookup():
registry = registration.EnvRegistry()
registry.register(id='Test-v0', entry_point=None)
registry.register(id='Test-v15', entry_point=None)
registry.register(id='Test-v9', entry_point=None)
registry.register(id='Other-v100', entry_point=None)
try:
registry.spec('Test-v1')
except error.UnregisteredEnv:
pass
else:
assert False
def test_malformed_lookup():
registry = registration.EnvRegistry()
try:
registry.spec(u'“Breakout-v0”')
except error.Error as e:
assert 'malformed environment ID' in '{}'.format(e), 'Unexpected message: {}'.format(e)
else:
assert False
| Fix exception message formatting in Python3 | Fix exception message formatting in Python3
| Python | mit | d1hotpep/openai_gym,machinaut/gym,machinaut/gym,d1hotpep/openai_gym,dianchen96/gym,Farama-Foundation/Gymnasium,dianchen96/gym,Farama-Foundation/Gymnasium |
c0ed918e09bcb0c0eb1aec20e375c7da8c7466ef | tests/NongeneratingSymbolsRemove/RecursiveTest.py | tests/NongeneratingSymbolsRemove/RecursiveTest.py | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import TestCase, main
from grammpy import *
from grammpy_transforms import *
class RecursiveTest(TestCase):
pass
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import TestCase, main
from grammpy import *
from grammpy_transforms import *
class A(Nonterminal):
pass
class B(Nonterminal):
pass
class C(Nonterminal):
pass
class D(Nonterminal):
pass
class E(Nonterminal):
pass
class RuleAto0B(Rule):
rule = ([A], [0, B])
class RuleBto1(Rule):
fromSymbol = B
toSymbol = 1
class RuleCto1D(Rule):
rule = ([C], [1, D])
class RuleDto0E(Rule):
rule = ([D], [0, E])
class RuleEto0C(Rule):
rule = ([E], [0, C])
class RecursiveTest(TestCase):
def __init__(self, methodName):
super().__init__(methodName)
self.g = Grammar()
def setUp(self):
self.g = Grammar(terminals=[0, 1],
nonterminals=[A, B],
rules=[RuleAto0B, RuleBto1, RuleCto1D, RuleDto0E, RuleEto0C])
if __name__ == '__main__':
main()
| Add grammar for test of recursive grammar | Add grammar for test of recursive grammar
| Python | mit | PatrikValkovic/grammpy |
1e078b88b4eecaa5a9d0a2ada9a64237fe3c4f09 | users/management/commands/social_auth_migrate.py | users/management/commands/social_auth_migrate.py | from allauth.socialaccount.models import SocialAccount
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from social_django.models import UserSocialAuth
class Command(BaseCommand):
help = 'Migrate allauth social logins to social auth'
def handle(self, *args, **options):
self.stdout.write(self.style.SUCCESS('Going through all SocialAccount objects...'))
# Retrieve existing objects
providers = {}
for usa in UserSocialAuth.objects.all():
provider = providers.setdefault(usa.provider, {})
provider[usa.user_id] = usa
for sa in SocialAccount.objects.all():
provider = providers.setdefault(sa.provider, {})
if sa.user_id in provider:
continue
provider[sa.user_id] = UserSocialAuth.objects.create(
user=sa.user,
provider=sa.provider,
uid=sa.uid,
extra_data=sa.extra_data,
)
self.stdout.write(self.style.SUCCESS('Added. (provider: {}, uid: {})'.format(sa.provider, sa.uid)))
self.stdout.write(self.style.SUCCESS('Done.'))
| from allauth.socialaccount.models import SocialAccount, SocialApp
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from social_django.models import UserSocialAuth
class Command(BaseCommand):
help = 'Migrate allauth social logins to social auth'
def add_arguments(self, parser):
parser.add_argument('--apps', action='store_true', dest='apps',
help='Print social app keys and secrets')
parser.add_argument('--accounts', action='store_true', dest='accounts',
help='Migrate accounts')
def migrate_accounts(self):
self.stdout.write(self.style.SUCCESS('Going through all SocialAccount objects...'))
# Retrieve existing objects
providers = {}
for usa in UserSocialAuth.objects.all():
provider = providers.setdefault(usa.provider, {})
provider[usa.user_id] = usa
for sa in SocialAccount.objects.all():
provider = providers.setdefault(sa.provider, {})
if sa.user_id in provider:
continue
provider[sa.user_id] = UserSocialAuth.objects.create(
user=sa.user,
provider=sa.provider,
uid=sa.uid,
extra_data=sa.extra_data,
)
self.stdout.write(self.style.SUCCESS('Added. (provider: {}, uid: {})'.format(sa.provider, sa.uid)))
self.stdout.write(self.style.SUCCESS('Done.'))
def migrate_apps(self):
for app in SocialApp.objects.all():
app_id = app.provider.upper()
print("SOCIAL_AUTH_%s_KEY = '%s'" % (app_id, app.client_id))
print("SOCIAL_AUTH_%s_SECRET = '%s'" % (app_id, app.secret))
print()
def handle(self, *args, **options):
if options['apps']:
self.migrate_apps()
if options['accounts']:
self.migrate_accounts()
| Implement app secret printing to social_auth migration tool | Implement app secret printing to social_auth migration tool
| Python | mit | mikkokeskinen/tunnistamo,mikkokeskinen/tunnistamo |
7bef391ef605716a02cdc9bb572234ff26082ba4 | antxetamedia/archive/templatetags/absolute_urls.py | antxetamedia/archive/templatetags/absolute_urls.py | from django import template
register = template.Library()
@register.assignment_tag(takes_context=True)
def build_absolute_uri(context, url):
if url:
return context['request'].build_absolute_uri(url)
| from django import template
register = template.Library()
@register.simple_tag(takes_context=True)
def build_absolute_uri(context, url):
if url:
return context['request'].build_absolute_uri(url)
| Use simple_tag decorators as those tags can now also be used for assignments | Use simple_tag decorators as those tags can now also be used for assignments
| Python | agpl-3.0 | GISAElkartea/amv2,GISAElkartea/amv2,GISAElkartea/amv2 |
314d45476bd079fe3b9bb97af181b2b6f306fe73 | cozify/test/test_config.py | cozify/test/test_config.py | #!/usr/bin/env python3
import os, tempfile
from cozify import config
from cozify.test import debug
from cozify.test.fixtures import tmp_hub
def test_config_XDG(tmp_hub):
assert config._initXDG()
def test_config_XDG_env(tmp_hub):
with tempfile.TemporaryDirectory() as td:
os.environ["XDG_CONFIG_HOME"] = td
config.setStatePath(config._initXDG())
assert td in config.state_file
| #!/usr/bin/env python3
import os, tempfile
from cozify import config
from cozify.test import debug
from cozify.test.fixtures import tmp_hub
def test_config_XDG(tmp_hub):
assert config._initXDG()
def test_config_XDG_env(tmp_hub):
with tempfile.TemporaryDirectory() as td:
os.environ["XDG_CONFIG_HOME"] = td
config.setStatePath(config._initXDG())
assert td in config.state_file
def test_config_XDG_basedir(tmp_hub):
# using mktemp deliberately to let _initXDG create it
td = tempfile.mktemp()
os.environ["XDG_CONFIG_HOME"] = td
assert config._initXDG()
assert os.path.isdir(td)
os.removedirs(td + '/python-cozify')
| Test for XDG basedir creation | Test for XDG basedir creation
| Python | mit | Artanicus/python-cozify,Artanicus/python-cozify |
2d067d0dbf4f04203c9bda2d8fb48d58fae3913d | datapoints/sql_queries.py | datapoints/sql_queries.py |
## this should show in red if the COUNT is less than the total
## number of regions that exist for that relationshiop
show_region_aggregation = '''
SELECT
i.name
, SUM(d.value) as value
, r.full_name
FROM region_relationship rr
INNER JOIN datapoint d
ON rr.region_1_id = d.region_id
INNER JOIN indicator i
ON d.indicator_id = i.id
INNER JOIN region r
ON rr.region_0_id = r.id
GROUP BY r.full_name, i.name,i.id ,d.campaign_id
'''
|
## this should show in red if the COUNT is less than the total
## number of regions that exist for that relationshiop
show_region_aggregation = '''
SELECT
i.name
, SUM(d.value) as value
, r.name
FROM region_relationship rr
INNER JOIN datapoint d
ON rr.region_1_id = d.region_id
INNER JOIN indicator i
ON d.indicator_id = i.id
INNER JOIN region r
ON rr.region_0_id = r.id
GROUP BY r.name, i.name,i.id ,d.campaign_id
'''
| Fix a bug in the region aggregation query. | Fix a bug in the region aggregation query.
There is no full_name column for regions; it is just name.
| Python | agpl-3.0 | SeedScientific/polio,unicef/rhizome,unicef/polio,unicef/polio,unicef/rhizome,unicef/rhizome,SeedScientific/polio,unicef/rhizome,SeedScientific/polio,SeedScientific/polio,unicef/polio,unicef/polio,SeedScientific/polio |
af848bfd9f50fe8d5272be927c8d93c26e45c3cd | dask_ndmeasure/__init__.py | dask_ndmeasure/__init__.py | # -*- coding: utf-8 -*-
__author__ = """John Kirkham"""
__email__ = "[email protected]"
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
| # -*- coding: utf-8 -*-
__author__ = """John Kirkham"""
__email__ = "[email protected]"
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
| Add another newline before author/date info | Add another newline before author/date info
| Python | bsd-3-clause | dask-image/dask-ndmeasure |
9a9ecde6f88a6c969f23dbcfc5bbc7e611f7f138 | version_info/get_version.py | version_info/get_version.py | import git
import version_info.exceptions
__all__ = (
'get_git_version',
'find_versions',
)
def get_git_version(path):
repo = git.Repo(path)
head_commit = repo.head.ref.commit
for tag in repo.tags:
if tag.commit == head_commit:
return tag.name, head_commit.hexsha
return None, head_commit.hexsha
GET_VERSION_MAPPING = {
'git': get_git_version,
}
def find_versions(repo_list):
"""
Passing a list of tuples that consist of:
('reference_name', 'git', '/full/path/to/repo')
Where:
* reference_name can be anything and it will be yielded back in name
* second element is the VCS type; for a list of supported VCS's see
README.rst
You receive a list of namedtuples:
[
(name='reference_name', tag='1.0', commit='fb666d55d3')
]
:param repo_list: list of tuples as specified
:return: list of namedtuples
"""
for name, vcs_type, path in repo_list:
vcs_type_normalized = vcs_type.lower()
try:
version_func = GET_VERSION_MAPPING[vcs_type_normalized]
except KeyError as exc:
raise version_info.exceptions.VCSNotSupported(exc.args[0])
yield (name,) + version_func(path)
| import collections
import git
import version_info.exceptions
__all__ = (
'get_git_version',
'find_versions',
)
VersionSpec = collections.namedtuple('VersionSpec', ('name', 'tag', 'commit'))
def get_git_version(path):
repo = git.Repo(path)
head_commit = repo.head.ref.commit
for tag in repo.tags:
if tag.commit == head_commit:
return tag.name, head_commit.hexsha
return None, head_commit.hexsha
GET_VERSION_MAPPING = {
'git': get_git_version,
}
def find_versions(repo_list):
"""
Passing a list of tuples that consist of:
('reference_name', 'git', '/full/path/to/repo')
Where:
* reference_name can be anything and it will be yielded back in name
* second element is the VCS type; for a list of supported VCS's see
README.rst
You receive a list of namedtuples:
[
(name='reference_name', tag='1.0', commit='fb666d55d3')
]
:param repo_list: list of tuples as specified
:return: list of namedtuples
"""
for name, vcs_type, path in repo_list:
vcs_type_normalized = vcs_type.lower()
try:
version_func = GET_VERSION_MAPPING[vcs_type_normalized]
except KeyError as exc:
raise version_info.exceptions.VCSNotSupported(exc.args[0])
yield VersionSpec(name, *version_func(path))
| Make find_versions return a namedtuple as documented | Make find_versions return a namedtuple as documented
| Python | mit | TyMaszWeb/python-version-info |
01036133ed749d96a74bafb6b3f8670c06c63a84 | 1selfOpenDashboardCommand.py | 1selfOpenDashboardCommand.py | import sublime, sublime_plugin, webbrowser
QD_URL = "https://app.1self.co"
class GoTo1selfDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "1self.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
VERSION = SETTINGS.get("VERSION")
qd_url = QD_URL
url = "%(qd_url)s/dashboard?streamId=%(stream_id)s&readToken=%(read_token)s&source=app-id-598358b6aacda229634d443c9539662b&version=%(VERSION)s" % locals()
print(url)
webbrowser.open_new_tab(url) | import sublime, sublime_plugin, webbrowser
QD_URL = "http://www.1self.co"
class GoTo1selfDashboardCommand(sublime_plugin.TextCommand):
def run(self,edit):
SETTINGS = {}
SETTINGS_FILE = "1self.sublime-settings"
SETTINGS = sublime.load_settings(SETTINGS_FILE)
stream_id = SETTINGS.get("streamId")
read_token = SETTINGS.get("readToken")
VERSION = SETTINGS.get("VERSION")
qd_url = QD_URL
url = "%(qd_url)s/?streamid=%(stream_id)s&readToken=%(read_token)s&appid=app-id-598358b6aacda229634d443c9539662b&version=%(VERSION)s" % locals()
print(url)
webbrowser.open_new_tab(url) | Change landing URLs to website | Change landing URLs to website
| Python | apache-2.0 | 1self/sublime-text-plugin,1self/sublime-text-plugin,1self/sublime-text-plugin |
29421f62acae813097ab84973cc352ebd9ff70c6 | blockbuster/example_config_files/example_config.py | blockbuster/example_config_files/example_config.py | # General Settings
timerestriction = False
debug_mode = True
# Email Settings
# emailtype = "Gmail"
emailtype = "Console"
# SMS Settings
# outboundsmstype = "WebService"
outboundsmstype = "Console"
# Twilio Auth Keys
account_sid = "twilio sid here"
auth_token = "auth token here"
# SMS Services Auth
basic_auth = 'basic auth header here'
spsms_host = 'host here'
spsms_url = 'url here'
# Postgres Connection Details
pg_host = 'localhost'
pg_dbname = 'blockbuster'
pg_user = 'blockbuster'
pg_passwd = 'blockbuster'
# Proxy Details
proxy_user = ''
proxy_pass = ''
proxy_host = ''
proxy_port = 8080
# Testing
test_to_number = ''
test_from_number = ''
# Pushover Keys
pushover_app_token = "pushover_token"
# Email Configuration
smtp_server = 'smtp.gmail.com:587'
mail_username = ''
mail_fromaddr = mail_username
mail_password = ''
# API Variables
api_username = "username here"
api_passphrase = "passphrase here"
# New Number
return_number = "+440000111222" | # General Settings
timerestriction = False
debug_mode = True
log_directory = './logs'
# Email Settings
# emailtype = "Gmail"
emailtype = "Console"
# SMS Settings
# outboundsmstype = "WebService"
outboundsmstype = "Console"
# Twilio Auth Keys
account_sid = "twilio sid here"
auth_token = "auth token here"
# SMS Services Auth
basic_auth = 'basic auth header here'
spsms_host = 'host here'
spsms_url = 'url here'
# Postgres Connection Details
pg_host = 'localhost'
pg_dbname = 'blockbuster'
pg_user = 'blockbuster'
pg_passwd = 'blockbuster'
# Proxy Details
proxy_user = ''
proxy_pass = ''
proxy_host = ''
proxy_port = 8080
# Testing
test_to_number = ''
test_from_number = ''
# Pushover Keys
pushover_app_token = "pushover_token"
# Email Configuration
smtp_server = 'smtp.gmail.com:587'
mail_username = ''
mail_fromaddr = mail_username
mail_password = ''
mail_monitoring_addr = ''
# API Variables
api_username = "username here"
api_passphrase = "passphrase here"
# New Number
return_number = "+440000111222" | Add new configuration setting for log_directory | Add new configuration setting for log_directory
| Python | mit | mattstibbs/blockbuster-server,mattstibbs/blockbuster-server |
c5609fe1b48cdd5740215c1d0783eaafdfe2e76b | listen/__init__.py | listen/__init__.py | #!/usr/bin/python
# -*- coding: utf8 -*-
"""
The MIT License (MIT)
Copyright (c) 2014 Jarl Stefansson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from __future__ import print_function # This API requires Python 2.7 or more recent
import sys
if sys.version < "2.7.0":
print("listen requires Python 2.7 or more recent")
sys.exit(1)
from listen.signal_handler import SignalHandler
__all__ = ["listen"]
__version__ = "0.1.0"
| #!/usr/bin/python
# -*- coding: utf8 -*-
"""
The MIT License (MIT)
Copyright (c) 2014 Jarl Stefansson
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# Prepare for deprication of versions < 2.7
#from __future__ import print_function # This API requires Python 2.7 or more recent
#import sys
#if sys.version < "2.7.0":
# print("listen requires Python 2.7 or more recent")
# sys.exit(1)
from listen.signal_handler import SignalHandler
__all__ = ["listen"]
__version__ = "0.1.1"
| Remove requirement on python > 2.7 | Remove requirement on python > 2.7
| Python | mit | antevens/listen,antevens/listen |
1b179405245bc7d7d6157528bd64e2b399491090 | quantecon/optimize/__init__.py | quantecon/optimize/__init__.py | """
Initialization of the optimize subpackage
"""
from .scalar_maximization import brent_max
from .root_finding import *
| """
Initialization of the optimize subpackage
"""
from .scalar_maximization import brent_max
from .root_finding import newton, newton_halley, newton_secant, bisect, brentq
| Fix import to list items | Fix import to list items
| Python | bsd-3-clause | oyamad/QuantEcon.py,QuantEcon/QuantEcon.py,oyamad/QuantEcon.py,QuantEcon/QuantEcon.py |
fa7172a5e3231e738d85df3baba130fdec7497d1 | derrida/outwork/views.py | derrida/outwork/views.py | from django.views.generic import ListView
from haystack.query import SearchQuerySet
from haystack.inputs import Clean
from derrida.outwork.models import Outwork
class OutworkListView(ListView):
model = Outwork
template_name = 'outwork/outwork_list.html'
paginate_by = 16
def get_queryset(self):
# restrict to published articles
sqs = SearchQuerySet().models(self.model).filter(published=True)
if self.request.GET.get('query', None):
sqs = sqs.filter(content=Clean(self.request.GET['query']))
# default sort ?
return sqs
# return Outwork.objects.published(for_user=self.request.user)
| from django.views.generic import ListView
from haystack.query import SearchQuerySet
from haystack.inputs import Clean, Raw
from derrida.outwork.models import Outwork
class OutworkListView(ListView):
model = Outwork
template_name = 'outwork/outwork_list.html'
paginate_by = 16
def get_queryset(self):
# restrict to published articles
sqs = SearchQuerySet().models(self.model).filter(published=Raw(True))
if self.request.GET.get('query', None):
sqs = sqs.filter(content=Clean(self.request.GET['query']))
# default sort ?
return sqs
# return Outwork.objects.published(for_user=self.request.user)
| Fix outwork list view to properly filter on published=true in Solr | Fix outwork list view to properly filter on published=true in Solr
| Python | apache-2.0 | Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django |
0c8ab03600fa806a109861f0e560e3b3a6850a66 | nbgrader/apps/formgradeapp.py | nbgrader/apps/formgradeapp.py | from IPython.config.loader import Config
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
from nbgrader.apps.customnbconvertapp import aliases as base_aliases
from nbgrader.apps.customnbconvertapp import flags as base_flags
from nbgrader.templates import get_template_path
aliases = {}
aliases.update(base_aliases)
aliases.update({
'regexp': 'FindStudentID.regexp'
})
flags = {}
flags.update(base_flags)
flags.update({
'serve': (
{'FormgradeApp': {'postprocessor_class': 'nbgrader.postprocessors.ServeFormGrader'}},
"Run the form grading server"
)
})
class FormgradeApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-formgrade')
description = Unicode(u'Grade a notebook using an HTML form')
aliases = aliases
flags = flags
student_id = Unicode(u'', config=True)
def _export_format_default(self):
return 'html'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.FindStudentID'
]
self.extra_config.Exporter.template_file = 'formgrade'
self.extra_config.Exporter.template_path = ['.', get_template_path()]
self.config.merge(self.extra_config)
| from IPython.config.loader import Config
from IPython.utils.traitlets import Unicode
from nbgrader.apps.customnbconvertapp import CustomNbConvertApp
from nbgrader.apps.customnbconvertapp import aliases as base_aliases
from nbgrader.apps.customnbconvertapp import flags as base_flags
from nbgrader.templates import get_template_path
aliases = {}
aliases.update(base_aliases)
aliases.update({
'regexp': 'FindStudentID.regexp'
})
flags = {}
flags.update(base_flags)
flags.update({
'serve': (
{'FormgradeApp': {'postprocessor_class': 'nbgrader.postprocessors.ServeFormGrader'}},
"Run the form grading server"
)
})
class FormgradeApp(CustomNbConvertApp):
name = Unicode(u'nbgrader-formgrade')
description = Unicode(u'Grade a notebook using an HTML form')
aliases = aliases
flags = flags
ipython_dir = "/tmp"
student_id = Unicode(u'', config=True)
def _export_format_default(self):
return 'html'
def build_extra_config(self):
self.extra_config = Config()
self.extra_config.Exporter.preprocessors = [
'nbgrader.preprocessors.FindStudentID'
]
self.extra_config.Exporter.template_file = 'formgrade'
self.extra_config.Exporter.template_path = ['.', get_template_path()]
self.config.merge(self.extra_config)
| Use default IPython profile when converting to HTML | Use default IPython profile when converting to HTML
| Python | bsd-3-clause | ellisonbg/nbgrader,ellisonbg/nbgrader,jupyter/nbgrader,modulexcite/nbgrader,jhamrick/nbgrader,jdfreder/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,EdwardJKim/nbgrader,MatKallada/nbgrader,alope107/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,dementrock/nbgrader,dementrock/nbgrader,jhamrick/nbgrader,modulexcite/nbgrader,jupyter/nbgrader,alope107/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jdfreder/nbgrader,MatKallada/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader |
037e15f383c326f1f4e7de59bc3ec3520ac6ce40 | pystachio/__init__.py | pystachio/__init__.py | __author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
| __author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
import sys
if sys.version_info < (2, 6, 5):
raise ImportError("pystachio requires Python >= 2.6.5")
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
| Add check for minimum Python version | Add check for minimum Python version
| Python | mit | wickman/pystachio |
d5b8018d1d722f3b1e980425af79934265b0f3eb | tests/test_navigation.py | tests/test_navigation.py | def test_right_arrows(page):
page.goto("index.html")
while(True):
# Keeps going to the next page until there is no right arrow
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
else:
break
# TODO make a similar test but going from de last page
# to the previous one until it gets to the first one
| def get_menu_titles(page) -> list:
page.goto("index.html")
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
menu_titles = []
for i in menu_list:
menu_item = i.as_element().inner_text()
menu_titles.append(menu_item)
return menu_titles
def test_check_titles(page):
menu_list = get_menu_titles(page)
page.goto("index.html")
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title()
page_title = page_title.split(" — ")[0]
assert page_title == menu_item
else:
break
| Implement assertions and a for instead of a while loop | Implement assertions and a for instead of a while loop
| Python | agpl-3.0 | PyAr/PyZombis,PyAr/PyZombis,PyAr/PyZombis |
c709c58fc128076af5f58d33dcd0983436573d79 | tests/test_parsingapi.py | tests/test_parsingapi.py | from __future__ import unicode_literals, division, absolute_import
from flexget.plugin import get_plugin_by_name, get_plugins
from flexget.plugins.parsers import plugin_parsing
class TestParsingAPI(object):
def test_all_types_handled(self):
declared_types = set(plugin_parsing.PARSER_TYPES)
method_handlers = set(m[6:] for m in dir(get_plugin_by_name('parsing').instance) if m.startswith('parse_'))
assert set(declared_types) == set(method_handlers), \
'declared parser types: %s, handled types: %s' % (declared_types, method_handlers)
def test_parsing_plugins_have_parse_methods(self):
for parser_type in plugin_parsing.PARSER_TYPES:
for plugin in get_plugins(group='%s_parser' % parser_type):
assert hasattr(plugin.instance, 'parse_%s' % parser_type), \
'{type} parsing plugin {name} has no parse_{type} method'.format(type=parser_type, name=plugin.name)
| from __future__ import unicode_literals, division, absolute_import
from flexget.plugin import get_plugin_by_name, get_plugins
from flexget.plugins.parsers import plugin_parsing
class TestParsingAPI(object):
def test_all_types_handled(self):
declared_types = set(plugin_parsing.PARSER_TYPES)
method_handlers = set(m[6:] for m in dir(get_plugin_by_name('parsing').instance) if m.startswith('parse_'))
assert set(declared_types) == set(method_handlers), \
'declared parser types: %s, handled types: %s' % (declared_types, method_handlers)
def test_parsing_plugins_have_parse_methods(self):
for parser_type in plugin_parsing.PARSER_TYPES:
for plugin in get_plugins(group='%s_parser' % parser_type):
assert hasattr(plugin.instance, 'parse_%s' % parser_type), \
'{type} parsing plugin {name} has no parse_{type} method'.format(type=parser_type, name=plugin.name)
class TestTaskParsing(object):
config = """
tasks:
explicit_parser:
parsing:
movie: guessit
series: guessit
"""
def test_selected_parser_cleared(self, manager, execute_task):
# make sure when a non-default parser is installed on a task, it doesn't affect other tasks
execute_task('explicit_parser')
assert not plugin_parsing.selected_parsers
| Add a test to verify plugin_parsing clears selected parsers after task | Add a test to verify plugin_parsing clears selected parsers after task
| Python | mit | tobinjt/Flexget,Flexget/Flexget,jawilson/Flexget,sean797/Flexget,OmgOhnoes/Flexget,poulpito/Flexget,antivirtel/Flexget,ianstalk/Flexget,JorisDeRieck/Flexget,tarzasai/Flexget,Pretagonist/Flexget,malkavi/Flexget,dsemi/Flexget,sean797/Flexget,tobinjt/Flexget,Pretagonist/Flexget,crawln45/Flexget,Danfocus/Flexget,tobinjt/Flexget,oxc/Flexget,gazpachoking/Flexget,qk4l/Flexget,jawilson/Flexget,ianstalk/Flexget,poulpito/Flexget,drwyrm/Flexget,JorisDeRieck/Flexget,antivirtel/Flexget,gazpachoking/Flexget,Danfocus/Flexget,sean797/Flexget,Flexget/Flexget,qk4l/Flexget,jacobmetrick/Flexget,JorisDeRieck/Flexget,qk4l/Flexget,jawilson/Flexget,oxc/Flexget,Flexget/Flexget,qvazzler/Flexget,Danfocus/Flexget,jacobmetrick/Flexget,LynxyssCZ/Flexget,crawln45/Flexget,ianstalk/Flexget,tarzasai/Flexget,Danfocus/Flexget,tarzasai/Flexget,malkavi/Flexget,crawln45/Flexget,OmgOhnoes/Flexget,crawln45/Flexget,dsemi/Flexget,JorisDeRieck/Flexget,LynxyssCZ/Flexget,dsemi/Flexget,jacobmetrick/Flexget,antivirtel/Flexget,poulpito/Flexget,qvazzler/Flexget,LynxyssCZ/Flexget,jawilson/Flexget,Pretagonist/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,oxc/Flexget,tobinjt/Flexget,Flexget/Flexget,OmgOhnoes/Flexget,malkavi/Flexget,drwyrm/Flexget,qvazzler/Flexget,drwyrm/Flexget |
278b17859e4ad7464098a715777fcb755acf258c | doTranscode.py | doTranscode.py | #!/usr/bin/env python
import encoders
import decoders
import config
import tempfile
import os
def transcode(inF, outF, options, type=None):
"Transcodes a file"
if type == None:
type = os.path.splitext(outF)[1][1:].lower()
#Get the file's metadata
meta = decoders.getMetadata(inF)
#Decode the file
f = tempfile.NamedTemporaryFile()
inF_real = decoders.decode(inF, f.name)
if not inF_real:
return False
#Encode it
succ = encoders.encode(inF_real, outF, type, options, meta)
#Clean up
f.close()
return succ
| #!/usr/bin/env python
import encoders
import decoders
import config
import tempfile
import os
def transcode(inF, outF, options, type=None):
"Transcodes a file"
if type == None:
type = os.path.splitext(outF)[1][1:].lower()
#Get the file's metadata
meta = decoders.getMetadata(inF)
#Decode the file
f = tempfile.NamedTemporaryFile(suffix=".wav")
inF_real = decoders.decode(inF, f.name)
if not inF_real:
return False
#Encode it
succ = encoders.encode(inF_real, outF, type, options, meta)
#Clean up
f.close()
return succ
| Make sure that the temporary file has a `wav` extension because a certain encoder was designed for Windows and thinks that you would never possibly have a file without an extension so adds `.wav` if it's not there on the input file | Make sure that the temporary file has a `wav` extension because a certain encoder was designed for Windows and thinks that you would never possibly have a file without an extension so adds `.wav` if it's not there on the input file | Python | isc | jeffayle/Transcode |
81069682d724c0a1e2cd292e286e4148cd9c3d9d | scraping/IEEE/main.py | scraping/IEEE/main.py | """IEEE Xplore API Request.
Usage:
IEEE/main.py -h [-au AUTHOR] [-ti TITLE] [-ab ABSTRACT] [-py YEAR] [-hc
NUMBER]
Options:
-h --help show this
-au AUTHOR Terms to search for in Author [default: ""]
-ti TITLE Terms to search for in Title [default: ""]
-ab ABSTRACT Terms to search for in the Abstract [default: ""]
-py YEAR Terms to search for in Year [default: ""]
-hc NUMBER Number of records to fetch. [default: 25]
"""
from scraping.tools import *
from docopt import docopt
if __name__ == '__main__':
arguments = docopt(__doc__, version='IEEE Xplore API Request')
parameters = [arguments['-au'], arguments['-ti'], arguments['-ab'],
arguments['-py'], arguments['-hc']]
standard = 'http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?'
url = create_url_search(parameters=parameters, standard=standard)
root = fetch_xml(url)
parents = root.getchildren()
[parents.remove(parents[0]) for _ in range(2)]
for document in parents:
article = xml_to_dict(document)
post = iee_to_axelbib(article)
send = post_to_axelbib(post)
| """IEEE Xplore API Request.
Usage:
IEEE/main.py -h [-au AUTHOR] [-ti TITLE] [-ab ABSTRACT] [-py YEAR] [-hc
NUMBER]
Options:
-h --help show this
-au AUTHOR Terms to search for in Author [default: ""]
-ti TITLE Terms to search for in Title [default: ""]
-ab ABSTRACT Terms to search for in the Abstract [default: ""]
-py YEAR Terms to search for in Year [default: ""]
-hc NUMBER Number of records to fetch. [default: 25]
"""
from scraping.tools import *
from docopt import docopt
if __name__ == '__main__':
arguments = docopt(__doc__, version='IEEE Xplore API Request')
parameters = [arguments['-au'], arguments['-ti'], arguments['-ab'],
arguments['-py'], arguments['-hc']]
standard = 'http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?'
url = create_url_search(parameters=parameters, standard=standard)
root = fetch_xml(url)
parents = root.getchildren()
for _ in range(2): parents.remove(parents[0])
for document in parents:
article = xml_to_dict(document)
post = iee_to_axelbib(article)
send = post_to_axelbib(post)
| Fix loop to delete branches from xml. | Fix loop to delete branches from xml.
| Python | mit | ArcasProject/Arcas |
4b6ca2d806d31daa77825f015c6047c8ec3b16d6 | python/startup.py | python/startup.py | # pylint: disable=unused-import, unused-variable, missing-docstring
def _readline():
try:
import readline
except ImportError:
print("Module readline not available.")
else:
import rlcompleter
readline.parse_and_bind("tab: complete")
import os
histfile = os.path.join(os.environ["HOME"], '.python_history')
try:
readline.read_history_file(histfile)
except IOError:
pass
import atexit
atexit.register(readline.write_history_file, histfile)
del os, histfile
_readline()
del _readline
import sys
sys.ps1 = "\001\033[01;33m\002>>>\001\033[00m\002 "
sys.ps2 = "\001\033[01;33m\002...\001\033[00m\002 "
| # pylint: disable=unused-import, unused-variable, missing-docstring
def _readline():
try:
import readline
except ImportError:
print("Module readline not available.")
else:
import rlcompleter
readline.parse_and_bind("tab: complete")
import os
histfile = os.path.join(os.environ["HOME"], 'python', '.history')
try:
readline.read_history_file(histfile)
except IOError:
pass
import atexit
atexit.register(readline.write_history_file, histfile)
del os, histfile
_readline()
del _readline
import sys
sys.ps1 = "\001\033[01;33m\002>>>\001\033[00m\002 "
sys.ps2 = "\001\033[01;33m\002...\001\033[00m\002 "
| Move python rl history file | Move python rl history file
just to help clean up ~/
| Python | mit | rwstauner/run_control,rwstauner/run_control,rwstauner/run_control,rwstauner/run_control,rwstauner/run_control |
ae897509ecc7f190b31cc34085aacf81e45bc36e | nflpool/data/secret-config.py | nflpool/data/secret-config.py | from nflpool.data.dbsession import DbSessionFactory
# You will need an account from MySportsFeed to access their API. They offer free access to developers
# Edit below with your credentials and then save as secret.py
msf_username = 'YOURUSERNAME'
msf_pw = 'YOURPASSWORD'
su_email = ''
slack_webhook_url = ''
| from nflpool.data.dbsession import DbSessionFactory
# You will need an account from MySportsFeed to access their API. They offer free access to developers
# Edit below with your credentials and then save as secret.py
msf_username = 'YOURUSERNAME'
msf_pw = 'YOURPASSWORD'
su_email = ''
slack_webhook_url = ''
msf_api = 'YOUR API KEY'
msf_v2pw = 'MYSPORTSFEEDS' | Add the MSF API key and password fields | Add the MSF API key and password fields
| Python | mit | prcutler/nflpool,prcutler/nflpool |
df4c12d9e2b07db9aa9a1406f61020eb78998bef | nickenbot/command/__init__.py | nickenbot/command/__init__.py | import os
import string
import importlib
import traceback
from .. import irc
def execute(**kwargs):
module_string = string.join([__name__, kwargs['command']], '.')
module = None
try:
module = importlib.import_module(module_string)
except ImportError as e:
traceback.print_exc()
irc.send_to_channel(kwargs['channel'], "No such command.")
if not module == None:
module.execute(**kwargs)
| import os
import fnmatch
import string
import importlib
import traceback
from .. import irc
def get_all():
files = os.listdir('./nickenbot/command')
files.remove('__init__.py')
commands = [os.path.splitext(f)[0] for f in files if fnmatch.fnmatch(f, '*.py')]
commands = [string.replace(c, '_', '-') for c in commands]
return commands
def execute(**kwargs):
print(kwargs['command'])
command = string.replace(kwargs['command'], '-', '_')
print(command)
module_string = string.join([__name__, command], '.')
module = None
try:
module = importlib.import_module(module_string)
except ImportError as e:
traceback.print_exc()
irc.send_to_channel(kwargs['channel'], "No such command.")
if not module == None:
module.execute(**kwargs)
| Add support for hyphens, and list of commands | Add support for hyphens, and list of commands
Adds a function to retrieve all commands, and converts incoming commands
from hyphenated to underscored form.
| Python | mit | brlafreniere/nickenbot,brlafreniere/nickenbot |
18059a0515ea5f6edf87e8485200f001503459cd | info-txt.py | info-txt.py | # XML Parsing
import xml.etree.ElementTree as ET
# HTML output
import dominate as dom
from dominate.tags import *
# Interact with user machine
import datetime
from sys import argv
import os
import time
import webbrowser
second = 1000
minute = 60000
hour = 3600000
class SMS:
'''base SMS class to store a single message'''
def __init__(self, date, party, message):
self.date = date
self.message = message
self.party = party
self.responseTime = 0
def transcribe(root, party1, party2):
'''simplify the extracted SMS XML tree'''
SMSlist = []
for sms in root.findall('sms'):
newSMS = SMS(sms.attrib['date'], sms.attrib['type'], sms.attrib['body'])
SMSlist.append(newSMS)
return SMSlist
def main(party1, party2):
'''main function that executes program function'''
messages = transcribe(ET.parse('sms.xml').getroot(), party1, party2)
if __name__ == '__main__':
if (len(argv) < 3):
raise Exception('Please enter your name and then your friend\'s name')
main(argv[1], argv[2])
| # XML Parsing
import xml.etree.ElementTree as ET
# HTML output
import dominate as dom
from dominate.tags import *
# Interact with user machine
import datetime
from sys import argv
import os
import time
import webbrowser
second = 1000
minute = 60000
hour = 3600000
class SMS:
'''base SMS class to store a single message'''
def __init__(self, date, party, message):
self.date = date
self.message = message
self.party = party
self.responseTime = 0
def transcribe(root, party1, party2):
'''simplify the extracted SMS XML tree'''
SMSlist = []
for sms in root.findall('sms'):
newSMS = SMS(sms.attrib['date'], sms.attrib['type'], sms.attrib['body'])
SMSlist.append(newSMS)
# Traverse the list backwards to find out when the sms was responded to
for prompt in reversed(SMSlist):
if prompt.party == sms.party:
break
else:
sms.responseTime = sms.date - prompt.date
return SMSlist
def main(party1, party2):
'''main function that executes program function'''
messages = transcribe(ET.parse('sms.xml').getroot(), party1, party2)
if __name__ == '__main__':
if (len(argv) < 3):
raise Exception('Please enter your name and then your friend\'s name')
main(argv[1], argv[2])
| Determine response time for messages | Determine response time for messages
| Python | mit | 2nd47/info-txt |
d9b06edb63d20550c4b3fa0fa6924d99724dc11a | examples/image_resize.py | examples/image_resize.py | from __future__ import print_function
from transloadit.client import Transloadit
tl = Transloadit('TRANSLOADIT_KEY', 'TRANSLOADIT_SECRET')
ass = tl.new_assembly()
ass.add_file(open('fixtures/lol_cat.jpg', 'rb'))
ass.add_step('resize', '/image/resize', {'width': 70, 'height': 70})
response = ass.create(wait=True)
result_url = response.data.get('results').get('resize')[0].get('ssl_url')
print('Your result:', result_url)
| from transloadit.client import Transloadit
tl = Transloadit("TRANSLOADIT_KEY", "TRANSLOADIT_SECRET")
ass = tl.new_assembly()
ass.add_file(open("fixtures/lol_cat.jpg", "rb"))
ass.add_step("resize", "/image/resize", {"width": 70, "height": 70})
response = ass.create(wait=True)
result_url = response.data.get("results").get("resize")[0].get("ssl_url")
print("Your result:", result_url)
| Update example syntax to python3 | Update example syntax to python3
| Python | mit | ifedapoolarewaju/transloadit-python-sdk |
b2b84adc6af10757f48845e830a55b40a9fa7ead | breakpad.py | breakpad.py | # Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Do you want to send a crash report [y/N]? ',
if sys.stdin.read(1).lower() == 'y':
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
@atexit.register
def CheckForException():
if 'test' in sys.modules['__main__'].__file__:
# Probably a unit test.
return
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(''.join(traceback.format_tb(last_tb)))
| # Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Do you want to send a crash report [y/N]? ',
if sys.stdin.read(1).lower() == 'y':
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
#@atexit.register
def CheckForException():
if 'test' in sys.modules['__main__'].__file__:
# Probably a unit test.
return
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(''.join(traceback.format_tb(last_tb)))
| Disable braekpad automatic registration while we figure out stuff | Disable braekpad automatic registration while we figure out stuff
Review URL: http://codereview.chromium.org/462022
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@33686 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
| Python | bsd-3-clause | svn2github/chromium-depot-tools,svn2github/chromium-depot-tools,svn2github/chromium-depot-tools |
3dcece1bb4e2490168b21d4298e297e61bdde901 | corehq/ex-submodules/casexml/apps/case/fixtures.py | corehq/ex-submodules/casexml/apps/case/fixtures.py | from casexml.apps.case.xml.generator import safe_element
from casexml.apps.phone.xml import get_casedb_element
class CaseDBFixture(object):
"""Used to provide a casedb-like structure as a fixture
Does not follow the standard FixtureGenerator pattern since it is currently
not used during a regular sync operation, and is user-agnostic
"""
id = "case"
def __init__(self, cases):
if not isinstance(cases, list):
self.cases = [cases]
else:
self.cases = cases
@property
def fixture(self):
"""For a list of cases, return a fixture with all case properties
<fixture id="case">
<case case_id="" case_type="" owner_id="" status="">
<case_name/>
<date_opened/>
<last_modified/>
<case_property />
<index>
<a12345 case_type="" relationship="" />
</index>
<attachment>
<a12345 />
</attachment>
</case>
<case>
...
</case>
</fixture>
"""
element = safe_element("fixture")
element.attrib = {'id': self.id}
for case in self.cases:
element.append(get_casedb_element(case))
return element
| from casexml.apps.case.xml.generator import safe_element
from casexml.apps.phone.xml import get_casedb_element
class CaseDBFixture(object):
"""Used to provide a casedb-like structure as a fixture
Does not follow the standard FixtureGenerator pattern since it is currently
not used during a regular sync operation, and is user-agnostic
"""
id = "case"
def __init__(self, cases):
if not isinstance(cases, list):
self.cases = [cases]
else:
self.cases = cases
@property
def fixture(self):
"""For a list of cases, return a fixture with all case properties
<fixture id="case">
<case case_id="" case_type="" owner_id="" status="">
<case_name/>
<date_opened/>
<last_modified/>
<case_property />
<index>
<a12345 case_type="" relationship="" />
</index>
<attachment>
<a12345 />
</attachment>
</case>
<case>
...
</case>
</fixture>
https://github.com/dimagi/commcare/wiki/casedb
https://github.com/dimagi/commcare/wiki/fixtures
"""
element = safe_element("fixture")
element.attrib = {'id': self.id}
for case in self.cases:
element.append(get_casedb_element(case))
return element
| Add links to fixture and casedb specs | Add links to fixture and casedb specs
| Python | bsd-3-clause | qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq |
28e67e04a88b0195184bf43f013c11ea7f320c4f | conveyor/processor.py | conveyor/processor.py | from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
class BulkProcessor(BaseProcessor):
def process(self):
pass
| from __future__ import absolute_import
from __future__ import division
from xmlrpc2 import client as xmlrpc2
class BaseProcessor(object):
def __init__(self, index, *args, **kwargs):
super(BaseProcessor, self).__init__(*args, **kwargs)
self.index = index
self.client = xmlrpc2.Client(self.index)
def process(self):
raise NotImplementedError
def get_releases(self, name, version=None):
if version is None:
return set(self.client.package_releases(name, True))
else:
return set([version])
class BulkProcessor(BaseProcessor):
def process(self):
pass
| Add a method for getting a list of releases to fetch | Add a method for getting a list of releases to fetch
| Python | bsd-2-clause | crateio/carrier |
c694ac630f36c53c130a63908c6c3576f220a6bd | django-openstack/django_openstack/auth/__init__.py | django-openstack/django_openstack/auth/__init__.py | import django_openstack.urls
class Roles:
USER = 'user'
PROJECT_ADMIN = 'projadmin'
SOFTWARE_ADMIN = 'softadmin'
HARDWARE_ADMIN = 'hardadmin'
ALL_ROLES = (HARDWARE_ADMIN, SOFTWARE_ADMIN,
PROJECT_ADMIN, USER)
@staticmethod
def get_max_role(roles):
if not roles:
return Roles.USER
for role in Roles.ALL_ROLES:
if role in roles:
if role in django_openstack.urls.topbars:
return role
else:
return Roles.USER
@staticmethod
def needs_tenant(roles):
return not (Roles.HARDWARE_ADMIN in roles) and not (Roles.SOFTWARE_ADMIN in roles)
| import django_openstack.urls
class Roles:
USER = 'user'
PROJECT_ADMIN = 'projadmin'
SOFTWARE_ADMIN = 'softadmin'
HARDWARE_ADMIN = 'hardadmin'
ALL_ROLES = (HARDWARE_ADMIN, SOFTWARE_ADMIN,
PROJECT_ADMIN, USER)
@staticmethod
def get_max_role(roles):
if not roles:
return Roles.USER
for role in Roles.ALL_ROLES:
if role in roles:
if role in django_openstack.urls.topbars:
return role
return Roles.USER
@staticmethod
def needs_tenant(roles):
return not (Roles.HARDWARE_ADMIN in roles) and not (Roles.SOFTWARE_ADMIN in roles)
| Return 'user' role as default value | Return 'user' role as default value
| Python | apache-2.0 | griddynamics/osc-robot-openstack-dashboard,griddynamics/osc-robot-openstack-dashboard,griddynamics/osc-robot-openstack-dashboard |
2a986d7c0bab1612e96cace5ce54a188e22af2aa | services/wordpress.py | services/wordpress.py | import json
import foauth
class Wordpress(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.wordpress.com/'
favicon_url = 'http://s2.wp.com/i/favicon.ico'
docs_url = 'http://developer.wordpress.com/docs/api/'
# URLs to interact with the API
authorize_url = 'https://public-api.wordpress.com/oauth2/authorize'
access_token_url = 'https://public-api.wordpress.com/oauth2/token'
api_domain = 'public-api.wordpress.com'
available_permissions = [
(None, 'read and post to your blog'),
]
def parse_token(self, content):
data = json.loads(content)
return data['access_token'], None
| import json
import foauth.providers
class Wordpress(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.wordpress.com/'
favicon_url = 'http://s2.wp.com/i/favicon.ico'
docs_url = 'http://developer.wordpress.com/docs/api/'
# URLs to interact with the API
authorize_url = 'https://public-api.wordpress.com/oauth2/authorize'
access_token_url = 'https://public-api.wordpress.com/oauth2/token'
api_domain = 'public-api.wordpress.com'
available_permissions = [
(None, 'read and post to your blog'),
]
def parse_token(self, content):
data = json.loads(content)
return data['access_token'], None
| Fix the import for Wordpress | Fix the import for Wordpress
| Python | bsd-3-clause | foauth/foauth.org,foauth/foauth.org,foauth/foauth.org |
fbad3c0b80258b02cc2ba81ff1408d24cd69c69d | src/iconclassserver/util.py | src/iconclassserver/util.py | import redis
import json
from django.conf import settings
import iconclass
import requests
import time
def handle_githubpushes():
redis_c = redis.StrictRedis()
while True:
data = redis_c.lpop(settings.REDIS_PREFIX + '_gitpushes')
if not data: break
data = json.loads(data)
full_name = data['repository']['full_name']
for commit in data.get('commits', []):
committer = commit['committer']['email']
timestamp = commit['timestamp']
commit_id = commit['id']
for filename in commit['modified']:
if filename.startswith('data/'):
fn, language = iconclass.action(filename[5:])
if not fn: continue
r = requests.get('https://raw.githubusercontent.com/'+full_name+'/master/'+filename)
if r.status_code == 200:
fn(r.content, language)
buf = [time.strftime('%Y%m%d %H:%M:%S'), committer, filename, timestamp, commit_id]
redis_c.lpush(settings.REDIS_PREFIX + '_gitpushlog', ' '.join(buf)) | import redis
import json
from django.conf import settings
import iconclass
import requests
import time
import os
def handle_githubpushes():
redis_c = redis.StrictRedis()
while True:
data = redis_c.lpop(settings.REDIS_PREFIX + '_gitpushes')
if not data: break
data = json.loads(data)
full_name = data['repository']['full_name']
for commit in data.get('commits', []):
committer = commit['committer']['email']
timestamp = commit['timestamp']
commit_id = commit['id']
for filename in commit['modified']:
if filename.startswith('data/'):
filepath, filename = os.path.split(filename)
fn, language = iconclass.action(filename[5:])
if not fn: continue
r = requests.get('https://raw.githubusercontent.com/'+full_name+'/master/'+filename)
if r.status_code == 200:
fn(r.content, language)
buf = [time.strftime('%Y%m%d %H:%M:%S'), committer, filename, timestamp, commit_id]
redis_c.lpush(settings.REDIS_PREFIX + '_gitpushlog', ' '.join(buf)) | Handle filenames with path prefixes in git commit logs | Handle filenames with path prefixes in git commit logs
| Python | mit | epoz/iconclass-server,epoz/iconclass-server |
78ebec64e51c43005488bc1b9ce84fca65d069e4 | planet_alignment/app/app_factory.py | planet_alignment/app/app_factory.py | """
.. module:: app_factory
:platform: linux
:synopsis:
.. moduleauthor:: Paul Fanelli <[email protected]>
.. modulecreated:: 6/27/15
"""
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
return App(self._system_data, self._plugins, self._time)
| """
.. module:: app_factory
:platform: linux
:synopsis:
.. moduleauthor:: Paul Fanelli <[email protected]>
.. modulecreated:: 6/27/15
"""
from zope.interface import implements
from planet_alignment.app.app import App
from planet_alignment.app.interface import IAppFactory
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.data.system_data import SystemData
from planet_alignment.mgr.plugins_mgr import PluginsManager
class AppFactory(object):
"""This is the class factory for the App.
- **parameters** and **types**::
:param cmd_args: The command-line args.
:type cmd_args: argparse Namespace
"""
implements(IAppFactory)
def __init__(self, cmd_args):
data = BunchParser().parse(cmd_args.config)
self._system_data = SystemData(data)
self._plugins = PluginsManager(cmd_args.plugins)
self._time = cmd_args.time
def create(self):
"""Returns the created App object.
:return: Returns the App object.
:rtype: App class.
"""
return App(self._system_data, self._plugins, self._time)
| Document the AppFactory, add the doc headers. | Document the AppFactory, add the doc headers.
| Python | mit | paulfanelli/planet_alignment |
e7e35e3b40acb1d9b23538631a1b2a37f7723dfc | tests/util.py | tests/util.py | # -*- coding: utf-8 -*-
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import unicode_literals, absolute_import
import pytest
import requests
import uuid
from six import string_types
def uuid_value():
return uuid.uuid4().hex
def is_string_type(obj):
"""
Test whether obj is a string type
:param obj: object to test
:return: bool, whether obj is a string type
"""
return any(isinstance(obj, strtype)
for strtype in string_types)
def has_connection():
try:
requests.get("https://github.com/")
return True
except requests.ConnectionError:
return False
# In case we run tests in an environment without internet connection.
requires_internet = pytest.mark.skipif(not has_connection(), reason="requires internet connection")
| # -*- coding: utf-8 -*-
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import unicode_literals, absolute_import
import pytest
import requests
import uuid
from six import string_types
def uuid_value():
return uuid.uuid4().hex
def is_string_type(obj):
"""
Test whether obj is a string type
:param obj: object to test
:return: bool, whether obj is a string type
"""
return any(isinstance(obj, strtype)
for strtype in string_types)
def has_connection():
try:
requests.head("http://github.com/")
return True
except requests.ConnectionError:
return False
# In case we run tests in an environment without internet connection.
requires_internet = pytest.mark.skipif(not has_connection(), reason="requires internet connection")
| Make Internet connection check a little faster | Make Internet connection check a little faster
Testing whether Internet connect is available only cares about if the
remote server is able to response something, but not the concrete
content returned. So, when remote server responses 301, it's enough, no
need to wait for a 200 response with the real content from redirected
URL.
Signed-off-by: Chenxiong Qi <[email protected]>
| Python | bsd-3-clause | projectatomic/atomic-reactor,fr34k8/atomic-reactor,fr34k8/atomic-reactor,projectatomic/atomic-reactor,DBuildService/atomic-reactor,DBuildService/atomic-reactor |
edad01902f8c9d23da106c538d118e28da286821 | lesion/lifio.py | lesion/lifio.py | import javabridge as jv
import bioformats as bf
def start(max_heap_size='8G'):
"""Start the Java Virtual Machine, enabling bioformats IO.
Parameters
----------
max_heap_size : string, optional
The maximum memory usage by the virtual machine. Valid strings
include '256M', '64k', and '2G'. Expect to need a lot.
"""
jv.start_vm(class_path=bf.JARS, max_heap_size=max_heap_size)
def done():
"""Kill the JVM. Once killed, it cannot be restarted.
Notes
-----
See the python-javabridge documentation for more information.
"""
jv.kill_vm()
| import numpy as np
import javabridge as jv
import bioformats as bf
def start(max_heap_size='8G'):
"""Start the Java Virtual Machine, enabling bioformats IO.
Parameters
----------
max_heap_size : string, optional
The maximum memory usage by the virtual machine. Valid strings
include '256M', '64k', and '2G'. Expect to need a lot.
"""
jv.start_vm(class_path=bf.JARS, max_heap_size=max_heap_size)
def done():
"""Kill the JVM. Once killed, it cannot be restarted.
Notes
-----
See the python-javabridge documentation for more information.
"""
jv.kill_vm()
def lif_metadata_string_size(filename):
"""Get the length in bytes of the metadata string of a LIF file.
Parameters
----------
filename : string
Path to the LIF file.
Returns
-------
length : int
The length in bytes of the metadata string.
Notes
-----
This is based on code by Lee Kamentsky. [1]
References
----------
[1] https://github.com/CellProfiler/python-bioformats/issues/8
"""
with open(filename, 'rb') as fd:
fd.read(9)
length = np.frombuffer(fd.read(4), "<i4")[0]
return length
| Add function to determine metadata length | Add function to determine metadata length
| Python | bsd-3-clause | jni/lesion |
6a7a61d514ac738f8de29efe280ecfedfaf72685 | ttrss/auth.py | ttrss/auth.py | from requests.auth import AuthBase
import requests
import json
from exceptions import raise_on_error
class TTRAuth(AuthBase):
def __init__(self, user, password):
self.user = user
self.password = password
def response_hook(self, r, **kwargs):
j = json.loads(r.content)
if int(j['status']) == 0:
return r
sid = None
if r.headers['set-cookie']:
sid = r.headers['set-cookie'].split(';')[0].split('=')[1]
r.request.headers['Cookie'] = 'ttrss_api_sid={0}'.format(sid)
else:
sid = r.request.headers['Cookie'].split('=')[1]
res = requests.post(r.request.url, json.dumps({
'sid': sid,
'op': 'login',
'user': self.user,
'password': self.password
}))
raise_on_error(res)
r.request.deregister_hook('response', self.response_hook)
_r = requests.Session().send(r.request)
_r.cookies = r.cookies
raise_on_error(_r)
return _r
def __call__(self, r):
r.register_hook('response', self.response_hook)
return r
| from requests.auth import AuthBase
import requests
import json
from exceptions import raise_on_error
class TTRAuth(AuthBase):
def __init__(self, user, password):
self.user = user
self.password = password
def response_hook(self, r, **kwargs):
j = json.loads(r.content)
if int(j['status']) == 0:
return r
sid = None
if 'ttrss_api_sid' in r.cookies:
sid = r.cookies['ttrss_api_sid']
r.request.headers['Cookie'] = 'ttrss_api_sid={0}'.format(sid)
else:
sid = r.request.headers['Cookie'].split('=')[1]
res = requests.post(r.request.url, json.dumps({
'sid': sid,
'op': 'login',
'user': self.user,
'password': self.password
}))
raise_on_error(res)
r.request.deregister_hook('response', self.response_hook)
_r = requests.Session().send(r.request)
_r.cookies = r.cookies
raise_on_error(_r)
return _r
def __call__(self, r):
r.register_hook('response', self.response_hook)
return r
| Clean up cookie lookup in TTRAuth | Clean up cookie lookup in TTRAuth
| Python | mit | Vassius/ttrss-python |
3aff93b43f880eab72ca205e1f354e7179907132 | fix_removal.py | fix_removal.py | import os
from distutils import sysconfig
# Check to see if the previous version was installed and clean up
# installed-files.txt
prune = ['var/', 'var/run/', 'var/log/']
python_lib_dir = sysconfig.get_python_lib()
fixed = False
for dir_path, dir_names, file_names in os.walk(python_lib_dir):
for dir_name in dir_names:
if dir_name[:21] == 'newrelic_plugin_agent' and \
dir_name[-8:] == 'egg-info':
filename = '%s/%s/installed-files.txt' % (python_lib_dir, dir_name)
with open(filename, 'r') as handle:
output = []
for line in handle:
safe = True
for dir_path in prune:
if line[-(len(dir_path) + 1):].strip() == dir_path:
safe = False
fixed = True
break
if safe:
output.append(line.strip())
with open(filename, 'w') as handle:
handle.write('\n'.join(output))
break
break
if fixed:
print 'Fixed a serious uninstallation problem in previous version'
| import os
import site
# Check to see if the previous version was installed and clean up
# installed-files.txt
prune = ['var/', 'var/run/', 'var/log/']
package_directories = site.PREFIXES
if site.USER_SITE:
package_directories.append(site.USER_SITE)
for package_dir in package_directories:
print 'Checking %s for newrelic_plugin_agent installation manifest' % package_dir
fixed = False
for dir_path, dir_names, file_names in os.walk(package_dir):
for dir_name in dir_names:
if dir_name[:21] == 'newrelic_plugin_agent' and \
dir_name[-8:] == 'egg-info':
filename = '%s/%s/installed-files.txt' % (python_lib_dir, dir_name)
with open(filename, 'r') as handle:
output = []
for line in handle:
safe = True
for dir_path in prune:
if line[-(len(dir_path) + 1):].strip() == dir_path:
safe = False
fixed = True
break
if safe:
output.append(line.strip())
if fixed:
with open(filename, 'w') as handle:
handle.write('\n'.join(output))
break
break
if fixed:
print 'Fixed a serious uninstallation problem in previous version'
else:
print 'Did not find the installed-files.txt manifest uninstallation issue'
| Make the script check all the site package directories | Make the script check all the site package directories
| Python | bsd-3-clause | notnmeyer/newrelic-plugin-agent,whiteear/newrelic-plugin-agent,alonisser/newrelic-plugin-agent,NewRelic-Python-Plugins/newrelic-python-agent,alonisser/newrelic-plugin-agent,whiteear/newrelic-plugin-agent,NewRelic-Python-Plugins/newrelic-python-agent,MeetMe/newrelic-plugin-agent,rounds/newrelic-plugin-agent,whiteear/newrelic-plugin-agent,notnmeyer/newrelic-plugin-agent,notnmeyer/newrelic-plugin-agent,ModusCreateOrg/newrelic-plugin-agent,MeetMe/newrelic-plugin-agent,NewRelic-Python-Plugins/newrelic-python-agent,rounds/newrelic-plugin-agent,rounds/newrelic-plugin-agent,alonisser/newrelic-plugin-agent,ModusCreateOrg/newrelic-plugin-agent,MeetMe/newrelic-plugin-agent,ModusCreateOrg/newrelic-plugin-agent |
a275068193c87c5a27758c17d7699e963a0bdfa8 | llvmpy/src/Support/FormattedStream.py | llvmpy/src/Support/FormattedStream.py | from binding import *
from ..namespace import llvm
from raw_ostream import raw_ostream
@llvm.Class(raw_ostream)
class formatted_raw_ostream:
_include_ = 'llvm/Support/FormattedStream.h'
new = Constructor(ref(raw_ostream), cast(bool, Bool))
| from binding import *
from ..namespace import llvm
from raw_ostream import raw_ostream
@llvm.Class(raw_ostream)
class formatted_raw_ostream:
_include_ = 'llvm/Support/FormattedStream.h'
_new = Constructor(ref(raw_ostream), cast(bool, Bool))
@CustomPythonStaticMethod
def new(stream, destroy=False):
inst = formatted_raw_ostream._new(stream, destroy)
inst.__underlying_stream = stream # to prevent it being freed first
return inst
| Fix formatted_raw_ostream ownership error with the underlying stream. | Fix formatted_raw_ostream ownership error with the underlying stream.
| Python | bsd-3-clause | llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy,llvmpy/llvmpy |
4f8aed6ed3491e62911619eaa9aa4b86b30065e4 | leonardo/module/leonardo_auth/widget/userlogin/models.py | leonardo/module/leonardo_auth/widget/userlogin/models.py | # -#- coding: utf-8 -#-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from leonardo.module.web.models import Widget
LOGIN_TYPE_CHOICES = (
(1, _("Admin")),
(2, _("Public")),
)
class UserLoginWidget(Widget):
type = models.PositiveIntegerField(verbose_name=_(
"type"), choices=LOGIN_TYPE_CHOICES, default=2)
def get_context_data(self, request):
context = super(UserLoginWidget, self).get_context_data(request)
if 'next' in request.GET:
context['next'] = request.GET['next']
return context
class Meta:
abstract = True
verbose_name = _("user login")
verbose_name_plural = _("user logins")
| # -#- coding: utf-8 -#-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from leonardo.module.web.models import Widget
LOGIN_TYPE_CHOICES = (
(1, _("Admin")),
(2, _("Public")),
)
class UserLoginWidget(Widget):
type = models.PositiveIntegerField(verbose_name=_(
"type"), choices=LOGIN_TYPE_CHOICES, default=2)
def get_context_data(self, request):
context = super(UserLoginWidget, self).get_context_data(request)
if 'next' in request.GET:
context['next'] = request.GET['next']
else:
context['next'] = request.path
return context
class Meta:
abstract = True
verbose_name = _("user login")
verbose_name_plural = _("user logins")
| Fix missing next in context. | Fix missing next in context.
| Python | bsd-3-clause | django-leonardo/django-leonardo,django-leonardo/django-leonardo,django-leonardo/django-leonardo,django-leonardo/django-leonardo |