Dataset schema:
commit: stringlengths (40 to 40)
old_file: stringlengths (4 to 118)
new_file: stringlengths (4 to 118)
old_contents: stringlengths (1 to 2.94k)
new_contents: stringlengths (1 to 4.43k)
subject: stringlengths (15 to 444)
message: stringlengths (16 to 3.45k)
lang: stringclasses (1 value)
license: stringclasses (13 values)
repos: stringlengths (5 to 43.2k)
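The columns above are the per-row fields of a commit-diff corpus: each row pairs a commit hash with a file's path and contents before and after the change, plus the commit subject and message. As a minimal sketch of how such a table could be loaded and inspected, the snippet below assumes the rows are published as a Hugging Face dataset; the path "org/commit-dataset" is a placeholder, not the real identifier.

```python
from datasets import load_dataset

# Placeholder path: substitute the actual dataset identifier on the Hub.
ds = load_dataset("org/commit-dataset", split="train")

# Each row carries the commit hash, the touched file's path, the full file
# body before and after the change, and the commit subject/message, along
# with the language, license, and the repositories the commit appears in.
row = ds[0]
print(row["commit"], row["old_file"])
print(row["subject"])
print(row["new_contents"][:200])  # preview the post-change file body
```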
3a9568b4d4de969b1e2031e8d2d3cdd7bd56824f
zerver/migrations/0237_rename_zulip_realm_to_zulipinternal.py
zerver/migrations/0237_rename_zulip_realm_to_zulipinternal.py
# -*- coding: utf-8 -*- from django.conf import settings from django.db import migrations from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor from django.db.migrations.state import StateApps def rename_zulip_realm_to_zulipinternal(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: if not settings.PRODUCTION: return Realm = apps.get_model('zerver', 'Realm') UserProfile = apps.get_model('zerver', 'UserProfile') if Realm.objects.count() == 0: # Database not yet populated, do nothing: return if Realm.objects.filter(string_id="zulipinternal").exists(): return internal_realm = Realm.objects.get(string_id="zulip") # For safety, as a sanity check, verify that "internal_realm" is indeed the realm for system bots: welcome_bot = UserProfile.objects.get(email="[email protected]") assert welcome_bot.realm.id == internal_realm.id internal_realm.string_id = "zulipinternal" internal_realm.name = "System use only" internal_realm.save() class Migration(migrations.Migration): dependencies = [ ('zerver', '0236_remove_illegal_characters_email_full'), ] operations = [ migrations.RunPython(rename_zulip_realm_to_zulipinternal) ]
# -*- coding: utf-8 -*- from django.conf import settings from django.db import migrations from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor from django.db.migrations.state import StateApps def rename_zulip_realm_to_zulipinternal(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: if not settings.PRODUCTION: return Realm = apps.get_model('zerver', 'Realm') UserProfile = apps.get_model('zerver', 'UserProfile') if Realm.objects.count() == 0: # Database not yet populated, do nothing: return if Realm.objects.filter(string_id="zulipinternal").exists(): return if not Realm.objects.filter(string_id="zulip").exists(): # If the user renamed the `zulip` system bot realm (or deleted # it), there's nothing for us to do. return internal_realm = Realm.objects.get(string_id="zulip") # For safety, as a sanity check, verify that "internal_realm" is indeed the realm for system bots: welcome_bot = UserProfile.objects.get(email="[email protected]") assert welcome_bot.realm.id == internal_realm.id internal_realm.string_id = "zulipinternal" internal_realm.name = "System use only" internal_realm.save() class Migration(migrations.Migration): dependencies = [ ('zerver', '0236_remove_illegal_characters_email_full'), ] operations = [ migrations.RunPython(rename_zulip_realm_to_zulipinternal) ]
Fix zulipinternal migration corner case.
migrations: Fix zulipinternal migration corner case. It's theoretically possible to have configured a Zulip server where the system bots live in the same realm as normal users (and may have in fact been the default in early Zulip releases? Unclear.). We should handle these without the migration intended to clean up naming for the system bot realm crashing. Fixes #13660.
Python
apache-2.0
brainwane/zulip,andersk/zulip,hackerkid/zulip,synicalsyntax/zulip,andersk/zulip,hackerkid/zulip,punchagan/zulip,shubhamdhama/zulip,showell/zulip,zulip/zulip,synicalsyntax/zulip,kou/zulip,rht/zulip,showell/zulip,brainwane/zulip,punchagan/zulip,shubhamdhama/zulip,hackerkid/zulip,andersk/zulip,kou/zulip,brainwane/zulip,brainwane/zulip,andersk/zulip,showell/zulip,shubhamdhama/zulip,kou/zulip,punchagan/zulip,zulip/zulip,kou/zulip,showell/zulip,brainwane/zulip,eeshangarg/zulip,timabbott/zulip,eeshangarg/zulip,hackerkid/zulip,shubhamdhama/zulip,zulip/zulip,timabbott/zulip,brainwane/zulip,eeshangarg/zulip,rht/zulip,zulip/zulip,timabbott/zulip,rht/zulip,synicalsyntax/zulip,andersk/zulip,showell/zulip,shubhamdhama/zulip,rht/zulip,hackerkid/zulip,rht/zulip,hackerkid/zulip,eeshangarg/zulip,kou/zulip,synicalsyntax/zulip,brainwane/zulip,shubhamdhama/zulip,eeshangarg/zulip,andersk/zulip,rht/zulip,timabbott/zulip,punchagan/zulip,showell/zulip,eeshangarg/zulip,synicalsyntax/zulip,punchagan/zulip,timabbott/zulip,zulip/zulip,andersk/zulip,punchagan/zulip,zulip/zulip,hackerkid/zulip,timabbott/zulip,synicalsyntax/zulip,zulip/zulip,showell/zulip,eeshangarg/zulip,kou/zulip,punchagan/zulip,synicalsyntax/zulip,rht/zulip,kou/zulip,timabbott/zulip,shubhamdhama/zulip
97e3b202bbe6726a4056facb8b4690b0710029a9
handroll/tests/test_site.py
handroll/tests/test_site.py
# Copyright (c) 2015, Matt Layman import os import tempfile from handroll.site import Site from handroll.tests import TestCase class TestSite(TestCase): def test_finds_valid_site_root_from_templates(self): original = os.getcwd() valid_site = tempfile.mkdtemp() open(os.path.join(valid_site, 'template.html'), 'w').close() os.chdir(valid_site) site = Site() self.assertEqual(valid_site, site.path) os.chdir(original) def test_finds_valid_site_root_from_conf(self): original = os.getcwd() valid_site = tempfile.mkdtemp() open(os.path.join(valid_site, Site.CONFIG), 'w').close() os.chdir(valid_site) site = Site() self.assertEqual(valid_site, site.path) os.chdir(original) def test_site_has_absolute_path(self): original = os.getcwd() tempdir = tempfile.mkdtemp() site_path = os.path.join(tempdir, 'site') os.mkdir(site_path) os.chdir(tempdir) site = Site('site') self.assertEqual(site_path, site.path) os.chdir(original)
# Copyright (c) 2015, Matt Layman import os import tempfile from handroll.site import Site from handroll.tests import TestCase class TestSite(TestCase): def test_finds_valid_site_root_from_templates(self): original = os.getcwd() valid_site = os.path.realpath(tempfile.mkdtemp()) open(os.path.join(valid_site, 'template.html'), 'w').close() os.chdir(valid_site) site = Site() self.assertEqual(valid_site, site.path) os.chdir(original) def test_finds_valid_site_root_from_conf(self): original = os.getcwd() valid_site = os.path.realpath(tempfile.mkdtemp()) open(os.path.join(valid_site, Site.CONFIG), 'w').close() os.chdir(valid_site) site = Site() self.assertEqual(valid_site, site.path) os.chdir(original) def test_site_has_absolute_path(self): original = os.getcwd() tempdir = os.path.realpath(tempfile.mkdtemp()) site_path = os.path.join(tempdir, 'site') os.mkdir(site_path) os.chdir(tempdir) site = Site('site') self.assertEqual(site_path, site.path) os.chdir(original)
Use a real path when testing sites.
Use a real path when testing sites. Mac OS X returns link paths when calling `mkdtemp`. Calling realpath allows the site path comparison to be consistent across platforms.
Python
bsd-2-clause
handroll/handroll
7e44a8bd38105144111624710819a1ee54891222
campos_checkin/__openerp__.py
campos_checkin/__openerp__.py
# -*- coding: utf-8 -*- # Copyright 2017 Stein & Gabelgaard ApS # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). { 'name': 'Campos Checkin', 'description': """ CampOS Check In functionality""", 'version': '8.0.1.0.0', 'license': 'AGPL-3', 'author': 'Stein & Gabelgaard ApS', 'website': 'www.steingabelgaard.dk', 'depends': [ 'campos_jobber_final', 'campos_transportation', 'campos_crewnet', 'web_ir_actions_act_window_message', #'web_tree_dynamic_colored_field', ], 'data': [ 'wizards/campos_checkin_grp_wiz.xml', 'views/event_registration.xml', 'wizards/campos_checkin_wiz.xml', 'security/campos_checkin.xml', 'views/campos_event_participant.xml', 'views/campos_mat_report.xml', ], 'demo': [ ], }
# -*- coding: utf-8 -*- # Copyright 2017 Stein & Gabelgaard ApS # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). { 'name': 'Campos Checkin', 'description': """ CampOS Check In functionality""", 'version': '8.0.1.0.0', 'license': 'AGPL-3', 'author': 'Stein & Gabelgaard ApS', 'website': 'www.steingabelgaard.dk', 'depends': [ 'campos_jobber_final', 'campos_transportation', 'campos_crewnet', 'web_ir_actions_act_window_message', #'web_tree_dynamic_colored_field', ], 'data': [ 'wizards/campos_checkin_wiz.xml', 'security/campos_checkin.xml', 'views/campos_event_participant.xml', 'views/campos_mat_report.xml', 'wizards/campos_checkin_grp_wiz.xml', 'views/event_registration.xml', ], 'demo': [ ], }
Fix order for menu ref
Fix order for menu ref
Python
agpl-3.0
sl2017/campos
3cd99c23099a625da711e3ac458a46a7b364d83c
hystrix/pool.py
hystrix/pool.py
from __future__ import absolute_import from concurrent.futures import ThreadPoolExecutor import logging import six log = logging.getLogger(__name__) class PoolMetaclass(type): __instances__ = dict() __blacklist__ = ('Pool', 'PoolMetaclass') def __new__(cls, name, bases, attrs): if name in cls.__blacklist__: return super(PoolMetaclass, cls).__new__(cls, name, bases, attrs) pool_key = attrs.get('pool_key') or '{}Pool'.format(name) new_class = super(PoolMetaclass, cls).__new__(cls, pool_key, bases, attrs) setattr(new_class, 'pool_key', pool_key) if pool_key not in cls.__instances__: cls.__instances__[pool_key] = new_class return cls.__instances__[pool_key] class Pool(six.with_metaclass(PoolMetaclass, ThreadPoolExecutor)): pool_key = None def __init__(self, pool_key=None, max_workers=5): super(Pool, self).__init__(max_workers)
from __future__ import absolute_import from concurrent.futures import ProcessPoolExecutor import logging import six log = logging.getLogger(__name__) class PoolMetaclass(type): __instances__ = dict() __blacklist__ = ('Pool', 'PoolMetaclass') def __new__(cls, name, bases, attrs): if name in cls.__blacklist__: return super(PoolMetaclass, cls).__new__(cls, name, bases, attrs) pool_key = attrs.get('pool_key') or '{}Pool'.format(name) new_class = super(PoolMetaclass, cls).__new__(cls, pool_key, bases, attrs) setattr(new_class, 'pool_key', pool_key) if pool_key not in cls.__instances__: cls.__instances__[pool_key] = new_class return cls.__instances__[pool_key] class Pool(six.with_metaclass(PoolMetaclass, ProcessPoolExecutor)): pool_key = None def __init__(self, pool_key=None, max_workers=5): super(Pool, self).__init__(max_workers)
Change Pool to use ProcessPoolExecutor
Change Pool to use ProcessPoolExecutor
Python
apache-2.0
wiliamsouza/hystrix-py,wiliamsouza/hystrix-py
c52edc120f38acb079fa364cdb684fc2052d4727
corehq/messaging/smsbackends/trumpia/urls.py
corehq/messaging/smsbackends/trumpia/urls.py
from django.conf.urls import url from corehq.messaging.smsbackends.trumpia.views import TrumpiaIncomingView urlpatterns = [ url(r'^sms/(?P<api_key>[\w-]+)/?$', TrumpiaIncomingView.as_view(), name=TrumpiaIncomingView.urlname), ]
from django.conf.urls import url from corehq.apps.hqwebapp.decorators import waf_allow from corehq.messaging.smsbackends.trumpia.views import TrumpiaIncomingView urlpatterns = [ url(r'^sms/(?P<api_key>[\w-]+)/?$', waf_allow('XSS_QUERYSTRING')(TrumpiaIncomingView.as_view()), name=TrumpiaIncomingView.urlname), ]
Annotate trumpia url to say it allows XML in the querystring
Annotate trumpia url to say it allows XML in the querystring
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
ecb6f388ba3c0f17fcfd22a8cafcda974e7e0fc8
site.py
site.py
import sys from flask import Flask, render_template from flask_flatpages import FlatPages from flask_frozen import Freezer DEBUG = True FLATPAGES_AUTO_RELOAD = DEBUG FLATPAGES_EXTENSION = '.md' FREEZER_DESTINATION = 'dist' app = Flask(__name__) app.config.from_object(__name__) pages = FlatPages(app) freezer = Freezer(app) @app.route('/') @app.route('/bio/') def index(): return render_template('bio.html', pages=pages) @app.route('/portfolio/') def portfolio(): return render_template('portfolio.html', pages=pages) @app.route('/portfolio/<path:path>/') def page(path): page = pages.get_or_404(path) return render_template('page.html', page=page) @app.route('/contatti/') def contatti(): page = pages.get_or_404("contatti") return render_template('page.html', page=page) if __name__ == '__main__': if len(sys.argv) > 1 and sys.argv[1] == "build": freezer.freeze() else: app.run(port=8080)
import sys from flask import Flask, render_template from flask_flatpages import FlatPages, flatpages from flask_frozen import Freezer DEBUG = True FLATPAGES_AUTO_RELOAD = DEBUG FLATPAGES_EXTENSION = '.md' FREEZER_DESTINATION = 'dist' app = Flask(__name__) app.config.from_object(__name__) pages = FlatPages(app) freezer = Freezer(app) @app.route('/') @app.route('/bio/') def index(): return render_template('bio.html', pages=pages) @app.route('/portfolio/') def portfolio(): projects = (p for p in pages if 'date' in p.meta) projects = sorted(projects, reverse=True, key=lambda p: p.meta['date']) return render_template('portfolio.html', pages=projects) @app.route('/portfolio/<path:path>/') def page(path): page = pages.get_or_404(path) return render_template('project.html', page=page) @app.route('/contatti/') def contatti(): page = pages.get_or_404("contatti") return render_template('page.html', page=page) if __name__ == '__main__': if len(sys.argv) > 1 and sys.argv[1] == "build": freezer.freeze() else: app.run(port=8080)
Return project ordered by date
Return project ordered by date
Python
mit
claudiopastorini/claudiopastorini.github.io,claudiopastorini/claudiopastorini.github.io,claudiopastorini/claudiopastorini.github.io
14546b0ba1dd7f5a9ea623fde85737fa95bd2843
test/integration/test_node_propagation.py
test/integration/test_node_propagation.py
from kitten.server import KittenServer from gevent.pool import Group from mock import MagicMock class TestPropagation(object): def setup_method(self, method): self.servers = Group() for port in range(4): ns = MagicMock() ns.port = 9812 + port server = KittenServer(ns) self.servers.spawn(server.listen_forever) def test_node_propagation(self): """ Tests that check node propagation 1) Spin up four servers. 2) Make the first one send a sync request to all three others. 3) Count the numbers of requests made. 4) Check databases to see that they all know each other. """ pass
from kitten.server import KittenServer from gevent.pool import Group from mock import MagicMock class TestPropagation(object): def setup_method(self, method): self.servers = Group() for port in range(4): ns = MagicMock() ns.port = 9812 + port server = KittenServer(ns) self.servers.spawn(server.listen_forever) def teardown_method(self, method): self.servers.kill(timeout=1) def test_node_propagation(self): """ Tests that check node propagation 1) Spin up four servers. 2) Make the first one send a sync request to all three others. 3) Count the numbers of requests made. 4) Check databases to see that they all know each other. """ pass
Add teardown of integration test
Add teardown of integration test
Python
mit
thiderman/network-kitten
58976f0e63376d056c482b3cd0e103c0a0cccb9e
test.py
test.py
#!/usr/bin/env python import unittest import ghstats class TestStats(unittest.TestCase): def test_cli(self): """ Test command line arguments. """ count = ghstats.main_cli(["kefir500/apk-icon-editor", "-q", "-d"]) self.assertTrue(count > 0) def test_releases(self): """ Download all releases. """ stats = ghstats.download_stats("kefir500", "apk-icon-editor", None, False, ghstats.get_env_token(), False) self.assertTrue(isinstance(stats, list)) count = ghstats.get_stats_downloads(stats, True) self.assertTrue(count > 0) def test_release(self): """ Download latest release. """ stats = ghstats.download_stats("kefir500", "apk-icon-editor", None, True, ghstats.get_env_token(), False) self.assertTrue(isinstance(stats, dict)) count = ghstats.get_stats_downloads(stats, True) self.assertTrue(count > 0) def test_invalid(self): """ Check nonexistent repository. """ self.assertRaises(ghstats.GithubRepoError, lambda: ghstats.download_stats("kefir500", "foobar", None, False, ghstats.get_env_token(), True)) if __name__ == '__main__': unittest.main()
#!/usr/bin/env python import unittest import ghstats class TestStats(unittest.TestCase): def test_cli(self): """ Test command line arguments. """ count = ghstats.main_cli(["kefir500/apk-icon-editor", "-q", "-d"]) self.assertTrue(count > 0) def test_releases(self): """ Download all releases. """ stats = ghstats.download_stats("kefir500", "apk-icon-editor", None, False, ghstats.get_env_token(), False) self.assertTrue(isinstance(stats, list)) count = ghstats.get_stats_downloads(stats, True) self.assertTrue(count > 0) def test_release(self): """ Download latest release. """ stats = ghstats.download_stats("kefir500", "apk-icon-editor", None, True, ghstats.get_env_token(), False) self.assertTrue(isinstance(stats, dict)) count = ghstats.get_stats_downloads(stats, True) self.assertTrue(count > 0) def test_invalid(self): """ Check nonexistent repository. """ self.assertRaises(ghstats.GithubRepoError, lambda: ghstats.download_stats("kefir500", "foobar", None, False, ghstats.get_env_token(), True)) if __name__ == "__main__": unittest.main()
Change single quotes to double
Change single quotes to double
Python
mit
kefir500/ghstats
d159f8201b9d9aeafd24f07a9e39855fc537182d
cocoscore/tools/data_tools.py
cocoscore/tools/data_tools.py
import pandas as pd def load_data_frame(data_frame_path, sort_reindex=False, class_labels=True): """ Load a sentence data set as pandas DataFrame from a given path. :param data_frame_path: the path to load the pandas DataFrame from :param sort_reindex: if True, the returned data frame will be sorted by PMID and reindex by 0, 1, 2, ... :param class_labels: if True, the class label is assumed to be present as the last column :return: a pandas DataFrame loaded from the given path """ column_names = ['pmid', 'paragraph', 'sentence', 'entity1', 'entity2', 'sentence_text'] if class_labels: column_names.append('class') data_df = pd.read_csv(data_frame_path, sep='\t', header=None, index_col=False, names=column_names) if sort_reindex: data_df.sort_values('pmid', axis=0, inplace=True, kind='mergesort') data_df.reset_index(inplace=True, drop=True) assert data_df.isnull().sum().sum() == 0 return data_df
import pandas as pd def load_data_frame(data_frame_path, sort_reindex=False, class_labels=True, match_distance=False): """ Load a sentence data set as pandas DataFrame from a given path. :param data_frame_path: the path to load the pandas DataFrame from :param sort_reindex: if True, the returned data frame will be sorted by PMID and reindex by 0, 1, 2, ... :param class_labels: if True, the class label is assumed to be present as the second-to-last column :param match_distance: if True, the distance between the closest match is assumed to be present as the last column :return: a pandas DataFrame loaded from the given path """ column_names = ['pmid', 'paragraph', 'sentence', 'entity1', 'entity2', 'sentence_text'] if class_labels: column_names.append('class') if match_distance: column_names.append('distance') data_df = pd.read_csv(data_frame_path, sep='\t', header=None, index_col=False, names=column_names) if sort_reindex: data_df.sort_values('pmid', axis=0, inplace=True, kind='mergesort') data_df.reset_index(inplace=True, drop=True) assert data_df.isnull().sum().sum() == 0 return data_df
Add match_distance flag to load_data_frame()
Add match_distance flag to load_data_frame()
Python
mit
JungeAlexander/cocoscore
d6dcd5ede1004b4f3dfbaba09e46a6728e8287a7
qipipe/qiprofile/sync.py
qipipe/qiprofile/sync.py
from qiprofile_rest_client.helpers import database from qiprofile_rest_client.model.subject import Subject from . import (clinical, imaging) def sync_session(project, collection, subject, session, filename): """ Updates the qiprofile database from the XNAT database content for the given session. :param project: the XNAT project name :param collection: the image collection name :param subject: the subject number :param session: the XNAT session name, without subject prefix :param filename: the XLS input file location """ # Get or create the database subject. sbj_pk = dict(project=project, collection=collection, number=subject) sbj = database.get_or_create(Subject, sbj_pk) # Update the clinical information from the XLS input. clinical.sync(sbj, filename) # Update the imaging information from XNAT. imaging.sync(sbj, session)
from qiprofile_rest_client.helpers import database from qiprofile_rest_client.model.subject import Subject from qiprofile_rest_client.model.imaging import Session from . import (clinical, imaging) def sync_session(project, collection, subject, session, filename): """ Updates the qiprofile database from the XNAT database content for the given session. :param project: the XNAT project name :param collection: the image collection name :param subject: the subject number :param session: the XNAT session number :param filename: the XLS input file location """ # Get or create the subject database subject. key = dict(project=project, collection=collection, number=subject) sbj = database.get_or_create(Subject, key) # Update the clinical information from the XLS input. clinical.sync(sbj, filename) # Update the imaging information from XNAT. imaging.sync(sbj, session)
Use the REST client get_or_create helper function.
Use the REST client get_or_create helper function.
Python
bsd-2-clause
ohsu-qin/qipipe
24c83c6a7a1981184545a72b3691a29121d81050
lib-dynload/lz4/__init__.py
lib-dynload/lz4/__init__.py
import sys import os p1, p2 = sys.version_info[:2] curpath = os.path.abspath( sys.argv[0] ) if os.path.islink(curpath): curpath = os.readlink(curpath) currentdir = os.path.dirname( curpath ) build_dir = os.path.abspath( os.path.join(currentdir, "lib-dynload", "lz4", "build") ) if not os.path.isdir(build_dir): build_dir = os.path.abspath( os.path.join(currentdir, "..", "lib-dynload", "lz4", "build") ) if not os.path.isdir(build_dir): build_dir = os.path.abspath( os.path.join(currentdir, "..", "..", "lib-dynload", "lz4", "build") ) dirs = os.listdir(build_dir) for d in dirs: if d.find("-%s.%s" % (p1, p2)) != -1 and d.find("lib.") != -1: sys.path.insert(0, os.path.join(build_dir, d) ) import importlib module = importlib.import_module("_lz4.block._block") compress = module.compress decompress = module.decompress sys.path.pop(0) break
import sys import os p1, p2 = sys.version_info[:2] curpath = os.path.abspath( sys.argv[0] ) if os.path.islink(curpath): curpath = os.readlink(curpath) currentdir = os.path.dirname( curpath ) build_dir = os.path.abspath( os.path.join(currentdir, "lib-dynload", "lz4", "build") ) if not os.path.isdir(build_dir): build_dir = os.path.abspath( os.path.join(currentdir, "..", "lib-dynload", "lz4", "build") ) if not os.path.isdir(build_dir): build_dir = os.path.abspath( os.path.join(currentdir, "..", "..", "lib-dynload", "lz4", "build") ) dirs = os.listdir(build_dir) for d in dirs: if d.find("-%s.%s" % (p1, p2)) != -1 and d.find("lib.") != -1: sys.path.insert(0, os.path.join(build_dir, d, "_lz4", "block") ) import importlib module = importlib.import_module("_block") compress = module.compress decompress = module.decompress sys.path.pop(0) break
Fix load of compiled lz4 module
Fix load of compiled lz4 module
Python
mit
sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs
920e2fbb7e99c17dbe8d5b71e9c9b26a718ca444
ideascube/search/apps.py
ideascube/search/apps.py
from django.apps import AppConfig from django.db.models.signals import pre_migrate, post_migrate from .utils import create_index_table, reindex_content def create_index(sender, **kwargs): if isinstance(sender, SearchConfig): create_index_table(force=True) def reindex(sender, **kwargs): if isinstance(sender, SearchConfig): reindex_content(force=False) class SearchConfig(AppConfig): name = 'ideascube.search' verbose_name = 'Search' def ready(self): pre_migrate.connect(create_index, sender=self) post_migrate.connect(reindex, sender=self)
from django.apps import AppConfig from django.db.models.signals import pre_migrate, post_migrate from .utils import create_index_table, reindex_content def create_index(sender, **kwargs): if (kwargs['using'] == 'transient' and isinstance(sender, SearchConfig)): create_index_table(force=True) def reindex(sender, **kwargs): if (kwargs['using'] == 'transient' and isinstance(sender, SearchConfig)): reindex_content(force=False) class SearchConfig(AppConfig): name = 'ideascube.search' verbose_name = 'Search' def ready(self): pre_migrate.connect(create_index, sender=self) post_migrate.connect(reindex, sender=self)
Make (pre|post)_migrate scripts for the index table only if working on 'transient'.
Make (pre|post)_migrate scripts for the index table only if working on 'transient'. Django run (pre|post)_migrate script once per database. As we have two databases, the create_index is launch twice with different kwargs['using'] ('default' and 'transient'). We should try to create the index table only when we are working on the transient database. Most of the time, this is not important and create a new index table twice is not important. However, if we run tests, the database are configured and migrate one after the other and the 'transient' database may be miss-configured at a time. By creating the table only at the right time, we ensure that everything is properly configured.
Python
agpl-3.0
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
5d192ed46c0ca76d2dd3aa25f7fac5ad7b61ad3e
scripts/ls.py
scripts/ls.py
import argparse import teuthology.ls def main(): teuthology.ls.main(parse_args()) def parse_args(): parser = argparse.ArgumentParser(description='List teuthology job results') parser.add_argument( '--archive-dir', metavar='DIR', default='.', help='path under which to archive results', ) parser.add_argument( '-v', '--verbose', action='store_true', default=False, help='show reasons tests failed', ) return parser.parse_args()
import argparse import teuthology.ls def main(): teuthology.ls.main(parse_args()) def parse_args(): parser = argparse.ArgumentParser(description='List teuthology job results') parser.add_argument( 'archive_dir', metavar='DIR', default='.', help='path under which to archive results', ) parser.add_argument( '-v', '--verbose', action='store_true', default=False, help='show reasons tests failed', ) return parser.parse_args()
Make the archive dir a positional argument
Make the archive dir a positional argument Signed-off-by: Zack Cerza <[email protected]>
Python
mit
t-miyamae/teuthology,dreamhost/teuthology,ktdreyer/teuthology,SUSE/teuthology,tchaikov/teuthology,ivotron/teuthology,ktdreyer/teuthology,t-miyamae/teuthology,SUSE/teuthology,dmick/teuthology,yghannam/teuthology,SUSE/teuthology,robbat2/teuthology,yghannam/teuthology,michaelsevilla/teuthology,dmick/teuthology,ceph/teuthology,ivotron/teuthology,caibo2014/teuthology,zhouyuan/teuthology,robbat2/teuthology,dreamhost/teuthology,tchaikov/teuthology,michaelsevilla/teuthology,zhouyuan/teuthology,caibo2014/teuthology,ceph/teuthology,dmick/teuthology
8545423373dee1f4b801375922b67bc2417cb426
ooni/resources/update.py
ooni/resources/update.py
import os from twisted.internet import reactor, defer, protocol from twisted.web.client import RedirectAgent, Agent from ooni.settings import config from ooni.resources import inputs, geoip agent = RedirectAgent(Agent(reactor)) class SaveToFile(protocol.Protocol): def __init__(self, finished, filesize, filename): self.finished = finished self.remaining = filesize self.outfile = open(filename, 'wb') def dataReceived(self, bytes): if self.remaining: display = bytes[:self.remaining] self.outfile.write(display) self.remaining -= len(display) else: self.outfile.close() def connectionLost(self, reason): self.outfile.close() self.finished.callback(None) @defer.inlineCallbacks def download_resource(resources): for filename, resource in resources.items(): print "Downloading %s" % filename filename = os.path.join(config.resources_directory, filename) response = yield agent.request("GET", resource['url']) finished = defer.Deferred() response.deliverBody(SaveToFile(finished, response.length, filename)) yield finished if resource['action'] is not None: yield defer.maybeDeferred(resource['action'], filename, *resource['action_args']) print "%s written." % filename def download_inputs(): return download_resource(inputs) def download_geoip(): return download_resource(geoip)
import os from twisted.internet import defer from twisted.web.client import downloadPage from ooni.settings import config from ooni.resources import inputs, geoip @defer.inlineCallbacks def download_resource(resources): for filename, resource in resources.items(): print "Downloading %s" % filename filename = os.path.join(config.resources_directory, filename) yield downloadPage(resource['url'], filename) if resource['action'] is not None: yield defer.maybeDeferred(resource['action'], filename, *resource['action_args']) print "%s written." % filename def download_inputs(): return download_resource(inputs) def download_geoip(): return download_resource(geoip)
Simplify the code for downloading resources.
Simplify the code for downloading resources. Use downloadPage instead of our own class.
Python
bsd-2-clause
0xPoly/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe
360efe51bc45f189c235bed6b2b7bfdd4fd1bfbd
flask-restful/api.py
flask-restful/api.py
from flask import Flask, request from flask_restful import Resource, Api, reqparse from indra import reach from indra.statements import * import json app = Flask(__name__) api = Api(app) parser = reqparse.RequestParser() parser.add_argument('txt') parser.add_argument('json') class InputText(Resource): def post(self): args = parser.parse_args() txt = args['txt'] rp = reach.process_text(txt, offline=False) st = rp.statements json_statements = {} json_statements['statements'] = [] for s in st: s_json = s.to_json() json_statements['statements'].append(s_json) json_statements = json.dumps(json_statements) return json_statements, 201 api.add_resource(InputText, '/parse') class InputStmtJSON(Resource): def post(self): args = parser.parse_args() print(args) json_data = args['json'] json_dict = json.loads(json_data) st = [] for j in json_dict['statements']: s = Statement.from_json(j) print(s) st.append(s) return 201 api.add_resource(InputStmtJSON, '/load') if __name__ == '__main__': app.run(debug=True)
import json from bottle import route, run, request, post, default_app from indra import trips, reach, bel, biopax from indra.statements import * @route('/trips/process_text', method='POST') def trips_process_text(): body = json.load(request.body) text = body.get('text') tp = trips.process_text(text) if tp and tp.statements: stmts = json.dumps([json.loads(st.to_json()) for st in tp.statements]) res = {'statements': stmts} return res else: res = {'statements': []} return res @route('/reach/process_text', method='POST') def reach_process_text(): body = json.load(request.body) text = body.get('text') rp = reach.process_text(text) if rp and rp.statements: stmts = json.dumps([json.loads(st.to_json()) for st in rp.statements]) res = {'statements': stmts} return res else: res = {'statements': []} return res @route('/reach/process_pmc', method='POST') def reach_process_pmc(): body = json.load(request.body) pmcid = body.get('pmcid') rp = reach.process_pmc(pmcid) if rp and rp.statements: stmts = json.dumps([json.loads(st.to_json()) for st in rp.statements]) res = {'statements': stmts} return res else: res = {'statements': []} return res if __name__ == '__main__': app = default_app() run(app)
Reimplement using bottle and add 3 endpoints
Reimplement using bottle and add 3 endpoints
Python
bsd-2-clause
sorgerlab/indra,sorgerlab/indra,sorgerlab/belpy,pvtodorov/indra,bgyori/indra,johnbachman/indra,johnbachman/indra,pvtodorov/indra,johnbachman/belpy,johnbachman/belpy,pvtodorov/indra,bgyori/indra,johnbachman/belpy,bgyori/indra,sorgerlab/indra,pvtodorov/indra,sorgerlab/belpy,sorgerlab/belpy,johnbachman/indra
797b42cfd752d4ce43fdce616280710478420197
tests/rules/test_git_remote_seturl_add.py
tests/rules/test_git_remote_seturl_add.py
import pytest from thefuck.rules.git_remote_seturl_add import match, get_new_command from tests.utils import Command @pytest.mark.parametrize('command', [ Command(script='git remote set-url origin url', stderr="fatal: No such remote")]) def test_match(command): assert match(command) @pytest.mark.parametrize('command', [ Command('git remote set-url origin url', stderr=""), Command('git remote add origin url'), Command('git remote remove origin'), Command('git remote prune origin'), Command('git remote set-branches origin branch')]) def test_not_match(command): assert not match(command) @pytest.mark.parametrize('command, new_command', [ (Command('git remote set-url origin [email protected]:nvbn/thefuck.git'), 'git remote add origin [email protected]:nvbn/thefuck.git')]) def test_get_new_command(command, new_command): assert get_new_command(command) == new_command
import pytest from thefuck.rules.git_remote_seturl_add import match, get_new_command from tests.utils import Command @pytest.mark.parametrize('command', [ Command(script='git remote set-url origin url', stderr="fatal: No such remote")]) def test_match(command): assert match(command) @pytest.mark.parametrize('command', [ Command('git remote set-url origin url', stderr=""), Command('git remote add origin url'), Command('git remote remove origin'), Command('git remote prune origin'), Command('git remote set-branches origin branch')]) def test_not_match(command): assert not match(command) @pytest.mark.parametrize('command, new_command', [ (Command('git remote set-url origin [email protected]:nvbn/thefuck.git'), 'git remote add origin [email protected]:nvbn/thefuck.git')]) def test_get_new_command(command, new_command): assert get_new_command(command) == new_command
Fix flake8 errors: E302 expected 2 blank lines, found 1
Fix flake8 errors: E302 expected 2 blank lines, found 1
Python
mit
nvbn/thefuck,Clpsplug/thefuck,scorphus/thefuck,nvbn/thefuck,Clpsplug/thefuck,scorphus/thefuck,mlk/thefuck,SimenB/thefuck,SimenB/thefuck,mlk/thefuck
cfe848c3aa7e2365ec93f04edb2edf7357068a9a
gn/create_sksl_enums.py
gn/create_sksl_enums.py
#!/usr/bin/env python # # Copyright 2017 Google Inc. # # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os import subprocess import sys src = open(sys.argv[1], 'r') dst = open(sys.argv[2], 'w') dst.write('R"(') for line in src.readlines(): if not line.startswith("#"): dst.write(line) dst.write(')"\n') src.close() dst.close()
#!/usr/bin/env python # # Copyright 2017 Google Inc. # # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os import subprocess import sys src = open(sys.argv[1], 'r') dst = open(sys.argv[2], 'wb') dst.write('R"(') for line in src.readlines(): if not line.startswith("#"): dst.write(line) dst.write(')"\n') src.close() dst.close()
Create sksl_enums.inc with UNIX line endings (even on Windows)
Create sksl_enums.inc with UNIX line endings (even on Windows) Change-Id: I6e17a8498647c4bd09281f880a94cbfdcd5930ea Reviewed-on: https://skia-review.googlesource.com/106020 Reviewed-by: Ethan Nicholas <[email protected]> Commit-Queue: Brian Osman <[email protected]>
Python
bsd-3-clause
aosp-mirror/platform_external_skia,HalCanary/skia-hc,rubenvb/skia,HalCanary/skia-hc,Hikari-no-Tenshi/android_external_skia,rubenvb/skia,Hikari-no-Tenshi/android_external_skia,HalCanary/skia-hc,aosp-mirror/platform_external_skia,HalCanary/skia-hc,Hikari-no-Tenshi/android_external_skia,HalCanary/skia-hc,rubenvb/skia,HalCanary/skia-hc,Hikari-no-Tenshi/android_external_skia,HalCanary/skia-hc,Hikari-no-Tenshi/android_external_skia,rubenvb/skia,Hikari-no-Tenshi/android_external_skia,google/skia,google/skia,Hikari-no-Tenshi/android_external_skia,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,rubenvb/skia,rubenvb/skia,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,rubenvb/skia,google/skia,google/skia,HalCanary/skia-hc,google/skia,rubenvb/skia,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,rubenvb/skia,HalCanary/skia-hc,Hikari-no-Tenshi/android_external_skia,rubenvb/skia
db51cb32148a595f74eb4ed8cbcc5dc989db5786
src/reduce_framerate.py
src/reduce_framerate.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # (C) 2015 Jean Nassar # Released under BSD version 4 """ Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz. """ import rospy from sensor_msgs.msg import Image class ImageFeature(object): """ A ROS image Publisher/Subscriber. """ def __init__(self): self.image_subscriber = rospy.Subscriber("/ardrone/image_raw", Image, self.image_callback, queue_size=1) self.image_pub = rospy.Publisher("/output/slow_image_raw", Image, queue_size=1) rospy.logdebug("Subscribed to /ardrone/image_raw") self.count = 0 def frame_callback(self, frame): """ Callback function of subscribed topic. """ # Publish every fifteenth frame if not self.count % 15: self.image_pub.publish(frame) self.count += 1 def main(): """Initialize and cleanup ROS node.""" rospy.init_node("framerate_reducer", anonymous=True) ImageFeature() rospy.loginfo("Reducing framerate") rospy.spin() if __name__ == "__main__": main()
#!/usr/bin/env python # -*- coding: utf-8 -*- # (C) 2015 Jean Nassar # Released under BSD version 4 """ Reduce /ardrone/image_raw framerate from 30 Hz to 2 Hz. """ import rospy from sensor_msgs.msg import Image class ImageFeature(object): """ A ROS image Publisher/Subscriber. """ def __init__(self): self.image_subscriber = rospy.Subscriber("/ardrone/image_raw", Image, self.image_callback, queue_size=1) self.image_publisher = rospy.Publisher("/output/slow_image_raw", Image, queue_size=1) rospy.logdebug("Subscribed to /ardrone/image_raw") self.count = 0 def frame_callback(self, frame): """ Callback function of subscribed topic. """ # Publish every fifteenth frame if not self.count % 15: self.image_publisher.publish(frame) self.count += 1 def main(): """Initialize ROS node.""" rospy.init_node("framerate_reducer", anonymous=True) ImageFeature() rospy.loginfo("Reducing framerate") rospy.spin() if __name__ == "__main__": main()
Rename image_pub to image_publisher; change docstring.
Rename image_pub to image_publisher; change docstring.
Python
mit
masasin/spirit,masasin/spirit
03ffbda9725d7cf37b2fe8df2cfe13b2096c81c1
src/Person.py
src/Person.py
#!/usr/bin/env python """ Contains player and NPC-classes. """ import logging from Item import Item class Person(object): """ Base class for all characters in game. """ DEFAULT_HEALTH = 100 def __init__(self, health=DEFAULT_HEALTH, position): """ Defaults to facing north. Facing codes: - 0: North - 1: East - 2: South - 3: West @param health The health that is given at init. @param position [x, y] the position at init. """ if not isinstance(position, (tuple, list)): logging.error( "Position should be tuple/list with [y, x], set it to [0, 0]" ) position = [0, 0] self.health, self.position, self.facing = health, position, 0 class Player(Person): """ Contains the player-controlled character. """ def __init__(self, health=DEFAULT_HEALTH, position): super(Player, self).__init__(health, position) self.inventory = [] def give_item(self, item): if not isinstance(item, Item): logging.error( "Item given to player is not item instance." ) return self.inventory.append(item) class NPC(Person): """ Contains a character controlled by the game. """ def next_step(): """ Since the game controls this character, some algorithm should say where it moves. TODO """ pass
#!/usr/bin/env python """ Contains player and NPC-classes. """ import logging from Item import Item class Person(object): """ Base class for all characters in game. """ DEFAULT_HEALTH = 100 def __init__(self, health=DEFAULT_HEALTH, position): """ Defaults to facing north. Facing codes: - 0: North - 1: East - 2: South - 3: West @param health The health that is given at init. @param position [x, y] the position at init. """ if not isinstance(position, (tuple, list)): logging.error( "Position should be tuple/list with [x, y], set it to [0, 0]" ) position = [0, 0] self.health, self.position, self.facing = health, position, 0 class Player(Person): """ Contains the player-controlled character. """ def __init__(self, health=DEFAULT_HEALTH, position): super(Player, self).__init__(health, position) self.inventory = [] def give_item(self, item): if not isinstance(item, Item): logging.error( "Item given to player is not item instance." ) return self.inventory.append(item) class NPC(Person): """ Contains a character controlled by the game. """ def next_step(): """ Since the game controls this character, some algorithm should say where it moves. TODO """ pass
Revert "x,y should be y,x"
Revert "x,y should be y,x" This reverts commit 7636eb6ce4f23c6f787aed02590499b6d2ea60b2.
Python
apache-2.0
benedicteb/outcast
e78910c8b9ecf48f96a693dae3c15afa32a12da1
casexml/apps/phone/views.py
casexml/apps/phone/views.py
from django_digest.decorators import * from casexml.apps.phone import xml from casexml.apps.case.models import CommCareCase from casexml.apps.phone.restore import generate_restore_response from casexml.apps.phone.models import User from casexml.apps.case import const @httpdigest def restore(request): user = User.from_django_user(request.user) restore_id = request.GET.get('since') return generate_restore_response(user, restore_id) def xml_for_case(request, case_id, version="1.0"): """ Test view to get the xml for a particular case """ from django.http import HttpResponse case = CommCareCase.get(case_id) return HttpResponse(xml.get_case_xml(case, [const.CASE_ACTION_CREATE, const.CASE_ACTION_UPDATE], version), mimetype="text/xml")
from django.http import HttpResponse from django_digest.decorators import * from casexml.apps.phone import xml from casexml.apps.case.models import CommCareCase from casexml.apps.phone.restore import generate_restore_response from casexml.apps.phone.models import User from casexml.apps.case import const @httpdigest def restore(request): user = User.from_django_user(request.user) restore_id = request.GET.get('since') return generate_restore_response(user, restore_id) def xml_for_case(request, case_id, version="1.0"): """ Test view to get the xml for a particular case """ case = CommCareCase.get(case_id) return HttpResponse(xml.get_case_xml(case, [const.CASE_ACTION_CREATE, const.CASE_ACTION_UPDATE], version), mimetype="text/xml")
Revert "moving httpresponse to view"
Revert "moving httpresponse to view" This reverts commit a6f501bb9de6382e35372996851916adac067fa0.
Python
bsd-3-clause
SEL-Columbia/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq
5ac26c7ec252778f58887279b76f22d15095b0df
stock_packaging_calculator/__manifest__.py
stock_packaging_calculator/__manifest__.py
# Copyright 2020 Camptocamp SA # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl) { "name": "Stock packaging calculator", "summary": "Compute product quantity to pick by packaging", "version": "14.0.1.2.0", "development_status": "Alpha", "category": "Warehouse Management", "website": "https://github.com/OCA/stock-logistics-warehouse", "author": "Camptocamp, Odoo Community Association (OCA)", "license": "LGPL-3", "application": False, "installable": True, "depends": ["product"], }
# Copyright 2020 Camptocamp SA # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl) { "name": "Stock packaging calculator", "summary": "Compute product quantity to pick by packaging", "version": "14.0.1.2.0", "development_status": "Beta", "category": "Warehouse Management", "website": "https://github.com/OCA/stock-logistics-warehouse", "author": "Camptocamp, Odoo Community Association (OCA)", "license": "LGPL-3", "application": False, "installable": True, "depends": ["product"], }
Change development_status key to Beta
[DCK] stock_packaging_calculator: Change development_status key to Beta
Python
agpl-3.0
OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse
4aa6987d3048d6de36ddc07b63a02a3ddf3ab410
integration/integration.py
integration/integration.py
# Python Packages import random # External Packages import numpy as np def sin_theta_sum(theta): return np.sin(theta) def gen_random_value(count, rmin, rmax): value = 0 for i in range(count): value += np.random.uniform(rmin, rmax) # test_range(rmin, rmax, value) return value def run_monte_carlo(samples, function, func_coeff, func_vars): value = 0 for i in range(samples): if i % 10000 == 0: print(i) value += function(func_vars) value = value*func_coeff/samples return value def sin_monte_element(rmax): value = gen_random_value(8, 0, rmax) result = sin_theta_sum(value) return result def main(): rmax = np.pi/8 samples = 10000000 coefficient = 1000000 volume = np.power(np.pi/8, 8) func_coeff = coefficient*volume func_vars = rmax result = run_monte_carlo(samples, sin_monte_element, func_coeff, func_vars) print(result) def test_range(rmin, rmax, value): if (value <= rmin or value >= rmax): print(False) main()
# Python Packages import random # External Packages import numpy as np def sin_theta_sum(variables): theta = 0 for var in variables: theta += var return np.sin(theta) def gen_random_variables(count, rmin, rmax): variables = [] for i in range(count): variables.append(np.random.uniform(rmin, rmax)) # test_range(rmin, rmax, value) return variables def run_monte_carlo(samples, function, func_coeff, func_vars): value = 0 for i in range(samples): if i % 10000 == 0: print(i) value += function(func_vars) value = value*func_coeff/samples return value def sin_monte_element(rmax): value = gen_random_variables(8, 0, rmax) result = sin_theta_sum(value) return result def main(): rmax = np.pi/8 samples = 10000000 coefficient = 1000000 volume = np.power(rmax, 8) func_coeff = coefficient*volume func_vars = rmax result = run_monte_carlo(samples, sin_monte_element, func_coeff, func_vars) print(result) def test_range(rmin, rmax, value): if (value <= rmin or value >= rmax): print(False) main()
Adjust code to restore generality.
Adjust code to restore generality. Discussions suggest runtime is not catestrophically slow, so return generality to the code for sake of my sanity.
Python
mit
lemming52/white_knight
3d0ecb4bfd92e96dd195142c3b31955d521dfaf1
sahara/tests/unit/plugins/cdh/v5_5_0/test_plugin_utils_550.py
sahara/tests/unit/plugins/cdh/v5_5_0/test_plugin_utils_550.py
# Copyright (c) 2015 Intel Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from sahara.plugins.cdh.v5_4_0 import plugin_utils as pu from sahara.tests.unit.plugins.cdh import base_plugin_utils_test class TestPluginUtilsV540(base_plugin_utils_test.TestPluginUtilsHigherThanV5): def setUp(self): super(TestPluginUtilsV540, self).setUp() self.plug_utils = pu.PluginUtilsV540() self.version = "v5_4_0"
# Copyright (c) 2015 Intel Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from sahara.plugins.cdh.v5_5_0 import plugin_utils as pu from sahara.tests.unit.plugins.cdh import base_plugin_utils_test class TestPluginUtilsV550(base_plugin_utils_test.TestPluginUtilsHigherThanV5): def setUp(self): super(TestPluginUtilsV550, self).setUp() self.plug_utils = pu.PluginUtilsV550() self.version = "v5_5_0"
Correct the unit test in V5_5_0
Correct the unit test in V5_5_0 This file seems like copying from v5_4_0. Update it. Change-Id: I6125f66880feec7419ca6338ce23f05a01a2c3b3
Python
apache-2.0
egafford/sahara,openstack/sahara,openstack/sahara,egafford/sahara,tellesnobrega/sahara,tellesnobrega/sahara
63f6e4d50116d5ca2bfc82c1c608e08040055b5e
subdue/core/__init__.py
subdue/core/__init__.py
__all__ = [ 'color', 'BANNER', 'DEFAULT_DRIVER_CODE' 'die', 'verbose', 'use_colors', 'set_color_policy', ] import sys as _sys from . import color as _color BANNER = """\ _ _ ___ _ _| |__ __| |_ _ ___ / __| | | | '_ \ / _` | | | |/ _ \\ \__ \ |_| | |_) | (_| | |_| | __/ |___/\__,_|_.__/ \__,_|\__,_|\___| """ DEFAULT_DRIVER_CODE = """\ #!/usr/bin/env python from subdue.sub import main main() """ verbose = False def set_color_policy(policy): _color.color_policy = policy def die(msg): _sys.stderr.write(msg) _sys.stderr.write("\n") _sys.stderr.flush() _sys.exit(1)
__all__ = [ 'BANNER', 'DEFAULT_DRIVER_CODE' 'die', 'verbose', 'set_color_policy', ] import sys as _sys from . import color as _color BANNER = """\ _ _ ___ _ _| |__ __| |_ _ ___ / __| | | | '_ \ / _` | | | |/ _ \\ \__ \ |_| | |_) | (_| | |_| | __/ |___/\__,_|_.__/ \__,_|\__,_|\___| """ DEFAULT_DRIVER_CODE = """\ #!/usr/bin/env python from subdue.sub import main main() """ verbose = False def set_color_policy(policy): _color.color_policy = policy def die(msg): _sys.stderr.write(msg) _sys.stderr.write("\n") _sys.stderr.flush() _sys.exit(1)
Remove old exports from subdue.core
Remove old exports from subdue.core
Python
mit
jdevera/subdue
459bf08b9fe4ae5a879a138bd2497abb23bf5910
modules/expansion/cve.py
modules/expansion/cve.py
import json import requests misperrors = {'error': 'Error'} mispattributes = {'input': ['vulnerability'], 'output': ['']} moduleinfo = {'version': '0.1', 'author': 'Alexandre Dulaunoy', 'description': 'An expansion hover module to expand information about CVE id.', 'module-type': ['hover']} moduleconfig = [] cveapi_url = 'https://cve.circl.lu/api/cve/' def handler(q=False): if q is False: return False print (q) request = json.loads(q) if not request.get('vulnerability'): misperrors['error'] = 'Vulnerability id missing' return misperrors r = requests.get(cveapi_url+request.get('vulnerability')) if r.status_code == 200: vulnerability = json.loads(r.text) else: misperrors['error'] = 'cve.circl.lu API not accessible' return misperrors['error'] return vulnerability def introspection(): return mispattributes def version(): moduleinfo['config'] = moduleconfig return moduleinfo
import json import requests misperrors = {'error': 'Error'} mispattributes = {'input': ['vulnerability'], 'output': ['text']} moduleinfo = {'version': '0.2', 'author': 'Alexandre Dulaunoy', 'description': 'An expansion hover module to expand information about CVE id.', 'module-type': ['hover']} moduleconfig = [] cveapi_url = 'https://cve.circl.lu/api/cve/' def handler(q=False): if q is False: return False print (q) request = json.loads(q) if not request.get('vulnerability'): misperrors['error'] = 'Vulnerability id missing' return misperrors r = requests.get(cveapi_url+request.get('vulnerability')) if r.status_code == 200: vulnerability = json.loads(r.text) if vulnerability.get('summary'): summary = vulnerability['summary'] else: misperrors['error'] = 'cve.circl.lu API not accessible' return misperrors['error'] r = {'results': [{'types': mispattributes['output'], 'values': summary}]} return r def introspection(): return mispattributes def version(): moduleinfo['config'] = moduleconfig return moduleinfo
Return a text attribute for an hover only module
Return a text attribute for an hover only module
Python
agpl-3.0
VirusTotal/misp-modules,MISP/misp-modules,MISP/misp-modules,amuehlem/misp-modules,MISP/misp-modules,Rafiot/misp-modules,Rafiot/misp-modules,amuehlem/misp-modules,Rafiot/misp-modules,amuehlem/misp-modules,VirusTotal/misp-modules,VirusTotal/misp-modules
43efd1f110daa8f2f16475e4e6edbdf18ff28286
linter.py
linter.py
# # linter.py # Linter for SublimeLinter3, a code checking framework for Sublime Text 3 # # Written by Ben Edwards # Copyright (c) 2015 Ben Edwards # # License: MIT # """This module exports the PugLint plugin class.""" from SublimeLinter.lint import NodeLinter, util, highlight class PugLint(NodeLinter): """Provides an interface to pug-lint.""" cmd = 'pug-lint @ *' regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)' multiline = False tempfile_suffix = 'pug' error_stream = util.STREAM_BOTH defaults = { 'selector': 'text.pug, source.pypug, text.jade', '--reporter=': 'inline' } default_type = highlight.WARNING
# # linter.py # Linter for SublimeLinter3, a code checking framework for Sublime Text 3 # # Written by Ben Edwards # Copyright (c) 2015 Ben Edwards # # License: MIT # """This module exports the PugLint plugin class.""" from SublimeLinter.lint import NodeLinter, WARNING class PugLint(NodeLinter): """Provides an interface to pug-lint.""" cmd = 'pug-lint ${temp_file} ${args}' regex = r'^.+?:(?P<line>\d+)(:(?P<col>\d+) | )(?P<message>.+)' multiline = False tempfile_suffix = 'pug' error_stream = util.STREAM_BOTH defaults = { 'selector': 'text.pug, source.pypug, text.jade', '--reporter=': 'inline' } default_type = WARNING
Update to catch up with Sublime-Linter API
Update to catch up with Sublime-Linter API
Python
mit
benedfit/SublimeLinter-contrib-pug-lint,benedfit/SublimeLinter-contrib-jade-lint
f1874e9af69b22fd3f17938ba673d955780b69a9
linter.py
linter.py
# # linter.py # Linter for SublimeLinter3, a code checking framework for Sublime Text 3 # # Written by Ethan Zimmerman # Copyright (c) 2014 Ethan Zimmerman # # License: MIT # """This module exports the RamlCop plugin class.""" from SublimeLinter.lint import NodeLinter class RamlCop(NodeLinter): """Provides an interface to raml-cop.""" syntax = 'raml' cmd = 'raml-cop --no-color' version_requirement = '>= 1.0.0' regex = ( r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] ' r'(?P<message>.+)' ) line_col_base = (0, 0) tempfile_suffix = '-'
# # linter.py # Linter for SublimeLinter3, a code checking framework for Sublime Text 3 # # Written by Ethan Zimmerman # Copyright (c) 2014 Ethan Zimmerman # # License: MIT # """This module exports the RamlCop plugin class.""" from SublimeLinter.lint import NodeLinter class RamlCop(NodeLinter): """Provides an interface to raml-cop.""" syntax = 'raml' cmd = 'raml-cop --no-color' version_requirement = '>= 1.0.0' regex = ( r'^\[.+:(?P<line>\d+):(?P<col>\d+)\] ' r'(?P<message>.+)' ) line_col_base = (0, 0) tempfile_suffix = '-'
Revert "Remove empty line before class docstring"
Revert "Remove empty line before class docstring" This reverts commit 928cbc47cb7430d8a2fef924b61179cd30f5ca34.
Python
mit
thebinarypenguin/SublimeLinter-contrib-raml-cop
9e9a19e0f87806c75892f55b1d603bd47d552693
product_supplier_pricelist/__openerp__.py
product_supplier_pricelist/__openerp__.py
# -*- coding: utf-8 -*- { 'name': 'Product Supplier Pricelist', 'version': '1.0', 'category': 'Product', 'sequence': 14, 'summary': '', 'description': """ Product Supplier Pricelist ========================== Add sql constraint to restrict: 1. That you can only add one supplier to a product per company 2. That you can add olny one record of same quantity for a supplier pricelist It also adds to more menus (and add some related fields) on purchase/product. """, 'author': 'Ingenieria ADHOC', 'website': 'www.ingadhoc.com', 'images': [ ], 'depends': [ 'purchase', ], 'data': [ 'product_view.xml', ], 'demo': [ ], 'test': [ ], 'installable': True, 'auto_install': False, 'application': False, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# -*- coding: utf-8 -*- { 'name': 'Product Supplier Pricelist', 'version': '1.0', 'category': 'Product', 'sequence': 14, 'summary': '', 'description': """ Product Supplier Pricelist ========================== Add sql constraint to restrict: 1. That you can only add one supplier to a product per company 2. That you can add olny one record of same quantity for a supplier pricelist It also adds to more menus (and add some related fields) on purchase/product. """, 'author': 'Ingenieria ADHOC', 'website': 'www.ingadhoc.com', 'images': [ ], 'depends': [ 'purchase', ], 'data': [ 'product_view.xml', ], 'demo': [ ], 'test': [ ], # TODO fix this module and make installable 'installable': False, 'auto_install': False, 'application': False, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
FIX disable product supplier pricelist
FIX disable product supplier pricelist
Python
agpl-3.0
csrocha/account_journal_payment_subtype,csrocha/account_voucher_payline
c3c5bf54dbaa6dd5279cd82f9886d0d83fd07bcd
src/view/CtagsManager.py
src/view/CtagsManager.py
import subprocess def _eintr_retry_call(func, *args): while True: try: return func(*args) except OSError, e: if e.errno == errno.EINTR: continue raise def ct_query(filename): cmd = 'ctags -n -u --fields=+K -f -' args = cmd.split() args.append(filename) proc = subprocess.Popen(args, stdout=subprocess.PIPE) (out_data, err_data) = _eintr_retry_call(proc.communicate) out_data = out_data.split('\n') res = [] for line in out_data: if (line == ''): break line = line.split('\t') num = line[2].split(';', 1)[0] line = [line[0], num, line[3]] res.append(line) return res
import subprocess def _eintr_retry_call(func, *args): while True: try: return func(*args) except OSError, e: if e.errno == errno.EINTR: continue raise def ct_query(filename): cmd = 'ctags -n -u --fields=+K -f -' args = cmd.split() args.append(filename) proc = subprocess.Popen(args, stdout=subprocess.PIPE) (out_data, err_data) = _eintr_retry_call(proc.communicate) out_data = out_data.split('\n') res = [] for line in out_data: if (line == ''): break line = line.split('\t') num = line[2].split(';', 1)[0] line = [line[0], num, line[3]] res.append(line) return res
Fix indention error - thought that was fixed before my last push
Fix indention error - thought that was fixed before my last push
Python
bsd-3-clause
pombreda/seascope,eaglexmw/seascope,eaglexmw/seascope,pombreda/seascope,eaglexmw/seascope,pombreda/seascope
03380a1042443465d6f1d74afb5fd120dbc3379b
manage.py
manage.py
#!/usr/bin/env python3 from manager import Manager manager = Manager() @manager.command def build(threads=1): print("Starting a build with %d threads ..." % threads) @manager.command def clean(): pass if __name__ == '__main__': manager.main()
#!/usr/bin/env python3 from manager import Manager from multiprocessing import Pool manager = Manager() def func(period): from time import sleep sleep(period) @manager.command def build(threads=1): pool = Pool(threads) print("Starting a build with %d threads ..." % threads) pool.map(func, [1, 1, 1, 1, 1]) @manager.command def clean(): pass if __name__ == '__main__': manager.main()
Add parallelizing code to build
Add parallelizing code to build
Python
mit
tanayseven/personal_website,tanayseven/personal_website,tanayseven/personal_website,tanayseven/personal_website
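A minimal, standalone sketch of the Pool fan-out pattern the commit wires into build(); the task function, timings and thread count here are placeholders, not code from the repository.

from multiprocessing import Pool
from time import sleep

def build_step(period):
    # Stand-in for one unit of build work.
    sleep(period)
    return period

def build(threads=1):
    print("Starting a build with %d threads ..." % threads)
    # map() blocks until every task has finished, mirroring the commit's pool.map call.
    with Pool(threads) as pool:
        results = pool.map(build_step, [1, 1, 1, 1, 1])
    print("Finished %d build steps" % len(results))

if __name__ == '__main__':
    build(threads=4)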
1cbe91b1f4e4ef126dfce3ecd56016f33e7ad836
manage.py
manage.py
#!/usr/bin/env python import os import sys if __name__ == "__main__": if "--settings" not in sys.argv: os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pinry.settings.development") from django.core.management import execute_from_command_line if 'test' in sys.argv: from django.conf import settings settings.IS_TEST = True execute_from_command_line(sys.argv)
#!/usr/bin/env python import os import sys if __name__ == "__main__": if not any(arg.startswith("--settings") for arg in sys.argv): os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pinry.settings.development") from django.core.management import execute_from_command_line if 'test' in sys.argv: from django.conf import settings settings.IS_TEST = True execute_from_command_line(sys.argv)
Fix django development settings again
Fix: Fix django development settings again
Python
bsd-2-clause
pinry/pinry,lapo-luchini/pinry,lapo-luchini/pinry,lapo-luchini/pinry,pinry/pinry,pinry/pinry,lapo-luchini/pinry,pinry/pinry
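The startswith() form matters because Django accepts the settings module as a single combined token, --settings=pkg.module, which an exact membership test never matches. A small self-contained sketch of the difference (the module path is just an example):

def settings_given(argv):
    # True for both "--settings pinry.settings.development" and
    # "--settings=pinry.settings.development".
    return any(arg.startswith("--settings") for arg in argv)

argv = ["manage.py", "test", "--settings=pinry.settings.development"]
print("--settings" in argv)   # False: the exact-match test misses the combined form
print(settings_given(argv))   # True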
dd9c16c5317b80c30ccca377a4b0064ebbeb4874
indra/tests/test_tas.py
indra/tests/test_tas.py
from nose.plugins.attrib import attr from indra.sources.tas import process_from_web @attr('slow') def test_processor(): tp = process_from_web(affinity_class_limit=10) assert tp assert tp.statements num_stmts = len(tp.statements) # This is the total number of statements about human genes assert num_stmts == 1128585, num_stmts assert all(len(s.evidence) >= 1 for s in tp.statements), \ 'Some statements lack any evidence'
from nose.plugins.attrib import attr from indra.sources.tas import process_from_web @attr('slow') def test_processor(): tp = process_from_web(affinity_class_limit=10) assert tp assert tp.statements num_stmts = len(tp.statements) # This is the total number of statements about human genes assert num_stmts == 1168706, num_stmts assert all(len(s.evidence) >= 1 for s in tp.statements), \ 'Some statements lack any evidence'
Update expected count again after changes
Update expected count again after changes
Python
bsd-2-clause
sorgerlab/indra,bgyori/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/indra,johnbachman/indra,johnbachman/indra,bgyori/indra,sorgerlab/indra,bgyori/indra,johnbachman/belpy,johnbachman/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/belpy
a5003b6f45d262923a1c00bd9a9c1addb3854178
lapostesdk/apis/apibase.py
lapostesdk/apis/apibase.py
import requests from importlib import import_module class ApiBase(object): def __init__(self, api_key, product, version='v1', entity=None): self.product = product self.version = version self.entity = entity self.api_url = 'https://api.laposte.fr/%(product)s/%(version)s/' % { 'product': self.product, 'version': self.version} self.headers = {'X-Okapi-Key': api_key} def get(self, resource, params={}): response = self._get(resource, params) if self.entity is None: return response module = import_module('lapostesdk.entities') obj = getattr(module, self.entity) instance = obj() instance.hydrate(response) return instance def _get(self, resource, params={}): r = requests.get(self.api_url + resource, params=params, headers=self.headers) return r.json()
import requests from importlib import import_module class ApiBase(object): def __init__(self, api_key, product, version='v1', entity=None): self.product = product self.version = version self.entity = entity self.api_url = 'https://api.laposte.fr/%(product)s/%(version)s/' % { 'product': self.product, 'version': self.version} self.headers = {'X-Okapi-Key': api_key} def get(self, resource, params={}): response = self._get(resource, params) if self.entity is None: return response return self.create_object(response, self.entity) def _get(self, resource, params={}): r = requests.get(self.api_url + resource, params=params, headers=self.headers) return r.json() def create_object(self, response, entity): module = import_module('lapostesdk.entities') obj = getattr(module, self.entity) instance = obj() instance.hydrate(response) return instance
Move object creation outside of get method
Move object creation outside of get method
Python
mit
geelweb/laposte-python-sdk
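create_object() is a plain import_module/getattr lookup; below is a generic, runnable sketch of that pattern using a standard-library class, since the real lapostesdk entities module (and its hydrate() interface) is not shown here.

from importlib import import_module

def create_object(data, entity, module_path='collections'):
    # Resolve the class by name at runtime, then instantiate it with the data.
    module = import_module(module_path)
    cls = getattr(module, entity)
    return cls(data)

counter = create_object('mississippi', 'Counter')
print(counter.most_common(2))   # the two most frequent letters, e.g. ('i', 4) and ('s', 4)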
b71aac6f519dd254bf23a9c74899ca20485dd340
tba_config.py
tba_config.py
import json import os DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev') # For choosing what the main landing page displays KICKOFF = 1 BUILDSEASON = 2 COMPETITIONSEASON = 3 OFFSEASON = 4 # The CONFIG variables should have exactly the same structure between environments # Eventually a test environment should be added. -gregmarra 17 Jul 2012 if DEBUG: CONFIG = { "env": "dev", "memcache": False, } else: CONFIG = { "env": "prod", "memcache": True, } CONFIG['landing_handler'] = COMPETITIONSEASON CONFIG["static_resource_version"] = 2
import json import os DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev') # For choosing what the main landing page displays KICKOFF = 1 BUILDSEASON = 2 COMPETITIONSEASON = 3 OFFSEASON = 4 # The CONFIG variables should have exactly the same structure between environments # Eventually a test environment should be added. -gregmarra 17 Jul 2012 if DEBUG: CONFIG = { "env": "dev", "memcache": False, } else: CONFIG = { "env": "prod", "memcache": True, } CONFIG['landing_handler'] = COMPETITIONSEASON CONFIG["static_resource_version"] = 3
Increment static resource to account for CDN JS
Increment static resource to account for CDN JS
Python
mit
phil-lopreiato/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,josephbisch/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,bvisness/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,phil-lopreiato/the-blue-alliance,1fish2/the-blue-alliance,the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,josephbisch/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,1fish2/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,1fish2/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance
01c85f24d788c8f92ad4ee04192d963f74521eec
scripts/rpkm_annotations_table.py
scripts/rpkm_annotations_table.py
#!/usr/bin/env python """A script to sum the rpkm values for all genes for each annotation.""" import pandas as pd import argparse import sys def main(args): rpkm_table =pd.read_table(args.rpkm_table, index_col=0) annotations = pd.read_table(args.annotation_table, header=None, names=["gene_id", "annotation", "evalue"]) annotation_rpkm = {} for annotation, annotation_df in annotations.groupby('annotation'): annotation_rpkm[annotation] = rpkm_table.ix[annotation_df.gene_id].sum() annotation_rpkm_df = pd.DataFrame.from_dict(annotation_rpkm, orient='index') # sort the columns of the dataframe annotation_rpkm_df = annotation_rpkm_df.reindex(columns=sorted(rpkm_table.columns)) annotation_rpkm_df.to_csv(sys.stdout, sep='\t') if __name__ == "__main__": parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("rpkm_table") parser.add_argument("annotation_table") args = parser.parse_args() main(args)
#!/usr/bin/env python """A script to sum the rpkm values for all genes for each annotation.""" import pandas as pd import argparse import sys def main(args): rpkm_table =pd.read_table(args.rpkm_table, index_col=0) annotations = pd.read_table(args.annotation_table, header=None, names=["gene_id", "annotation", "evalue", "score"]) annotation_rpkm = {} for annotation, annotation_df in annotations.groupby('annotation'): annotation_rpkm[annotation] = rpkm_table.ix[annotation_df.gene_id].sum() annotation_rpkm_df = pd.DataFrame.from_dict(annotation_rpkm, orient='index') # sort the columns of the dataframe annotation_rpkm_df = annotation_rpkm_df.reindex(columns=sorted(rpkm_table.columns)) annotation_rpkm_df.to_csv(sys.stdout, sep='\t') if __name__ == "__main__": parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("rpkm_table") parser.add_argument("annotation_table") args = parser.parse_args() main(args)
Use score as well in annotations table
Use score as well in annotations table
Python
mit
EnvGen/toolbox,EnvGen/toolbox
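A hedged sketch of the same aggregation on in-memory frames (column names and values invented); it uses .loc rather than the .ix indexer from the script, since .ix has since been removed from pandas.

import pandas as pd

rpkm_table = pd.DataFrame({'sample1': [1.0, 2.0, 3.0],
                           'sample2': [0.5, 0.5, 1.5]},
                          index=['gene1', 'gene2', 'gene3'])
annotations = pd.DataFrame({'gene_id': ['gene1', 'gene2', 'gene3'],
                            'annotation': ['K00001', 'K00001', 'K00002'],
                            'evalue': [1e-5, 1e-7, 1e-3],
                            'score': [55.0, 60.0, 40.0]})

# Sum the per-sample RPKM values of all genes sharing an annotation.
annotation_rpkm = {annotation: rpkm_table.loc[annotation_df.gene_id].sum()
                   for annotation, annotation_df in annotations.groupby('annotation')}
print(pd.DataFrame.from_dict(annotation_rpkm, orient='index'))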
c0b19b1ed8655b540ba8431bb1224056ed5890df
pyscraper/patchfilter.py
pyscraper/patchfilter.py
#! /usr/bin/python2.3 # vim:sw=8:ts=8:et:nowrap import os import shutil def ApplyPatches(filein, fileout): # Generate short name such as wrans/answers2003-03-31.html (rest, name) = os.path.split(filein) (rest, dir) = os.path.split(rest) fileshort = os.path.join(dir, name) # Look for a patch file from our collection (which is # in the pyscraper/patches folder in Public Whip CVS) patchfile = os.path.join("patches", fileshort + ".patch") if not os.path.isfile(patchfile): return False while True: # Apply the patch shutil.copyfile(filein, fileout) # delete temporary file that might have been created by a previous patch failure filoutorg = fileout + ".orig" if os.path.isfile(filoutorg): os.remove(filoutorg) status = os.system("patch --quiet %s <%s" % (fileout, patchfile)) if status == 0: return True print "Error running 'patch' on file %s, blanking it out" % fileshort os.rename(patchfile, patchfile + ".old~") blankfile = open(patchfile, "w") blankfile.close()
#! /usr/bin/python2.3 # vim:sw=8:ts=8:et:nowrap import os import shutil def ApplyPatches(filein, fileout): # Generate short name such as wrans/answers2003-03-31.html (rest, name) = os.path.split(filein) (rest, dir) = os.path.split(rest) fileshort = os.path.join(dir, name) # Look for a patch file from our collection (which is # in the pyscraper/patches folder in Public Whip CVS) patchfile = os.path.join("patches", fileshort + ".patch") if not os.path.isfile(patchfile): return False while True: # Apply the patch shutil.copyfile(filein, fileout) # delete temporary file that might have been created by a previous patch failure filoutorg = fileout + ".orig" if os.path.isfile(filoutorg): os.remove(filoutorg) status = os.system("patch --quiet %s <%s" % (fileout, patchfile)) if status == 0: return True raise Exception, "Error running 'patch' on file %s" % fileshort #print "blanking out %s" % fileshort #os.rename(patchfile, patchfile + ".old~") #blankfile = open(patchfile, "w") #blankfile.close()
Remove code which blanks patch files
Remove code which blanks patch files
Python
agpl-3.0
openaustralia/publicwhip-matthew,openaustralia/publicwhip-matthew,openaustralia/publicwhip-matthew,openaustralia/publicwhip-matthew,openaustralia/publicwhip-matthew,openaustralia/publicwhip-matthew,openaustralia/publicwhip-matthew,openaustralia/publicwhip-matthew,openaustralia/publicwhip-matthew
1b385ce127f0a1802b0effa0054b44f58b3317b0
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/accounts/urls.py
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/accounts/urls.py
from django.contrib.auth import views from django.urls import path, re_path from accounts.forms import LoginForm, PasswordResetForm, SetPasswordForm urlpatterns = [ path( "login/", views.LoginView.as_view( template_name="accounts/login.html", authentication_form=LoginForm ), name="login", ), path("logout/", views.LogoutView.as_view(), name="logout"), # Password reset path( "account/password_reset/", views.PasswordResetView.as_view(form_class=PasswordResetForm), name="password_reset", ), path( "account/password_reset/done/", views.PasswordResetDoneView.as_view(), name="password_reset_done", ), re_path( r"^account/reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$", views.PasswordResetConfirmView.as_view(form_class=SetPasswordForm), name="password_reset_confirm", ), path( "account/reset/done/", views.PasswordResetCompleteView.as_view(), name="password_reset_complete", ), ]
from django.contrib.auth import views from django.urls import path from accounts.forms import LoginForm, PasswordResetForm, SetPasswordForm urlpatterns = [ path( "login/", views.LoginView.as_view( template_name="accounts/login.html", authentication_form=LoginForm ), name="login", ), path("logout/", views.LogoutView.as_view(), name="logout"), # Password reset path( "account/password_reset/", views.PasswordResetView.as_view(form_class=PasswordResetForm), name="password_reset", ), path( "account/password_reset/done/", views.PasswordResetDoneView.as_view(), name="password_reset_done", ), path( r"account/reset/<uidb64>/<token>/", views.PasswordResetConfirmView.as_view(form_class=SetPasswordForm), name="password_reset_confirm", ), path( "account/reset/done/", views.PasswordResetCompleteView.as_view(), name="password_reset_complete", ), ]
Fix webapp password reset link
DEVOPS-42: Fix webapp password reset link
Python
isc
thorgate/django-project-template,thorgate/django-project-template,thorgate/django-project-template,thorgate/django-project-template,thorgate/django-project-template
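The regex route becomes a path() with converters; a minimal sketch of the equivalence with a throwaway view (Django 2.0 or newer assumed). A bare <name> converter defaults to <str:name>, so both kwargs still arrive as strings.

from django.urls import path, re_path

def reset_view(request, uidb64, token):
    ...   # placeholder view, not the template's real view

urlpatterns = [
    # Old form: explicit regex with named groups.
    re_path(r"^account/reset/(?P<uidb64>[0-9A-Za-z_\-]+)/"
            r"(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$",
            reset_view, name="password_reset_confirm_old"),
    # New form: path converters.
    path("account/reset/<uidb64>/<token>/",
         reset_view, name="password_reset_confirm"),
]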
9b10f600b5611380f72fe2aeacfe2ee6f02e4e3a
kicad_footprint_load.py
kicad_footprint_load.py
import pcbnew import sys import os pretties = [] for dirname, dirnames, filenames in os.walk(sys.argv[1]): # don't go into any .git directories. if '.git' in dirnames: dirnames.remove('.git') for filename in filenames: if (not os.path.isdir(filename)) and (os.path.splitext(filename)[-1] == '.kicad_mod'): pretties.append(os.path.realpath(dirname)) break src_plugin = pcbnew.IO_MGR.PluginFind(1) for libpath in pretties: #Ignore paths with unicode as KiCad can't deal with them in enumerate list_of_footprints = src_plugin.FootprintEnumerate(libpath, False)
import pcbnew import sys import os pretties = [] for dirname, dirnames, filenames in os.walk(sys.argv[1]): # don't go into any .git directories. if '.git' in dirnames: dirnames.remove('.git') for filename in filenames: if (not os.path.isdir(filename)) and (os.path.splitext(filename)[-1] == '.kicad_mod'): pretties.append(os.path.realpath(dirname)) break src_plugin = pcbnew.IO_MGR.PluginFind(1) for libpath in pretties: list_of_footprints = src_plugin.FootprintEnumerate(libpath)
Switch to old invocation of FootprintEnumerate
Switch to old invocation of FootprintEnumerate
Python
mit
monostable/haskell-kicad-data,monostable/haskell-kicad-data,kasbah/haskell-kicad-data
eafd43442cc697bf2278f6df67c1577cc8f5bf56
support/jenkins/buildAllModuleCombination.py
support/jenkins/buildAllModuleCombination.py
import os from subprocess import call from itertools import product, repeat # To be called from the OpenSpace main folder modules = os.listdir("modules") modules.remove("base") # Get 2**len(modules) combinatorical combinations of ON/OFF settings = [] for args in product(*repeat(("ON", "OFF"), len(modules))): settings.append(args) # Create all commands cmds = [] for s in settings: cmd = ["cmake", "-DGHOUL_USE_DEVIL=NO", "-DOPENSPACE_MODULE_BASE=ON"] for m,s in zip(modules, s): cmd.append("-DOPENSPACE_MODULE_" + m.upper() + "=" + s) cmds.append(cmd) # Build cmake and compile for c in cmds: call(cmd) call(["make", "clean"]) call(["make", "-j4"])
import os from subprocess import call from itertools import product, repeat # To be called from the OpenSpace main folder modules = os.listdir("modules") modules.remove("base") # Get 2**len(modules) combinatorical combinations of ON/OFF settings = [] for args in product(*repeat(("ON", "OFF"), len(modules))): settings.append(args) # Create all commands cmds = [] for s in settings: cmd = ["cmake", "-DGHOUL_USE_DEVIL=NO", "-DOPENSPACE_MODULE_BASE=ON"] for m,s in zip(modules, s): cmd.append("-DOPENSPACE_MODULE_" + m.upper() + "=" + s) cmds.append(cmd) # Build cmake and compile for c in cmds: print "CMake:" , cmd call(cmd) call(["make", "clean"]) call(["make", "-j4"])
Print progress of combinatorial build
Print progress of combinatorial build
Python
mit
OpenSpace/OpenSpace,OpenSpace/OpenSpace,OpenSpace/OpenSpace,OpenSpace/OpenSpace
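How the product(*repeat(("ON", "OFF"), len(modules))) expansion behaves for a small module list; the module names below are examples only, not the repository's actual list.

from itertools import product, repeat

modules = ['globebrowsing', 'volume']   # example names only
for setting in product(*repeat(("ON", "OFF"), len(modules))):
    cmd = ["cmake", "-DOPENSPACE_MODULE_BASE=ON"]
    cmd += ["-DOPENSPACE_MODULE_%s=%s" % (m.upper(), s)
            for m, s in zip(modules, setting)]
    print(cmd)   # four flag combinations for two modules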
f5fd283497afb5030632108ce692e8acde526188
datalake_ingester/reporter.py
datalake_ingester/reporter.py
import boto.sns import simplejson as json import logging from memoized_property import memoized_property import os from datalake_common.errors import InsufficientConfiguration class SNSReporter(object): '''report ingestion events to SNS''' def __init__(self, report_key): self.report_key = report_key self.logger = logging.getLogger(self._log_name) @classmethod def from_config(cls): report_key = os.environ.get('DATALAKE_REPORT_KEY') if report_key is None: raise InsufficientConfiguration('Please configure a report_key') return cls(report_key) @property def _log_name(self): return self.report_key.split(':')[-1] @memoized_property def _connection(self): region = os.environ.get('AWS_REGION') if region: return boto.sns.connect_to_region(region) else: return boto.connect_sns() def report(self, ingestion_report): message = json.dumps(ingestion_report) self.logger.info('REPORTING: %s', message) self._connection.publish(topic=self.report_key, message=message)
import boto.sns import simplejson as json import logging from memoized_property import memoized_property import os class SNSReporter(object): '''report ingestion events to SNS''' def __init__(self, report_key): self.report_key = report_key self.logger = logging.getLogger(self._log_name) @classmethod def from_config(cls): report_key = os.environ.get('DATALAKE_REPORT_KEY') if report_key is None: return None return cls(report_key) @property def _log_name(self): return self.report_key.split(':')[-1] @memoized_property def _connection(self): region = os.environ.get('AWS_REGION') if region: return boto.sns.connect_to_region(region) else: return boto.connect_sns() def report(self, ingestion_report): message = json.dumps(ingestion_report) self.logger.info('REPORTING: %s', message) self._connection.publish(topic=self.report_key, message=message)
Allow the ingester to work without a report key
Allow the ingester to work without a report key
Python
apache-2.0
planetlabs/datalake-ingester,planetlabs/atl,planetlabs/datalake,planetlabs/datalake,planetlabs/datalake,planetlabs/datalake
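After this change callers have to treat the reporter as optional; a trimmed-down, runnable sketch of that calling pattern (the class body here is a stub, not the real SNS wiring).

import os

class SNSReporter(object):
    def __init__(self, report_key):
        self.report_key = report_key

    @classmethod
    def from_config(cls):
        report_key = os.environ.get('DATALAKE_REPORT_KEY')
        if report_key is None:
            return None          # reporting becomes a no-op instead of an error
        return cls(report_key)

reporter = SNSReporter.from_config()
if reporter is not None:
    print('reporting to %s' % reporter.report_key)
else:
    print('DATALAKE_REPORT_KEY not set; ingestion reports disabled')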
2eab4c48962da52766c3d6f8051ad87aa505a90c
bonfiremanager/models.py
bonfiremanager/models.py
from django.db import models class Event(models.Model): name = models.CharField(max_length=1024, unique=True) slug = models.SlugField(max_length=1024) def __str__(self): return self.name class TimeSlot(models.Model): event = models.ForeignKey(Event) bookable = models.BooleanField(default=True) end = models.DateTimeField() name = models.CharField(max_length=1024) start = models.DateTimeField() def __str__(self): return "{0} ({1})".format(self.name, self.event) class Room(models.Model): event = models.ForeignKey(Event) directions = models.TextField() name = models.CharField(max_length=1024) def __str__(self): return "{0} ({1})".format(self.name, self.event) class Talk(models.Model): room = models.ForeignKey(Room, null=True, blank=True) timeslot = models.ForeignKey(TimeSlot, null=True, blank=True) description = models.TextField() slug = models.SlugField(max_length=1024) title = models.CharField(max_length=1024, unique=True) def __str__(self): return "{0} in {1}".format(self.title, self.room)
from django.db import models class Event(models.Model): name = models.CharField(max_length=1024, unique=True) slug = models.SlugField(max_length=1024) def __str__(self): return self.name class TimeSlot(models.Model): event = models.ForeignKey(Event) bookable = models.BooleanField(default=True) end = models.DateTimeField() name = models.CharField(max_length=1024) start = models.DateTimeField() def __str__(self): return "{0} ({1})".format(self.name, self.event) class Room(models.Model): event = models.ForeignKey(Event) directions = models.TextField() name = models.CharField(max_length=1024) def __str__(self): return "{0} ({1})".format(self.name, self.event) class Talk(models.Model): room = models.ForeignKey(Room, null=True, blank=True) timeslot = models.ForeignKey(TimeSlot, null=True, blank=True) description = models.TextField() slug = models.SlugField(max_length=1024) title = models.CharField(max_length=1024, unique=True) def __str__(self): return "{0} in {1} at {2}".format(self.title, self.room, self.timeslot)
Update Talk model __str__ to include time
Update Talk model __str__ to include time
Python
agpl-3.0
yamatt/bonfiremanager
f45fc8854647754b24df5f9601920368cd2d3c49
tests/chainerx_tests/unit_tests/test_cuda.py
tests/chainerx_tests/unit_tests/test_cuda.py
import pytest from chainerx import _cuda try: import cupy except Exception: cupy = None class CupyTestMemoryHook(cupy.cuda.memory_hook.MemoryHook): name = 'CupyTestMemoryHook' def __init__(self): self.used_bytes = 0 self.acquired_bytes = 0 def alloc_preprocess(self, **kwargs): self.acquired_bytes += kwargs['mem_size'] def malloc_preprocess(self, **kwargs): self.used_bytes += kwargs['mem_size'] @pytest.mark.cuda() def test_cupy_share_allocator(): with CupyTestMemoryHook() as hook: cp_allocated = cupy.arange(10) used_bytes = hook.used_bytes acquired_bytes = hook.acquired_bytes # Create a new array after changing the allocator to the memory pool # of ChainerX and make sure that no additional memory has been # allocated by CuPy. _cuda.cupy_share_allocator() chx_allocated = cupy.arange(10) cupy.testing.assert_array_equal(cp_allocated, chx_allocated) assert used_bytes == hook.used_bytes assert acquired_bytes == hook.acquired_bytes
import pytest from chainerx import _cuda try: import cupy except Exception: cupy = None class CupyTestMemoryHook(cupy.cuda.memory_hook.MemoryHook): name = 'CupyTestMemoryHook' def __init__(self): self.used_bytes = 0 self.acquired_bytes = 0 def alloc_preprocess(self, **kwargs): self.acquired_bytes += kwargs['mem_size'] def malloc_preprocess(self, **kwargs): self.used_bytes += kwargs['mem_size'] @pytest.mark.cuda() def test_cupy_share_allocator(): with CupyTestMemoryHook() as hook: cp_allocated = cupy.arange(10) used_bytes = hook.used_bytes acquired_bytes = hook.acquired_bytes assert used_bytes > 0 assert acquired_bytes > 0 # Create a new array after changing the allocator to the memory pool # of ChainerX and make sure that no additional memory has been # allocated by CuPy. _cuda.cupy_share_allocator() chx_allocated = cupy.arange(10) cupy.testing.assert_array_equal(cp_allocated, chx_allocated) assert used_bytes == hook.used_bytes assert acquired_bytes == hook.acquired_bytes
Add safety checks in test
Add safety checks in test
Python
mit
wkentaro/chainer,hvy/chainer,niboshi/chainer,okuta/chainer,chainer/chainer,wkentaro/chainer,chainer/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,hvy/chainer,pfnet/chainer,hvy/chainer,chainer/chainer,keisuke-umezawa/chainer,okuta/chainer,chainer/chainer,tkerola/chainer,keisuke-umezawa/chainer,wkentaro/chainer,niboshi/chainer,hvy/chainer,okuta/chainer,niboshi/chainer,niboshi/chainer,okuta/chainer,wkentaro/chainer
429f38497da0fd520e5bc5bd82e6d4ed5a405521
real_estate_agency/real_estate_agency/views.py
real_estate_agency/real_estate_agency/views.py
from django.shortcuts import render, render_to_response from django.template import RequestContext from new_buildings.models import Builder, ResidentalComplex, NewApartment from new_buildings.forms import SearchForm from feedback.models import Feedback def corporation_benefit_plan(request): return render(request, 'corporation_benefit_plan.html') def index(request): # Only 2 requests to DB feedbacks = Feedback.objects.all()[:4].select_related().prefetch_related('social_media_links') # Only 2 requests to DB residental_complexes = ResidentalComplex.objects.filter( is_popular=True).prefetch_related('type_of_complex') context = { 'feedbacks': feedbacks, 'form': SearchForm, 'residental_complexes': residental_complexes, } return render(request, 'index.html', context, ) def privacy_policy(request): return render(request, 'privacy_policy.html') def thanks(request): return render(request, 'thanks.html')
from django.shortcuts import render from new_buildings.models import ResidentalComplex from new_buildings.forms import NewBuildingsSearchForm from feedback.models import Feedback def corporation_benefit_plan(request): return render(request, 'corporation_benefit_plan.html') def index(request): # Only 2 requests to DB feedbacks = Feedback.objects.all( )[:4].select_related().prefetch_related('social_media_links') # Only 2 requests to DB residental_complexes = ResidentalComplex.objects.filter( is_popular=True).prefetch_related('type_of_complex') context = { 'feedbacks': feedbacks, 'form': NewBuildingsSearchForm, 'residental_complexes': residental_complexes, } return render(request, 'index.html', context, ) def privacy_policy(request): return render(request, 'privacy_policy.html') def thanks(request): return render(request, 'thanks.html')
Use NewBuildingsSearchForm as main page search form.
Use NewBuildingsSearchForm as main page search form instead of the non-complete SearchForm.
Python
mit
Dybov/real_estate_agency,Dybov/real_estate_agency,Dybov/real_estate_agency
d791b593dbf3d6505bf9eac8766aaf0b7f22c721
launch_instance.py
launch_instance.py
# License under the MIT License - see LICENSE import boto.ec2 import os import time def launch(key_name=None, region='us-west-2', image_id='ami-5189a661', instance_type='t2.micro', security_groups='launch-wizard-1', user_data=None, initial_check=True): ''' ''' if not isinstance(security_groups, list): security_groups = [security_groups] ec2 = boto.ec2.connect_to_region(region) reserve = ec2.run_instances(image_id, key_name=key_name, instance_type=instance_type, security_groups=security_groups, user_data=user_data) inst = reserve.instances[0] while inst.state == u'pending': time.sleep(10) inst.update() if initial_check: # Wait for the status checks first status = ec2.get_all_instance_status(instance_ids=[inst.id])[0] check_stat = "Status:initializing" while str(status.system_status) == check_stat and str(status.instance_status) == check_stat: time.sleep(10) status = ec2.get_all_instance_status(instance_ids=[inst.id])[0] return inst # ec2.get_instance_attribute('i-336b69f6', 'instanceType')
# License under the MIT License - see LICENSE import boto.ec2 import os import time def launch(key_name=None, region='us-west-2', image_id='ami-5189a661', instance_type='t2.micro', security_groups='launch-wizard-1', user_data=None, initial_check=False): ''' ''' if not isinstance(security_groups, list): security_groups = [security_groups] ec2 = boto.ec2.connect_to_region(region) reserve = ec2.run_instances(image_id, key_name=key_name, instance_type=instance_type, security_groups=security_groups, user_data=user_data) inst = reserve.instances[0] while inst.state == u'pending': time.sleep(10) inst.update() if initial_check: # Wait for the status checks first status = ec2.get_all_instance_status(instance_ids=[inst.id])[0] check_stat = "Status:initializing" while str(status.system_status) == check_stat and str(status.instance_status) == check_stat: time.sleep(10) status = ec2.get_all_instance_status(instance_ids=[inst.id])[0] return inst # ec2.get_instance_attribute('i-336b69f6', 'instanceType')
Disable the extra check by default
Disable the extra check by default
Python
mit
Astroua/aws_controller,Astroua/aws_controller
af74ee7ee8644392eacca207b4344de2e08105d7
addon.py
addon.py
import xbmc,xbmcaddon,xbmcgui,json,random def getAllMovies(): rpccmd = {'jsonrpc': '2.0', 'method': 'VideoLibrary.GetMovies', 'params': { 'properties': [ 'file' ] }, 'id': 'libMovies'} rpccmd = json.dumps(rpccmd) result = xbmc.executeJSONRPC(rpccmd) result = json.loads(result) return result addon = xbmcaddon.Addon() addonName = addon.getAddonInfo('name') addonIcon = addon.getAddonInfo('icon') movies = getAllMovies() movie = random.choice(movies['result']['movies']) time = 5000 xbmc.executebuiltin('PlayMedia(%s)'%(movie['file'])) xbmc.executebuiltin('Notification(%s, %s %s, %d, %s)'%(addonName,"Playing ",movie['label'],time,addonIcon))
import xbmc,xbmcaddon,xbmcgui,json,random def getAllMovies(): # TODO: determine all/unwatched/watched from settings... # rpccmd = {'jsonrpc': '2.0', 'method': 'VideoLibrary.GetMovies', 'params': { 'filter': { 'field': 'playcount', 'operator': 'lessthan', 'value': '1' }, 'properties': [ 'file' ] }, 'id': 'libMovies'} rpccmd = {'jsonrpc': '2.0', 'method': 'VideoLibrary.GetMovies', 'params': { 'properties': [ 'file' ] }, 'id': 'libMovies'} rpccmd = json.dumps(rpccmd) result = xbmc.executeJSONRPC(rpccmd) result = json.loads(result) return result addon = xbmcaddon.Addon() addonName = addon.getAddonInfo('name') addonIcon = addon.getAddonInfo('icon') movies = getAllMovies() movie = random.choice(movies['result']['movies']) time = 5000 xbmc.executebuiltin('PlayMedia(%s)'%(movie['file'])) xbmc.executebuiltin('Notification(%s, %s %s, %d, %s)'%(addonName,"Playing ",movie['label'],time,addonIcon))
Add todo for future work
Add todo for future work
Python
mit
nagilum/script.rndmov
667f92e8686fd1eb004a7c608acb45c70a9dd2f0
lib/rapidsms/message.py
lib/rapidsms/message.py
#!/usr/bin/env python # vim: ai ts=4 sts=4 et sw=4 import copy class Message(object): def __init__(self, backend, caller=None, text=None): self._backend = backend self.caller = caller self.text = text # initialize some empty attributes self.received = None self.sent = None self.responses = [] def __unicode__(self): return self.text @property def backend(self): # backend is read-only, since it's an # immutable property of this object return self._backend def send(self): """Send this message via self.backend, returning True if the message was sent successfully.""" return self.backend.router.outgoing(self) def flush_responses (self): for response in self.responses: response.send() def respond(self, text): """Send the given text back to the original caller of this message on the same route that it came in on""" if self.caller: response = copy.copy(self) response.text = text self.responses.append(response) return True else: return False
#!/usr/bin/env python # vim: ai ts=4 sts=4 et sw=4 import copy class Message(object): def __init__(self, backend, caller=None, text=None): self._backend = backend self.caller = caller self.text = text self.responses = [] def __unicode__(self): return self.text @property def backend(self): # backend is read-only, since it's an # immutable property of this object return self._backend def send(self): """Send this message via self.backend, returning True if the message was sent successfully.""" return self.backend.router.outgoing(self) def flush_responses (self): for response in self.responses: response.send() self.responses.remove(response) def respond(self, text): """Send the given text back to the original caller of this message on the same route that it came in on""" if self.caller: response = copy.copy(self) response.text = text self.responses.append(response) return True else: return False
Remove unused attributes; also, empty the responses list after it is flushed.
Remove unused attributes; also, empty the responses list after it is flushed.
Python
bsd-3-clause
lsgunth/rapidsms,rapidsms/rapidsms-core-dev,dimagi/rapidsms-core-dev,ken-muturi/rapidsms,ehealthafrica-ci/rapidsms,catalpainternational/rapidsms,eHealthAfrica/rapidsms,dimagi/rapidsms-core-dev,caktus/rapidsms,catalpainternational/rapidsms,rapidsms/rapidsms-core-dev,lsgunth/rapidsms,ehealthafrica-ci/rapidsms,peterayeni/rapidsms,caktus/rapidsms,catalpainternational/rapidsms,unicefuganda/edtrac,peterayeni/rapidsms,peterayeni/rapidsms,ehealthafrica-ci/rapidsms,unicefuganda/edtrac,ken-muturi/rapidsms,dimagi/rapidsms,caktus/rapidsms,peterayeni/rapidsms,lsgunth/rapidsms,eHealthAfrica/rapidsms,dimagi/rapidsms,eHealthAfrica/rapidsms,ken-muturi/rapidsms,unicefuganda/edtrac,lsgunth/rapidsms,catalpainternational/rapidsms
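A general-purpose sketch of draining a list of responses, not the project's Message class: iterating over a snapshot (or clearing after the loop) avoids the classic pitfall where removing items from the list being iterated skips every other element.

class Response(object):
    def __init__(self, text):
        self.text = text
    def send(self):
        print('sending: %s' % self.text)

responses = [Response('a'), Response('b'), Response('c')]

for response in list(responses):   # iterate over a snapshot of the list
    response.send()
    responses.remove(response)

print(len(responses))   # 0: every response was sent and removed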
fe317230b6d2636b8a736c63be7769dd82663914
libraries/SwitchManager.py
libraries/SwitchManager.py
class SwitchManager(object): def extract_all_nodes(self, content): return [e['node'] for e in content['nodeProperties']] def extract_all_properties(self, content): pass
""" Library for the robot based system test tool of the OpenDaylight project. Authors: Baohua Yang@IBM, Denghui Huang@IBM Updated: 2013-11-10 """ class SwitchManager(object): def extract_all_nodes(self, content): """ Return all nodes. """ if isinstance(content,dict) or not content.has_key('nodeProperties'): return None else: return [e.get('node') for e in content['nodeProperties']] def extract_all_properties(self, content): pass
Add input check when getting all nodes.
Add input check when getting all nodes.
Python
epl-1.0
yeasy/robot_tool
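A self-contained sketch of the guard plus extraction; the payload shape is inferred from the keys the method reads (it is an assumption, not the controller's documented response), and the membership test is written with `in` so the snippet also runs on Python 3.

def extract_all_nodes(content):
    # Reject anything that is not a dict carrying 'nodeProperties'.
    if not isinstance(content, dict) or 'nodeProperties' not in content:
        return None
    return [e.get('node') for e in content['nodeProperties']]

sample = {'nodeProperties': [{'node': {'id': '00:00:00:00:00:00:00:01', 'type': 'OF'}},
                             {'node': {'id': '00:00:00:00:00:00:00:02', 'type': 'OF'}}]}
print(extract_all_nodes(sample))   # the two node dicts
print(extract_all_nodes(None))     # None
print(extract_all_nodes({}))       # None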
99177cdc64bdec740557007800b610bff07ce46a
shivyc.py
shivyc.py
#!/usr/bin/env python3 """Main executable for ShivyC compiler For usage, run "./shivyc.py --help". """ import argparse def get_arguments(): """Set up the argument parser and return an object storing the argument values. return - An object storing argument values, as returned by argparse.parse_args() """ parser = argparse.ArgumentParser(description="Compile C files.") # The file name of the C file to compile. The file name gets saved to the # file_name attribute of the returned object, but this parameter appears as # "filename" (no underscore) on the command line. parser.add_argument("file_name", metavar="filename") return parser.parse_args() def compile_code(source: str) -> str: """Compile the provided source code into assembly. source - The C source code to compile. return - The asm output """ return source def main(): """Load the input files, and dispatch to the compile function for the main processing. """ arguments = get_arguments() try: c_file = open(arguments.file_name) except IOError: print("shivyc: error: no such file or directory: '{}'" .format(arguments.file_name)) else: compile_code(c_file.read()) c_file.close() if __name__ == "__main__": main()
#!/usr/bin/env python3 """Main executable for ShivyC compiler For usage, run "./shivyc.py --help". """ import argparse def get_arguments(): """Set up the argument parser and return an object storing the argument values. return - An object storing argument values, as returned by argparse.parse_args() """ parser = argparse.ArgumentParser(description="Compile C files.") # The file name of the C file to compile. The file name gets saved to the # file_name attribute of the returned object, but this parameter appears as # "filename" (no underscore) on the command line. parser.add_argument("file_name", metavar="filename") return parser.parse_args() def compile_code(source: str) -> str: """Compile the provided source code into assembly. source - The C source code to compile. return - The asm output """ return source def main(): """Load the input files and dispatch to the compile function for the main processing. The main function handles interfacing with the user, like reading the command line arguments, printing errors, and generating output files. The compilation logic is in the compile_code function to facilitate testing. """ arguments = get_arguments() try: c_file = open(arguments.file_name) except IOError: print("shivyc: error: no such file or directory: '{}'" .format(arguments.file_name)) else: compile_code(c_file.read()) c_file.close() if __name__ == "__main__": main()
Improve commenting on main function
Improve commenting on main function
Python
mit
ShivamSarodia/ShivyC,ShivamSarodia/ShivyC,ShivamSarodia/ShivyC
a34c9628c3f383e7b6f5eb521a9493f2b51d8811
plata/reporting/views.py
plata/reporting/views.py
from decimal import Decimal import StringIO from django.contrib.admin.views.decorators import staff_member_required from django.http import HttpResponse from django.shortcuts import get_object_or_404 from pdfdocument.utils import pdf_response import plata import plata.reporting.product import plata.reporting.order @staff_member_required def product_xls(request): output = StringIO.StringIO() workbook = plata.reporting.product.product_xls() workbook.save(output) response = HttpResponse(output.getvalue(), mimetype='application/vnd.ms-excel') response['Content-Disposition'] = 'attachment; filename=products.xls' return response @staff_member_required def order_pdf(request, order_id): order = get_object_or_404(plata.shop_instance().order_model, pk=order_id) order.shipping_cost = 8 / Decimal('1.076') order.shipping_discount = 0 order.recalculate_total(save=False) pdf, response = pdf_response('order-%09d' % order.id) plata.reporting.order.order_pdf(pdf, order) return response
from decimal import Decimal import StringIO from django.contrib.admin.views.decorators import staff_member_required from django.http import HttpResponse from django.shortcuts import get_object_or_404 from pdfdocument.utils import pdf_response import plata import plata.reporting.product import plata.reporting.order @staff_member_required def product_xls(request): output = StringIO.StringIO() workbook = plata.reporting.product.product_xls() workbook.save(output) response = HttpResponse(output.getvalue(), mimetype='application/vnd.ms-excel') response['Content-Disposition'] = 'attachment; filename=products.xls' return response @staff_member_required def order_pdf(request, order_id): order = get_object_or_404(plata.shop_instance().order_model, pk=order_id) pdf, response = pdf_response('order-%09d' % order.id) plata.reporting.order.order_pdf(pdf, order) return response
Remove hardcoded shipping modification in order PDF view
Remove hardcoded shipping modification in order PDF view
Python
bsd-3-clause
stefanklug/plata,armicron/plata,armicron/plata,allink/plata,armicron/plata
c2e6371f30b22c242fc1f4f60fdc3856cdb83514
scripts/commandsocket.py
scripts/commandsocket.py
import RPi.GPIO as GPIO import time from socketIO_client import SocketIO, LoggingNamespace socketIO = SocketIO('localhost:3000') def onCommand(*args): print(args[0].split('+')) while (True): socketIO.on("commands", onCommand) socketIO.wait(seconds=1) socketIO.off("sequencePi")
import RPi.GPIO as GPIO import time from socketIO_client import SocketIO, LoggingNamespace socketIO = SocketIO('localhost:3000') def onCommand(*args): print(args[0].split('+')[0]) while (True): socketIO.on("commands", onCommand) socketIO.wait(seconds=1) socketIO.off("sequencePi")
Print first element of array first
Print first element of array first
Python
mit
willdavidc/piel,willdavidc/piel,willdavidc/piel,willdavidc/piel,willdavidc/piel
78066748ef61554acd05e8776161b0ac7eb654cc
bootstrap/hooks.py
bootstrap/hooks.py
# coding: utf-8 from os.path import join, dirname, pardir, abspath import subprocess BOOTSTRAP = abspath(dirname(__file__)) ROOT = abspath(join(BOOTSTRAP, pardir)) # Path where venv will be created. It's imported by bootstrapX.Y.py VIRTUALENV = abspath(join(BOOTSTRAP, pardir)) ACTIVATE = join(VIRTUALENV, 'bin', 'activate_this.py') WITH_VENV = join(BOOTSTRAP, 'with_venv.sh') def with_venv(*args): """ Runs the given command inside virtualenv. """ cmd = list(args) cmd.insert(0, WITH_VENV) return subprocess.call(cmd) def after_install(options, home_dir): with_venv('pip', 'install', '-r', join(ROOT, 'requirements.txt')) print "Done! Activate your virtualenv: source bin/activate"
# coding: utf-8 from os.path import join, dirname, pardir, abspath from shutil import copy import subprocess BOOTSTRAP = abspath(dirname(__file__)) ROOT = abspath(join(BOOTSTRAP, pardir)) # Path where venv will be created. It's imported by bootstrapX.Y.py VIRTUALENV = abspath(join(BOOTSTRAP, pardir)) ACTIVATE = join(VIRTUALENV, 'bin', 'activate_this.py') WITH_VENV = join(BOOTSTRAP, 'with_venv.sh') def with_venv(*args): """ Runs the given command inside virtualenv. """ cmd = list(args) cmd.insert(0, WITH_VENV) return subprocess.call(cmd) def after_install(options, home_dir): copy(join(BOOTSTRAP, 'postactivate'), VIRTUALENV) with_venv('pip', 'install', '-r', join(ROOT, 'requirements.txt')) print "Done! Activate your virtualenv: source bin/activate"
Copy postactivate file to VIRTUALENV directory.
Copy postactivate file to VIRTUALENV directory.
Python
mit
henriquebastos/virtualenv-bootstrap,henriquebastos/virtualenv-bootstrap
3d3853d15e8a497bd104ae816498509cf8143662
number_to_words.py
number_to_words.py
class NumberToWords(object): """ Class for converting positive integer values to a textual representation of the submitted number for value of 0 up to 999999999. """ MAX = 999999999 SMALL_NUMBERS = ['', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'ten', 'eleven', 'twelve', 'thirteen', 'fourteen', 'fifteen', 'sixteen', 'seventeen', 'eighteen', 'nineteen'] TENS = ['', '', 'twenty', 'thirty', 'fourty', 'fifty', 'sixty', 'seventy', 'eighty', 'ninety'] LARGE_NUMBERS = ['', 'thousand', 'million'] def convert(self, number): """ Take an integer and return it converted to a textual representation. Args: number (int): The number to be converted. Returns: sentence (string): The textual representation of `number`. """
class NumberToWords(object): """ Class for converting positive integer values to a textual representation of the submitted number for value of 0 up to 999999999. """ MAX = 999999999 SMALL_NUMBERS = ['', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'ten', 'eleven', 'twelve', 'thirteen', 'fourteen', 'fifteen', 'sixteen', 'seventeen', 'eighteen', 'nineteen'] TENS = ['', '', 'twenty', 'thirty', 'fourty', 'fifty', 'sixty', 'seventy', 'eighty', 'ninety'] LARGE_NUMBERS = ['', 'thousand', 'million'] EXCEPTION_STRING = "This method expects positive integer values between " \ + "0 and {0}".format(MAX) def convert(self, number): """ Take an integer and return it converted to a textual representation. Args: number (int): The number to be converted. Returns: sentence (string): The textual representation of `number`. Raises: ValueError: If `number` is not a positive integer or is greater than `MAX`. """ if not isinstance(number, (int, long)): raise ValueError(self.EXCEPTION_STRING)
Add check to convert() so that only integers are acceptable input
Add check to convert() so that only integers are acceptable input - Using python 2.7 so check for both `int` and `long` - Update function definition to document expected exception conditions and exception type.
Python
mit
ianfieldhouse/number_to_words
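The isinstance check relies on Python 2's long; here is a hedged sketch of a version-tolerant variant that also applies the 0..MAX bound implied by EXCEPTION_STRING. The bool exclusion is an extra precaution, not part of the commit.

try:
    integer_types = (int, long)   # Python 2
except NameError:
    integer_types = (int,)        # Python 3: long no longer exists

MAX = 999999999

def check_convertible(number):
    valid = (isinstance(number, integer_types)
             and not isinstance(number, bool)
             and 0 <= number <= MAX)
    if not valid:
        raise ValueError("This method expects positive integer values "
                         "between 0 and %d" % MAX)

check_convertible(12345)       # passes silently
# check_convertible('12345')   # would raise ValueError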
cb3240d07ed44349687128c4aa49459f08030c09
spoppy.py
spoppy.py
import logging import traceback import click # Ignore error, logging set up in logging utils from spoppy import logging_utils from spoppy.navigation import Leifur logger = logging.getLogger(__name__) @click.command() @click.argument('username', envvar='SPOPPY_USERNAME') @click.argument('password', envvar='SPOPPY_PASSWORD') @click.option('--debug', default=False) def main(username, password, debug): navigator = Leifur(username, password) if debug: try: navigator.start() except Exception: traceback.print_exc() logger.error(traceback.format_exc()) finally: navigator.shutdown() logger.debug('Finally, bye!') else: try: navigator.start() finally: navigator.shutdown() logger.debug('Finally, bye!') if __name__ == '__main__': main(auto_envvar_prefix='SPOPPY', standalone_mode=False)
import logging import traceback import click # Ignore error, logging set up in logging utils from spoppy import logging_utils from spoppy.navigation import Leifur logger = logging.getLogger(__name__) @click.command() @click.argument('username', envvar='SPOPPY_USERNAME') @click.argument('password', envvar='SPOPPY_PASSWORD') @click.option('--debug', default=False) def main(username, password, debug): navigator = Leifur(username, password) if debug: try: navigator.start() except Exception: traceback.print_exc() logger.error(traceback.format_exc()) finally: navigator.shutdown() logger.debug('Finally, bye!') else: try: navigator.start() finally: navigator.shutdown() logger.debug('Finally, bye!') if __name__ == '__main__': try: main(standalone_mode=False) except click.exceptions.MissingParameter: click.echo( 'You must either set the SPOPPY_USERNAME and SPOPPY_PASSWORD ' 'environment variables or add username and password to your ' 'the script\'s parameters!' )
Fix error when trying to start without user/pass
Fix error when trying to start without user/pass This fixes #25
Python
mit
sindrig/spoppy,sindrig/spoppy
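A pared-down, runnable illustration of catching MissingParameter when click runs with standalone_mode=False (click installed is assumed; the command below is a stand-in, not spoppy itself).

import click

@click.command()
@click.argument('username', envvar='SPOPPY_USERNAME')
def hello(username):
    click.echo('hello %s' % username)

if __name__ == '__main__':
    try:
        # With standalone_mode=False click raises instead of printing usage and exiting.
        hello(standalone_mode=False)
    except click.exceptions.MissingParameter:
        click.echo('Set SPOPPY_USERNAME or pass a username argument.')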
92b7f334907bdd4bce3593eb9faee0dc0ae3ef8f
testing/test_get_new.py
testing/test_get_new.py
from __future__ import absolute_import, print_function from ..pyautoupdate.launcher import Launcher from .pytest_skipif import needinternet from .pytest_makevers import fixture_update_dir import os @needinternet def test_check_vers_update(fixture_update_dir): package=fixture_update_dir("0.0.1") launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/') launch._get_new() with open(os.path.abspath("downloads/blah.py"), "r") as file_code: file_text=file_code.read() assert "new version" in file_text
from __future__ import absolute_import, print_function from ..pyautoupdate.launcher import Launcher from .pytest_skipif import needinternet from .pytest_makevers import fixture_update_dir import pytest import os @pytest.mark.trylast @needinternet def test_check_vers_update(fixture_update_dir): package=fixture_update_dir("0.0.1") launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/') launch._get_new() with open(os.path.abspath("downloads/blah.py"), "r") as file_code: file_text=file_code.read() assert "new version" in file_text
Mark get new test as needing to be last
Mark get new test as needing to be last
Python
lgpl-2.1
rlee287/pyautoupdate,rlee287/pyautoupdate
7b1a0022b41dbf17de352e4686458e5250b28e49
quantityfield/widgets.py
quantityfield/widgets.py
import re from django.forms.widgets import MultiWidget, Select, NumberInput from . import ureg class QuantityWidget(MultiWidget): def get_choices(self, allowed_types=None): allowed_types = allowed_types or dir(ureg) return [(x, x) for x in allowed_types] def __init__(self, attrs=None, base_units=None, allowed_types=None): choices = self.get_choices(allowed_types) self.base_units = base_units attrs = attrs or {} attrs.setdefault('step', 'any') widgets = ( NumberInput(attrs=attrs), Select(attrs=attrs, choices=choices) ) super(QuantityWidget, self).__init__(widgets, attrs) def decompress(self, value): non_decimal = re.compile(r'[^\d.]+') if value: number_value = non_decimal.sub('', str(value)) return [number_value, self.base_units] return [None, self.base_units]
import re from django.forms.widgets import MultiWidget, Select, NumberInput from . import ureg class QuantityWidget(MultiWidget): def get_choices(self, allowed_types=None): allowed_types = allowed_types or dir(ureg) return [(x, x) for x in allowed_types] def __init__(self, attrs=None, base_units=None, allowed_types=None): choices = self.get_choices(allowed_types) self.base_units = base_units attrs = attrs or {} attrs.setdefault('step', 'any') widgets = ( NumberInput(attrs=attrs), Select(attrs=attrs, choices=choices) ) super(QuantityWidget, self).__init__(widgets, attrs) def decompress(self, value): non_decimal = re.compile(r'[^\d.]+') if value: number_value = non_decimal.sub('', str(value)) return [number_value, self.base_units] return [None, self.base_units]
Fix indentation error from conversion to spaces
Fix indentation error from conversion to spaces
Python
mit
bharling/django-pint,bharling/django-pint
9ced61716167505875d3938ae01c08b61acc9392
randterrainpy/terrain.py
randterrainpy/terrain.py
"""This module is for the Terrain class, used for storing randomly generated terrain.""" class Terrain(object): """Container for a randomly generated area of terrain. Attributes: width (int): Width of generated terrain. length (int): Length of generated terrain. height_map (list): Map of heights of terrain. Values range from 0 to 1. """ def __init__(self, width, length): """Initializer for Terrain. Args: width (int): Width of terrain. length (int): Height of terrain. """ self.width = width self.length = length self.height_map = [[0 for _ in self.width]] * self.length def __getitem__(self, item): """Get an item at x-y coordinates. Args: item (tuple): 2-tuple of x and y coordinates. Returns: float: Height of terrain at coordinates, between 0 and 1. """ return self.height_map[item[1]][item[0]] def __setitem__(self, key, value): """Set the height of an item. Args: key (tuple): 2-tuple of x and y coordinates. value (float): New height of map at x and y coordinates, between 0 and 1. """ self.height_map[key[1]][key[0]] = value
"""This module is for the Terrain class, used for storing randomly generated terrain.""" class Terrain(object): """Container for a randomly generated area of terrain. Attributes: width (int): Width of generated terrain. length (int): Length of generated terrain. height_map (list): Map of heights of terrain. Values range from 0 to 1. """ def __init__(self, width, length): """Initializer for Terrain. Args: width (int): Width of terrain. length (int): Height of terrain. """ self.width = width self.length = length self.height_map = [[0 for _ in self.width]] * self.length def __getitem__(self, item): """Get an item at x-y coordinates. Args: item (tuple): 2-tuple of x and y coordinates. Returns: float: Height of terrain at coordinates, between 0 and 1. """ return self.height_map[item[1]][item[0]] def __setitem__(self, key, value): """Set the height of an item. Args: key (tuple): 2-tuple of x and y coordinates. value (float): New height of map at x and y coordinates, between 0 and 1. """ self.height_map[key[1]][key[0]] = value def __add__(self, other): """Add two terrains, height by height. Args: other (Terrain): Other terrain to add self to. Must have same dimensions as self. Returns: Terrain: Terrain of self and other added together. """ result = Terrain(self.width, self.length) for i in range(self.width): for j in range(self.length): result[i, j] = self[i, j] + other[i, j] return result
Add addition method to Terrain
Add addition method to Terrain
Python
mit
jackromo/RandTerrainPy
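A usage sketch for the new __add__. The committed __init__ iterates over the integer width and reuses one row list, so this sketch swaps in a range-based constructor with independent rows purely to make the example runnable; everything else follows the record above.

class Terrain(object):
    def __init__(self, width, length):
        self.width = width
        self.length = length
        self.height_map = [[0 for _ in range(width)] for _ in range(length)]

    def __getitem__(self, item):
        return self.height_map[item[1]][item[0]]

    def __setitem__(self, key, value):
        self.height_map[key[1]][key[0]] = value

    def __add__(self, other):
        result = Terrain(self.width, self.length)
        for i in range(self.width):
            for j in range(self.length):
                result[i, j] = self[i, j] + other[i, j]
        return result

hills = Terrain(2, 2)
noise = Terrain(2, 2)
hills[0, 0], noise[0, 0] = 0.25, 0.5
combined = hills + noise
print(combined[0, 0])   # 0.75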
2e845dfd2695b1913f4603d88039049fa1eef923
repositories.py
repositories.py
repositories = [ { "owner": "talk-to", "name": "Knock" } ]
REPOSITORIES = [ { "owner": "talk-to", "name": "Knock" } ]
Use capitalised name for constant
Use capitalised name for constant
Python
mit
ayushgoel/LongShot
98b7836d066e52ea75c2253d539edc252af79d1a
pyrseas/cmdargs.py
pyrseas/cmdargs.py
# -*- coding: utf-8 -*- """Utility module for command line argument parsing""" from argparse import ArgumentParser, FileType def parent_parser(): """Create command line argument parser with common PostgreSQL options :return: the created parser """ parser = ArgumentParser(add_help=False) parser.add_argument('dbname', help='database name') group = parser.add_argument_group('Connection options') group.add_argument('-H', '--host', help="database server host or " "socket directory (default %(default)s)") group.add_argument('-p', '--port', type=int, help="database server port " "number (default %(default)s)") group.add_argument('-U', '--username', dest='user', help="database user name (default %(default)s)") group.add_argument('-W', '--password', action="store_true", help="force password prompt") parser.add_argument('-o', '--output', type=FileType('w'), help="output file name (default stdout)") parser.add_argument('--version', action='version', version='%(prog)s 0.4') return parser
# -*- coding: utf-8 -*- """Utility module for command line argument parsing""" from argparse import ArgumentParser, FileType def parent_parser(): """Create command line argument parser with common PostgreSQL options :return: the created parser """ parser = ArgumentParser(add_help=False) parser.add_argument('dbname', help='database name') group = parser.add_argument_group('Connection options') group.add_argument('-H', '--host', help="database server host or " "socket directory (default %(default)s)") group.add_argument('-p', '--port', type=int, help="database server port " "number (default %(default)s)") group.add_argument('-U', '--username', dest='username', help="database user name (default %(default)s)") group.add_argument('-W', '--password', action="store_true", help="force password prompt") parser.add_argument('-o', '--output', type=FileType('w'), help="output file name (default stdout)") parser.add_argument('--version', action='version', version='%(prog)s 0.4') return parser
Change 'username' destination to the 'username' attribute instead of user, since that's what is used in the code.
Change 'username' destination to the 'username' attribute instead of user, since that's what is used in the code.
Python
bsd-3-clause
reedstrm/Pyrseas,dvarrazzo/Pyrseas,perseas/Pyrseas
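What the dest change does in practice: argparse stores the parsed value under whatever attribute dest names, so code reading args.username only works once dest matches. A tiny self-contained demonstration:

from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument('-U', '--username', dest='username',
                    help="database user name")
args = parser.parse_args(['-U', 'postgres'])
print(args.username)   # 'postgres'; with dest='user' this value would be args.user instead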
680b8680f5d2dbc7ccb742cf09bf6f688afcbd96
parse_brewpi_json.py
parse_brewpi_json.py
#!/usr/bin/python import os import os.path import json import re import tabulate def get_json_file(): dir='/var/www/html' json_list = [] for root, dirs, files in os.walk( dir ): for f in files: if f.endswith( '.json' ): json_list.append( os.path.join( root, f ) ) sorted_list = sorted( json_list, key=os.path.getmtime ) return sorted_list[-1] jfile = get_json_file() with open( jfile ) as fh: data = json.load( fh ) rex = re.compile( 'Time|BeerTemp|FridgeTemp|RoomTemp|RedTemp|RedSG' ) col_nums = [] for k,v in enumerate( data[ 'cols' ] ): name = v['id'] if rex.search( name ): col_nums.append( k ) headers = [ data['cols'][i]['id'] for i in col_nums ] rows = [] for row in data['rows']: values = [] for i in col_nums: elem = row['c'][i] val = None if elem is not None: val = elem['v'] values.append( val ) #datalist = [ row['c'][i]['v'] for i in col_nums ] rows.append( values ) print tabulate.tabulate( rows, headers=headers )
#!/usr/bin/python import os import os.path import json import re import tabulate def get_json_file(): dir='/var/www/html' json_list = [] for root, dirs, files in os.walk( dir ): for f in files: if f.endswith( '.json' ): json_list.append( os.path.join( root, f ) ) sorted_list = sorted( json_list, key=os.path.getmtime ) return sorted_list[-1] jfile = get_json_file() print( "\n{0}\n".format( os.path.basename( jfile ) ) ) with open( jfile ) as fh: data = json.load( fh ) rex = re.compile( 'Time|BeerTemp|FridgeTemp|RoomTemp|RedTemp|RedSG' ) col_nums = [] for k,v in enumerate( data[ 'cols' ] ): name = v['id'] if rex.search( name ): col_nums.append( k ) headers = [ data['cols'][i]['id'] for i in col_nums ] rows = [] for row in data['rows']: values = [] for i in col_nums: elem = row['c'][i] val = None if elem is not None: val = elem['v'] values.append( val ) #datalist = [ row['c'][i]['v'] for i in col_nums ] rows.append( values ) print( tabulate.tabulate( rows, headers=headers ) )
Add json filename to output.
Add json filename to output.
Python
mit
andylytical/brewpi-scripts,andylytical/brewpi-scripts
de40597b406b27c64077dc714b5890f83758d05d
multiplication-table.py
multiplication-table.py
""" multiplication-table.py Author: <your name here> Credit: <list sources used, if any> Assignment: Write and submit a Python program that prints a multiplication table. The user must be able to determine the width and height of the table before it is printed. The final multiplication table should look like this: Width of multiplication table: 10 Height of multiplication table: 8 1 2 3 4 5 6 7 8 9 10 2 4 6 8 10 12 14 16 18 20 3 6 9 12 15 18 21 24 27 30 4 8 12 16 20 24 28 32 36 40 5 10 15 20 25 30 35 40 45 50 6 12 18 24 30 36 42 48 54 60 7 14 21 28 35 42 49 56 63 70 8 16 24 32 40 48 56 64 72 80 """
""" multiplication-table.py Author: <your name here> Credit: <list sources used, if any> Assignment: Write and submit a Python program that prints a multiplication table. The user must be prompted to give the width and height of the table before it is printed. The final multiplication table should look like this: Width of multiplication table: 10 Height of multiplication table: 8 1 2 3 4 5 6 7 8 9 10 2 4 6 8 10 12 14 16 18 20 3 6 9 12 15 18 21 24 27 30 4 8 12 16 20 24 28 32 36 40 5 10 15 20 25 30 35 40 45 50 6 12 18 24 30 36 42 48 54 60 7 14 21 28 35 42 49 56 63 70 8 16 24 32 40 48 56 64 72 80 """
Reword about user giving dimensions
Reword about user giving dimensions
Python
mit
HHS-IntroProgramming/Multiplication-table,HHS-IntroProgramming/Multiplication-table
159753e117f5c82838c478e007b1b72248561205
config/ipython_config.py
config/ipython_config.py
# Available Subsitutions: # ${executable}: Path to IHaskell kernel. c = get_config() exe = '${executable}'.replace(' ', '\\\\ ') c.KernelManager.kernel_cmd = [exe, 'kernel', '{connection_file}'] c.Session.key = b'' c.Session.keyfile = b''
# Available Subsitutions: # ${executable}: Path to IHaskell kernel. c = get_config() exe = '${executable}'.replace(' ', '\\\\ ') c.KernelManager.kernel_cmd = [exe, 'kernel', '{connection_file}'] c.Session.key = '' c.Session.keyfile = ''
Change b'' to just ''.
Change b'' to just ''.
Python
mit
wyager/IHaskell,artuuge/IHaskell,kfiz/IHaskell,qzchenwl/IHaskell,aostiles/LiveHaskell,kfiz/IHaskell,FranklinChen/IHaskell,franklx/IHaskell,beni55/IHaskell,beni55/IHaskell,artuuge/IHaskell,wyager/IHaskell,aostiles/LiveHaskell,thomasjm/IHaskell,gibiansky/IHaskell,franklx/IHaskell,artuuge/IHaskell,thomasjm/IHaskell,thomasjm/IHaskell,franklx/IHaskell,qzchenwl/IHaskell,sumitsahrawat/IHaskell,FranklinChen/IHaskell,kfiz/IHaskell,FranklinChen/IHaskell,qzchenwl/IHaskell,beni55/IHaskell,aostiles/LiveHaskell,sumitsahrawat/IHaskell,kfiz/IHaskell,thomasjm/IHaskell,artuuge/IHaskell,gibiansky/IHaskell,gibiansky/IHaskell,FranklinChen/IHaskell,qzchenwl/IHaskell,wyager/IHaskell,wyager/IHaskell,sumitsahrawat/IHaskell,beni55/IHaskell,franklx/IHaskell
aa7109d038a86f6a19a9fb4af96bd1199cd81330
functest/opnfv_tests/openstack/snaps/snaps_utils.py
functest/opnfv_tests/openstack/snaps/snaps_utils.py
# Copyright (c) 2015 All rights reserved # This program and the accompanying materials # are made available under the terms of the Apache License, Version 2.0 # which accompanies this distribution, and is available at # # http://www.apache.org/licenses/LICENSE-2.0 from snaps.openstack.utils import neutron_utils, nova_utils def get_ext_net_name(os_creds): """ Returns the first external network name :param: os_creds: an instance of snaps OSCreds object :return: """ neutron = neutron_utils.neutron_client(os_creds) ext_nets = neutron_utils.get_external_networks(neutron) return ext_nets[0].name if ext_nets else "" def get_active_compute_cnt(os_creds): """ Returns the number of active compute servers :param: os_creds: an instance of snaps OSCreds object :return: the number of active compute servers """ nova = nova_utils.nova_client(os_creds) computes = nova_utils.get_availability_zone_hosts(nova, zone_name='nova') return len(computes)
# Copyright (c) 2015 All rights reserved # This program and the accompanying materials # are made available under the terms of the Apache License, Version 2.0 # which accompanies this distribution, and is available at # # http://www.apache.org/licenses/LICENSE-2.0 from functest.utils.constants import CONST from snaps.openstack.utils import neutron_utils, nova_utils def get_ext_net_name(os_creds): """ Returns the configured external network name or the first retrieved external network name :param: os_creds: an instance of snaps OSCreds object :return: """ neutron = neutron_utils.neutron_client(os_creds) ext_nets = neutron_utils.get_external_networks(neutron) if (hasattr(CONST, 'EXTERNAL_NETWORK')): extnet_config = CONST.__getattribute__('EXTERNAL_NETWORK') for ext_net in ext_nets: if ext_net.name == extnet_config: return extnet_config return ext_nets[0].name if ext_nets else "" def get_active_compute_cnt(os_creds): """ Returns the number of active compute servers :param: os_creds: an instance of snaps OSCreds object :return: the number of active compute servers """ nova = nova_utils.nova_client(os_creds) computes = nova_utils.get_availability_zone_hosts(nova, zone_name='nova') return len(computes)
Support specifying a valid external network name
Support specifying a valid external network name In some deployments, the external network that get_external_networks in Snaps retrieves (by checking "router:external") is not available. So the user needs to be able to specify the available external network through an environment setting. Change-Id: I333e91dd106ed307541a9a197280199fde86bd30 Signed-off-by: Linda Wang <[email protected]>
Python
apache-2.0
opnfv/functest,mywulin/functest,opnfv/functest,mywulin/functest
acd4238dce39464e99964227dca7758cffedca39
gaphor/UML/classes/tests/test_containmentconnect.py
gaphor/UML/classes/tests/test_containmentconnect.py
"""Test connection of containment relationship.""" from gaphor import UML from gaphor.diagram.tests.fixtures import allow, connect from gaphor.UML.classes import PackageItem from gaphor.UML.classes.containment import ContainmentItem def test_containment_package_glue(create): """Test containment glue to two package items.""" pkg1 = create(PackageItem, UML.Package) pkg2 = create(PackageItem, UML.Package) containment = create(ContainmentItem) glued = allow(containment, containment.head, pkg1) assert glued connect(containment, containment.head, pkg1) glued = allow(containment, containment.tail, pkg2) assert glued
"""Test connection of containment relationship.""" from gaphor import UML from gaphor.diagram.tests.fixtures import allow, connect from gaphor.UML.classes import ClassItem, PackageItem from gaphor.UML.classes.containment import ContainmentItem def test_containment_package_glue(create): """Test containment glue to two package items.""" pkg1 = create(PackageItem, UML.Package) pkg2 = create(PackageItem, UML.Package) containment = create(ContainmentItem) glued = allow(containment, containment.head, pkg1) assert glued connect(containment, containment.head, pkg1) glued = allow(containment, containment.tail, pkg2) assert glued def test_containment_package_class(create, diagram): """Test containment connecting to a package and a class.""" package = create(ContainmentItem, UML.Package) line = create(ContainmentItem) ac = create(ClassItem, UML.Class) connect(line, line.head, package) connect(line, line.tail, ac) assert diagram.connections.get_connection(line.tail).connected is ac assert len(package.subject.ownedElement) == 1 assert ac.subject in package.subject.ownedElement
Add test for connecting containment to package and a class
Add test for connecting containment to package and a class [skip ci] Signed-off-by: Dan Yeaw <[email protected]>
Python
lgpl-2.1
amolenaar/gaphor,amolenaar/gaphor
413ba364dc35a7186953d02bb7cc8cf705371873
contentious/constants.py
contentious/constants.py
from django.conf import settings SELF_CLOSING_HTML_TAGS = getattr(settings, 'CONTENTIOUS_SELF_CLOSING_HTML_TAGS', ['img', 'br', 'hr', 'meta']) #Note, the Javascript plugin has its own seprate copy of this: TREAT_CONTENT_AS_HTML_TAGS = getattr(settings, 'CONTENTIOUS_TREAT_CONTENT_AS_HTML_TAGS', ['div', 'select', 'ul'])
from django.conf import settings SELF_CLOSING_HTML_TAGS = ['img', 'br', 'hr', 'meta'] #Note, the Javascript plugin has its own seprate copy of this: TREAT_CONTENT_AS_HTML_TAGS = getattr(settings, 'CONTENTIOUS_TREAT_CONTENT_AS_HTML_TAGS', ['div', 'select', 'ul'])
Remove SELF_CLOSING_HTML_TAGS as a configurable option
Remove SELF_CLOSING_HTML_TAGS as a configurable option
Python
bsd-2-clause
potatolondon/contentious,potatolondon/contentious
182cd3b73382bb150111198e5fcbfa43a6bd416f
cbagent/collectors/libstats/typeperfstats.py
cbagent/collectors/libstats/typeperfstats.py
from cbagent.collectors.libstats.remotestats import RemoteStats, parallel_task class TPStats(RemoteStats): METRICS = ( ("rss", 1), # already in bytes ) def __init__(self, hosts, workers, user, password): super().__init__(hosts, workers, user, password) self.typeperf_cmd = "typeperf \"\\Process(*{}*)\\Working Set\" -sc 1|sed '3q;d'" @parallel_task(server_side=True) def get_samples(self, process): samples = {} if process == "beam.smp": stdout = self.run(self.typeperf_cmd.format("erl")) values = stdout.split(',')[1:5] elif process == "memcached": stdout = self.run(self.typeperf_cmd.format(process)) values = stdout.split(',')[1:2] else: return samples sum_rss = 0 if stdout: for v in values: v = float(v.replace('"', '')) sum_rss += v metric, multiplier = self.METRICS[0] title = "{}_{}".format(process, metric) samples[title] = float(sum_rss) * multiplier return samples
from cbagent.collectors.libstats.remotestats import RemoteStats, parallel_task class TPStats(RemoteStats): METRICS = ( ("rss", 1), # already in bytes ) def __init__(self, hosts, workers, user, password): super().__init__(hosts, workers, user, password) self.typeperf_cmd = "typeperf \"\\Process(*{}*)\\Working Set\" -sc 1|sed '3q;d'" @parallel_task(server_side=True) def get_server_samples(self, process): samples = {} if process == "beam.smp": stdout = self.run(self.typeperf_cmd.format("erl")) values = stdout.split(',')[1:5] elif process == "memcached": stdout = self.run(self.typeperf_cmd.format(process)) values = stdout.split(',')[1:2] else: return samples sum_rss = 0 if stdout: for v in values: v = float(v.replace('"', '')) sum_rss += v metric, multiplier = self.METRICS[0] title = "{}_{}".format(process, metric) samples[title] = float(sum_rss) * multiplier return samples def get_client_samples(self, process): pass
Add missing methods to TPStats
Add missing methods to TPStats Change-Id: I332a83f3816ee30597288180ed344da3161861f8 Reviewed-on: http://review.couchbase.org/79675 Tested-by: Build Bot <[email protected]> Reviewed-by: Pavel Paulau <[email protected]>
Python
apache-2.0
pavel-paulau/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner
61251afc42b53f7e72b13ad0543c4740d61f092e
mezzanine/core/sitemaps.py
mezzanine/core/sitemaps.py
from django.contrib.sitemaps import Sitemap from django.db.models import get_models from mezzanine.conf import settings from mezzanine.core.models import Displayable from mezzanine.utils.urls import home_slug blog_installed = "mezzanine.blog" in settings.INSTALLED_APPS if blog_installed: from mezzanine.blog.models import BlogPost class DisplayableSitemap(Sitemap): """ Sitemap class for Django's sitemaps framework that returns all published items for models that subclass ``Displayable``. """ def items(self): """ Return all published items for models that subclass ``Displayable``, excluding those that point to external sites. """ # Fake homepage object. home = Displayable() setattr(home, "get_absolute_url", home_slug) items = {home.get_absolute_url(): home} for model in get_models(): if issubclass(model, Displayable): for item in (model.objects.published() .exclude(slug__startswith="http://") .exclude(slug__startswith="https://")): items[item.get_absolute_url()] = item return items.values() def lastmod(self, obj): if blog_installed and isinstance(obj, BlogPost): return obj.publish_date
from django.contrib.sitemaps import Sitemap from django.contrib.sites.models import Site from django.db.models import get_models from mezzanine.conf import settings from mezzanine.core.models import Displayable from mezzanine.utils.sites import current_site_id from mezzanine.utils.urls import home_slug blog_installed = "mezzanine.blog" in settings.INSTALLED_APPS if blog_installed: from mezzanine.blog.models import BlogPost class DisplayableSitemap(Sitemap): """ Sitemap class for Django's sitemaps framework that returns all published items for models that subclass ``Displayable``. """ def items(self): """ Return all published items for models that subclass ``Displayable``, excluding those that point to external sites. """ # Fake homepage object. home = Displayable() setattr(home, "get_absolute_url", home_slug) items = {home.get_absolute_url(): home} for model in get_models(): if issubclass(model, Displayable): for item in (model.objects.published() .exclude(slug__startswith="http://") .exclude(slug__startswith="https://")): items[item.get_absolute_url()] = item return items.values() def lastmod(self, obj): if blog_installed and isinstance(obj, BlogPost): return obj.publish_date def get_urls(self, **kwargs): """ Ensure the correct host by injecting the current site. """ kwargs["site"] = Site.objects.get(id=current_site_id()) return super(DisplayableSitemap, self).get_urls(**kwargs)
Add handling for multi-tenancy in sitemap.xml
Add handling for multi-tenancy in sitemap.xml
Python
bsd-2-clause
AlexHill/mezzanine,readevalprint/mezzanine,gradel/mezzanine,wyzex/mezzanine,wrwrwr/mezzanine,wyzex/mezzanine,webounty/mezzanine,christianwgd/mezzanine,nikolas/mezzanine,ryneeverett/mezzanine,tuxinhang1989/mezzanine,molokov/mezzanine,adrian-the-git/mezzanine,mush42/mezzanine,adrian-the-git/mezzanine,Cicero-Zhao/mezzanine,wbtuomela/mezzanine,jjz/mezzanine,fusionbox/mezzanine,molokov/mezzanine,Cajoline/mezzanine,stephenmcd/mezzanine,spookylukey/mezzanine,jjz/mezzanine,Cicero-Zhao/mezzanine,douglaskastle/mezzanine,orlenko/sfpirg,orlenko/plei,fusionbox/mezzanine,cccs-web/mezzanine,spookylukey/mezzanine,stbarnabas/mezzanine,eino-makitalo/mezzanine,promil23/mezzanine,damnfine/mezzanine,geodesign/mezzanine,vladir/mezzanine,orlenko/sfpirg,vladir/mezzanine,wyzex/mezzanine,frankchin/mezzanine,PegasusWang/mezzanine,sjuxax/mezzanine,dsanders11/mezzanine,webounty/mezzanine,tuxinhang1989/mezzanine,frankier/mezzanine,saintbird/mezzanine,christianwgd/mezzanine,orlenko/plei,sjdines/mezzanine,mush42/mezzanine,orlenko/sfpirg,cccs-web/mezzanine,Kniyl/mezzanine,promil23/mezzanine,dustinrb/mezzanine,viaregio/mezzanine,gradel/mezzanine,scarcry/snm-mezzanine,saintbird/mezzanine,SoLoHiC/mezzanine,frankier/mezzanine,adrian-the-git/mezzanine,jjz/mezzanine,SoLoHiC/mezzanine,dsanders11/mezzanine,jerivas/mezzanine,viaregio/mezzanine,ZeroXn/mezzanine,agepoly/mezzanine,theclanks/mezzanine,stephenmcd/mezzanine,damnfine/mezzanine,dovydas/mezzanine,Cajoline/mezzanine,agepoly/mezzanine,tuxinhang1989/mezzanine,gradel/mezzanine,industrydive/mezzanine,PegasusWang/mezzanine,promil23/mezzanine,joshcartme/mezzanine,Skytorn86/mezzanine,scarcry/snm-mezzanine,douglaskastle/mezzanine,wrwrwr/mezzanine,sjuxax/mezzanine,christianwgd/mezzanine,frankchin/mezzanine,dekomote/mezzanine-modeltranslation-backport,industrydive/mezzanine,Skytorn86/mezzanine,wbtuomela/mezzanine,Kniyl/mezzanine,webounty/mezzanine,saintbird/mezzanine,theclanks/mezzanine,emile2016/mezzanine,AlexHill/mezzanine,eino-makitalo/mezzanine,sjdines/mezzanine,theclanks/mezzanine,stbarnabas/mezzanine,vladir/mezzanine,ryneeverett/mezzanine,nikolas/mezzanine,dustinrb/mezzanine,sjdines/mezzanine,sjuxax/mezzanine,geodesign/mezzanine,geodesign/mezzanine,dsanders11/mezzanine,industrydive/mezzanine,damnfine/mezzanine,biomassives/mezzanine,dovydas/mezzanine,ryneeverett/mezzanine,Skytorn86/mezzanine,douglaskastle/mezzanine,eino-makitalo/mezzanine,frankchin/mezzanine,stephenmcd/mezzanine,ZeroXn/mezzanine,dekomote/mezzanine-modeltranslation-backport,biomassives/mezzanine,emile2016/mezzanine,SoLoHiC/mezzanine,wbtuomela/mezzanine,jerivas/mezzanine,orlenko/plei,biomassives/mezzanine,ZeroXn/mezzanine,spookylukey/mezzanine,batpad/mezzanine,batpad/mezzanine,Cajoline/mezzanine,emile2016/mezzanine,viaregio/mezzanine,readevalprint/mezzanine,joshcartme/mezzanine,joshcartme/mezzanine,agepoly/mezzanine,readevalprint/mezzanine,Kniyl/mezzanine,mush42/mezzanine,scarcry/snm-mezzanine,molokov/mezzanine,frankier/mezzanine,jerivas/mezzanine,nikolas/mezzanine,PegasusWang/mezzanine,dovydas/mezzanine,dekomote/mezzanine-modeltranslation-backport,dustinrb/mezzanine
c45fc698da9783b561cca69363ec4998622e9ac0
mint/rest/db/capsulemgr.py
mint/rest/db/capsulemgr.py
# # Copyright (c) 2009 rPath, Inc. # # All Rights Reserved # from conary.lib import util from mint.rest.db import manager import rpath_capsule_indexer class CapsuleManager(manager.Manager): def getIndexerConfig(self): capsuleDataDir = util.joinPaths(self.cfg.dataPath, 'capsules') cfg = rpath_capsule_indexer.IndexerConfig() cfg.configLine("store sqlite:///%s/database.sqlite" % capsuleDataDir) cfg.configLine("indexDir %s/packages" % capsuleDataDir) cfg.configLine("systemsPath %s/systems" % capsuleDataDir) dataSources = self.db.platformMgr.listPlatformSources().platformSource # XXX we only deal with RHN for now if dataSources: cfg.configLine("user RHN %s %s" % (dataSources[0].username, dataSources[0].password)) # XXX channels are hardcoded for now cfg.configLine("channels rhel-i386-as-4") cfg.configLine("channels rhel-x86_64-as-4") cfg.configLine("channels rhel-i386-server-5") cfg.configLine("channels rhel-x86_64-server-5") util.mkdirChain(capsuleDataDir) return cfg def getIndexer(self): cfg = self.getIndexerConfig() return rpath_capsule_indexer.Indexer(cfg)
# # Copyright (c) 2009 rPath, Inc. # # All Rights Reserved # from conary.lib import util from mint.rest.db import manager import rpath_capsule_indexer class CapsuleManager(manager.Manager): def getIndexerConfig(self): capsuleDataDir = util.joinPaths(self.cfg.dataPath, 'capsules') cfg = rpath_capsule_indexer.IndexerConfig() dbDriver = self.db.db.driver dbConnectString = self.db.db.db.database cfg.configLine("store %s:///%s" % (dbDriver, dbConnectString)) cfg.configLine("indexDir %s/packages" % capsuleDataDir) cfg.configLine("systemsPath %s/systems" % capsuleDataDir) dataSources = self.db.platformMgr.listPlatformSources().platformSource # XXX we only deal with RHN for now if dataSources: cfg.configLine("user RHN %s %s" % (dataSources[0].username, dataSources[0].password)) # XXX channels are hardcoded for now cfg.configLine("channels rhel-i386-as-4") cfg.configLine("channels rhel-x86_64-as-4") cfg.configLine("channels rhel-i386-server-5") cfg.configLine("channels rhel-x86_64-server-5") util.mkdirChain(capsuleDataDir) return cfg def getIndexer(self): cfg = self.getIndexerConfig() return rpath_capsule_indexer.Indexer(cfg)
Use the mint database for capsule data
Use the mint database for capsule data
Python
apache-2.0
sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint
97438a877f443980a239c1e960aaf1aa0ddc0469
inventory/migrations/0002_auto_20171121_2043.py
inventory/migrations/0002_auto_20171121_2043.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.2 on 2017-11-21 20:43 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('inventory', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='item', name='place', ), migrations.AddField( model_name='item', name='column', field=models.IntegerField(null=True), ), migrations.AddField( model_name='item', name='row', field=models.IntegerField(null=True), ), ]
# -*- coding: utf-8 -*- # Generated by Django 1.9.2 on 2017-11-21 20:43 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('inventory', '0001_initial'), ] operations = [ migrations.AddField( model_name='item', name='column', field=models.IntegerField(null=True), ), migrations.AddField( model_name='item', name='row', field=models.IntegerField(null=True), ), ]
Fix that awful inventory migration issue
Fix that awful inventory migration issue
Python
mit
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
b4995ae809995a4e530bacb8e8732caf1b10d7ee
py2deb/__init__.py
py2deb/__init__.py
# py2deb: Python to Debian package converter. # # Authors: # - Arjan Verwer # - Peter Odding <[email protected]> # Last Change: March 18, 2015 # URL: https://py2deb.readthedocs.org """ The top level :py:mod:`py2deb` module contains only a version number. .. data:: __version__ The version number of the `pydeb` package (a string). """ # Semi-standard module versioning. __version__ = '0.20.11'
# py2deb: Python to Debian package converter. # # Authors: # - Arjan Verwer # - Peter Odding <[email protected]> # Last Change: April 4, 2015 # URL: https://py2deb.readthedocs.org """ The top level :py:mod:`py2deb` module contains only a version number. .. data:: __version__ The version number of the `pydeb` package (a string). """ # Semi-standard module versioning. __version__ = '0.21'
Prepare to release 0.21 (with pip-accel 0.25 and pip 6)
Prepare to release 0.21 (with pip-accel 0.25 and pip 6)
Python
mit
paylogic/py2deb,paylogic/py2deb
21059428d95c27cf043ada2e299a4cf3982a4233
python/printbag.py
python/printbag.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """Convert a rosbag file to legacy lidar binary format. """ """LIDAR datatype format is: ( timestamp (long), flag (bool saved as int), accelerometer[3] (double), gps[3] (double), distance[LIDAR_NUM_ANGLES] (long), ) 'int' and 'long' are the same size on the raspberry pi (32 bits). """ import sys import rosbag def decode_bag(bag): topics = ['/scan', '/flagbutton_pressed'] return [message for message in bag.read_messages(topics=topics)] if __name__ == '__main__': if len(sys.argv) < 2: print(('Usage: {} <rosbag> [<outfile>] \n\n' 'Print contents of rosbag file. If <outfile> is provided, \n' 'write contents of rosbag file to <outfile> in the legacy \n' 'lidar binary format.').format(__file__)) sys.exit(1) outfile = None filename = sys.argv[1] if len(sys.argv) == 3: outfile = sys.argv[2] with rosbag.Bag(filename) as bag: print(decode_bag(bag)) sys.exit()
#!/usr/bin/env python # -*- coding: utf-8 -*- """Convert a rosbag file to legacy lidar binary format. """ """LIDAR datatype format is: ( timestamp (long), flag (bool saved as int), accelerometer[3] (double), gps[3] (double), distance[LIDAR_NUM_ANGLES] (long), ) 'int' and 'long' are the same size on the raspberry pi (32 bits). """ import sys import rosbag def print_bag(bag): topics = ['/scan', '/flagbutton_pressed'] for message in bag.read_messages(topics=topics): print(message) if __name__ == '__main__': if len(sys.argv) < 2: print(('Usage: {} <rosbag> [<outfile>] \n\n' 'Print contents of rosbag file. If <outfile> is provided, \n' 'write contents of rosbag file to <outfile> in the legacy \n' 'lidar binary format.').format(__file__)) sys.exit(1) outfile = None filename = sys.argv[1] if len(sys.argv) == 3: outfile = sys.argv[2] with rosbag.Bag(filename) as bag: print_bag(bag) sys.exit()
Print out bag contents for lidar and button topics
Print out bag contents for lidar and button topics
Python
bsd-2-clause
oliverlee/antlia
2d61b9d0793d12a11167812bf2f3721170ba7299
vpr/vpr/settings/dev.py
vpr/vpr/settings/dev.py
# Django settings for vpr project. from base import * from logger import * DEBUG = True DEVELOPMENT = True TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': { #'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. #'NAME': 'vpr.sqlite3', # Or path to database file if using sqlite3. 'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': 'vpr_dev', # Or path to database file if using sqlite3. 'USER': 'vpr', # Not used with sqlite3. 'PASSWORD': 'vpr', # Not used with sqlite3. 'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '3306', # Set to empty string for default. Not used with sqlite3. } } # Make this unique, and don't share it with anybody. #SECRET_KEY = 'kw7#s$8t&amp;6d9*7*$a$(gui0r1ze7f#u%(hua=^a3u66+vyj+9g' ROOT_URLCONF = 'vpr.urls.dev' INSTALLED_APPS += ( 'django_extensions', )
# Django settings for vpr project. from base import * DEBUG = True DEVELOPMENT = True TEMPLATE_DEBUG = DEBUG DATABASES = { 'default': { #'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. #'NAME': 'vpr.sqlite3', # Or path to database file if using sqlite3. 'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': 'vpr_dev', # Or path to database file if using sqlite3. 'USER': 'vpr', # Not used with sqlite3. 'PASSWORD': 'vpr', # Not used with sqlite3. 'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '3306', # Set to empty string for default. Not used with sqlite3. } } # Make this unique, and don't share it with anybody. #SECRET_KEY = 'kw7#s$8t&amp;6d9*7*$a$(gui0r1ze7f#u%(hua=^a3u66+vyj+9g' ROOT_URLCONF = 'vpr.urls.dev' INSTALLED_APPS += ( 'django_extensions', )
Fix wrong import of logger
Fix wrong import of logger
Python
agpl-3.0
voer-platform/vp.repo,voer-platform/vp.repo,voer-platform/vp.repo,voer-platform/vp.repo
78cca16df6a5cdd90ec92e64455215c4b7292fae
report_coverage.py
report_coverage.py
#!/usr/bin/env python # coding: utf-8 import json import os import sys from coveralls import Coveralls, cli # Patch coveralls to get javascript coverage from mocha orig_get_coverage = Coveralls.get_coverage def get_coverage_with_js(self): report = orig_get_coverage(self) js_files = json.load(open('.coverage-js'))['files'] js_report = [] for f in js_files: source = '\n'.join(open(f['filename']).readlines()) name = os.path.relpath(f['filename']) coverage = [] for v in f['source'].values(): coverage.append(v['coverage'] if v['coverage'] != '' else None) js_report.append({ 'source': source, 'name': name, 'coverage': coverage} ) report += js_report return report Coveralls.get_coverage = get_coverage_with_js cli.main(sys.argv[1:])
#!/usr/bin/env python # coding: utf-8 import json import os import sys from coveralls import Coveralls, cli # Patch coveralls to get javascript coverage from mocha orig_get_coverage = Coveralls.get_coverage def get_coverage_with_js(self): report = orig_get_coverage(self) js_files = json.load(open('.coverage-js'))['files'] js_report = [] for f in js_files: source = '\n'.join(open(f['filename']).readlines()) name = os.path.relpath(f['filename']) coverage = [] # Create sorted coverage array from original dict for k, v in sorted(f['source'].items(), key=lambda x:int(x[0])): coverage.append(v['coverage'] if v['coverage'] != '' else None) js_report.append({ 'source': source, 'name': name, 'coverage': coverage} ) report += js_report return report Coveralls.get_coverage = get_coverage_with_js cli.main(sys.argv[1:])
Sort line coverage info when reporting
Sort line coverage info when reporting
Python
apache-2.0
exekias/django-achilles,exekias/django-achilles
9ee79dbbddb07e06948cf68f9a38d94bbbcc00da
index.py
index.py
from nltk.tokenize import word_tokenize, sent_tokenize import getopt import sys import os import io def usage(): print("usage: " + sys.argv[0] + " -i directory-of-documents -d dictionary-file -p postings-file") if __name__ == '__main__': dir_doc = dict_file = postings_file = None try: opts, args = getopt.getopt(sys.argv[1:], 'i:d:p:') except getopt.GetoptError as err: usage() sys.exit(2) for o, a in opts: if o == '-i': dir_doc = a elif o == '-d': dict_file = a elif o == '-p': postings_file = a else: assert False, "unhandled option" if dir_doc == None or dict_file == None or postings_file == None: usage() sys.exit(2)
from nltk.tokenize import word_tokenize, sent_tokenize import getopt import sys import os import io def load_data(dir_doc): docs = {} for dirpath, dirnames, filenames in os.walk(dir_doc): for name in filenames: file = os.path.join(dirpath, name) with io.open(file, 'r+') as f: docs[name] = f.read() return docs def usage(): print("usage: " + sys.argv[0] + " -i directory-of-documents -d dictionary-file -p postings-file") if __name__ == '__main__': dir_doc = dict_file = postings_file = None try: opts, args = getopt.getopt(sys.argv[1:], 'i:d:p:') except getopt.GetoptError as err: usage() sys.exit(2) for o, a in opts: if o == '-i': dir_doc = a elif o == '-d': dict_file = a elif o == '-p': postings_file = a else: assert False, "unhandled option" if dir_doc == None or dict_file == None or postings_file == None: usage() sys.exit(2) load_data(dir_doc)
Implement function to load data from directory
Implement function to load data from directory
Python
mit
ikaruswill/vector-space-model,ikaruswill/boolean-retrieval
570792ad4ce15ae0817b87f95e9c55be9a18451d
setup.py
setup.py
#!/usr/bin/python from setuptools import setup, find_packages with open('requirements.txt') as f: requirements = f.read().splitlines() setup( name = "docker-scripts", version = "0.3.0", packages = find_packages(), url='https://github.com/goldmann/docker-scripts', author='Marek Goldmann', author_email='[email protected]', description = 'A swiss-knife tool that could be useful for people working with Docker', license='MIT', keywords = 'docker', entry_points={ 'console_scripts': ['docker-scripts=docker_scripts.cli.main:run'], }, install_requires=requirements )
#!/usr/bin/python from setuptools import setup, find_packages with open('requirements.txt') as f: requirements = f.read().splitlines() setup( name = "docker-scripts", version = "0.3.0", packages = find_packages(), url='https://github.com/goldmann/docker-scripts', author='Marek Goldmann', author_email='[email protected]', description = 'A swiss-knife tool that could be useful for people working with Docker', license='MIT', keywords = 'docker', long_description=open('README.md').read(), entry_points={ 'console_scripts': ['docker-scripts=docker_scripts.cli.main:run'], }, install_requires=requirements )
Set long description to current README.md content
Set long description to current README.md content
Python
mit
goldmann/docker-scripts,jpopelka/docker-scripts,TomasTomecek/docker-scripts,goldmann/docker-squash,lichia/docker-scripts
f4c77f6e770bc8c5a8f16168a655f8e3a25a3d10
setup.py
setup.py
from setuptools import setup setup(name='python-five91', version='0.1', description='Simple 591.com.tw wrapper', url='http://github.com/tzengyuxio/python-five91', author='Tzeng Yuxio', author_email='[email protected]', license='MIT', packages=['five91'], install_requires=[ 'beautifulsoup4', ], zip_safe=False)
from setuptools import setup setup(name='five91', version='0.1', description='Simple 591.com.tw wrapper', url='http://github.com/tzengyuxio/python-five91', author='Tzeng Yuxio', author_email='[email protected]', license='MIT', packages=['five91'], install_requires=[ 'beautifulsoup4', ], zip_safe=False)
Change to a simple, shorter package name
Change to a simple, shorter package name
Python
mit
tzengyuxio/python-five91
94c55abd918cae7aba02d729c44067ad006ee20d
setup.py
setup.py
# -*- coding: utf-8 -*- import os from setuptools import setup def read(fname): try: return open(os.path.join(os.path.dirname(__file__), fname)).read() except: return '' setup( name='todoist-python', version='8.0.2', packages=['todoist', 'todoist.managers'], author='Doist Team', author_email='[email protected]', license='BSD', description='todoist-python - The official Todoist Python API library', long_description = read('README.md'), install_requires=[ 'requests', ], # see here for complete list of classifiers # http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=( 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python', ), )
# -*- coding: utf-8 -*- import os from setuptools import setup def read(fname): try: return open(os.path.join(os.path.dirname(__file__), fname)).read() except: return '' setup( name='todoist-python', version='8.1.0', packages=['todoist', 'todoist.managers'], author='Doist Team', author_email='[email protected]', license='BSD', description='todoist-python - The official Todoist Python API library', long_description = read('README.md'), install_requires=[ 'requests', ], # see here for complete list of classifiers # http://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=( 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python', ), )
Update the PyPI version to 8.1.0.
Update the PyPI version to 8.1.0.
Python
mit
Doist/todoist-python
5f80be3f779b6cae4c7215b26e7f5e1cf9a262ec
setup.py
setup.py
from setuptools import setup REPO_URL = 'http://github.com/okfn-brasil/serenata-toolbox' with open('README.rst') as fobj: long_description = fobj.read() setup( author='Serenata de Amor', author_email='[email protected]', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3.6', 'Topic :: Utilities', ], description='Toolbox for Serenata de Amor project', zip_safe=False, install_requires=[ 'aiofiles', 'aiohttp', 'beautifulsoup4>=4.4', 'lxml>=3.6', 'pandas>=0.18', 'python-decouple>=3.1', 'tqdm' ], keywords='serenata de amor, data science, brazil, corruption', license='MIT', long_description=long_description, name='serenata-toolbox', packages=[ 'serenata_toolbox', 'serenata_toolbox.federal_senate', 'serenata_toolbox.chamber_of_deputies', 'serenata_toolbox.datasets' ], scripts=['serenata_toolbox/serenata-toolbox'], url=REPO_URL, python_requires='>=3.6', version='15.1.4', )
from setuptools import setup REPO_URL = 'http://github.com/okfn-brasil/serenata-toolbox' with open('README.rst') as fobj: long_description = fobj.read() setup( author='Serenata de Amor', author_email='[email protected]', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Topic :: Utilities', ], description='Toolbox for Serenata de Amor project', zip_safe=False, install_requires=[ 'aiofiles', 'aiohttp', 'beautifulsoup4>=4.4', 'lxml>=3.6', 'pandas>=0.18', 'python-decouple>=3.1', 'tqdm' ], keywords='serenata de amor, data science, brazil, corruption', license='MIT', long_description=long_description, name='serenata-toolbox', packages=[ 'serenata_toolbox', 'serenata_toolbox.federal_senate', 'serenata_toolbox.chamber_of_deputies', 'serenata_toolbox.datasets' ], scripts=['serenata_toolbox/serenata-toolbox'], url=REPO_URL, python_requires='>=3.6', version='15.1.5', )
Allow the toolbox to be installed in Python 3.7
Allow the toolbox to be installed in Python 3.7
Python
mit
datasciencebr/serenata-toolbox
c4d1aa9261a37e213676dd1d7943c96b0cb273db
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='opps-admin', version='0.1', description='Opps Admin, drop-in replacement of Django admin comes with lots of goodies, fully extensible with plugin support, pretty UI based on Twitter Bootstrap.', long_description=open('README.rst').read(), author='sshwsfc', url='http://www.oppsproject.org', download_url='http://github.com/opps/opps-admin/tarball/master', packages=find_packages(exclude=('doc', 'docs',)), include_package_data=True, install_requires=[ 'setuptools', 'django>=1.4', 'xlwt', 'django-crispy-forms>=1.2.3', 'django-reversion', ], zip_safe=True, keywords=['admin', 'django', 'xadmin', 'bootstrap'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', "Programming Language :: JavaScript", 'Programming Language :: Python', "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules", ] )
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='opps-admin', version='0.1', description='Opps Admin, drop-in replacement of Django admin comes with lots of goodies, fully extensible with plugin support, pretty UI based on Twitter Bootstrap.', long_description=open('README.rst').read(), author='sshwsfc', url='http://www.oppsproject.org', download_url='http://github.com/opps/opps-admin/tarball/master', packages=find_packages(exclude=('doc', 'docs',)), include_package_data=True, install_requires=[ 'setuptools', 'opps>=0.2', 'xlwt', 'django-crispy-forms>=1.2.3', ], zip_safe=True, keywords=['admin', 'django', 'xadmin', 'bootstrap', 'opps', 'opps-admin'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', "Programming Language :: JavaScript", 'Programming Language :: Python', "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: Dynamic Content", "Topic :: Software Development :: Libraries :: Python Modules", ] )
Update install requires, add opps >= 0.2
Update install requires, add opps >= 0.2
Python
bsd-3-clause
jeanmask/opps-admin
ea0845828e267583847fa7d288f524289a5c9697
setup.py
setup.py
#!/usr/bin/python from distutils.core import setup setup( # Basic package information. name = 'Zendesk', author = 'Max Gutman, Stefan Tjarks', version = '1.1.1', author_email = '[email protected]', packages = ['zendesk'], include_package_data = True, install_requires = ['httplib2', 'simplejson'], license='LICENSE.txt', url = 'https://github.com/maxeventbrite/zendesk/tree/master', keywords = 'zendesk api helpdesk', description = 'Python API Wrapper for Zendesk', classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Internet' ], )
from setuptools import setup import sys extra = {} if sys.version_info >= (3,): extra['use_2to3'] = True setup( # Basic package information. name = 'Zendesk', author = 'Max Gutman, Stefan Tjarks', version = '1.1.1', author_email = '[email protected]', packages = ['zendesk'], include_package_data = True, install_requires = ['httplib2', 'simplejson'], license='LICENSE.txt', url = 'https://github.com/maxeventbrite/zendesk/tree/master', keywords = 'zendesk api helpdesk', description = 'Python API Wrapper for Zendesk', classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Internet', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', ], **extra )
Use 2to3 to install under Python3
Use 2to3 to install under Python3
Python
mit
fprimex/zdesk,laythun/zdesk,fprimex/zdgen,blade2005/zdesk
cfe724b3ddfc283d394885c8a6f1d410f204f87d
setup.py
setup.py
from setuptools import setup, find_packages setup( name="go_contacts", version="0.1.0a", url='http://github.com/praekelt/go-contacts-api', license='BSD', description="A contacts and groups API for Vumi Go", long_description=open('README.rst', 'r').read(), author='Praekelt Foundation', author_email='[email protected]', packages=find_packages(), include_package_data=True, install_requires=[ 'cyclone', 'go_api', 'vumi-go', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Internet :: WWW/HTTP', ], )
from setuptools import setup, find_packages setup( name="go_contacts", version="0.1.0a", url='http://github.com/praekelt/go-contacts-api', license='BSD', description="A contacts and groups API for Vumi Go", long_description=open('README.rst', 'r').read(), author='Praekelt Foundation', author_email='[email protected]', packages=find_packages(), include_package_data=True, install_requires=[ 'cyclone', 'go_api>=0.1.3', 'vumi-go', ], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: POSIX', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Internet :: WWW/HTTP', ], )
Add minimum version requirement on go_api.
Add minimum version requirement on go_api.
Python
bsd-3-clause
praekelt/go-contacts-api,praekelt/go-contacts-api
4ae8302a3d91ca1e9601e0c51cb58a69f1c08cb5
setup.py
setup.py
"""bibpy module setup script for distribution.""" from __future__ import with_statement import os import distutils.core def get_version(filename): with open(filename) as fh: for line in fh: if line.startswith('__version__'): return line.split('=')[-1].strip()[1:-1] distutils.core.setup( name='bibpy', version=get_version(os.path.join('bibpy', '__init__.py')), author='Alexander Asp Bock', author_email='[email protected]', platforms='All', description=('Bib(la)tex parsing and useful tools'), license='MIT', keywords='bibpy, bibtex, biblatex, parser', url='https://github.com/MisanthropicBit/bibpy', packages=['bibpy', 'bibpy.entry', 'bibpy.lexers', 'bibpy.parsers', 'bibpy.doi'], long_description=open('README.md').read(), scripts=['bin/bibgrep', 'bin/bibformat', 'bin/bibstats'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Utilities', 'Topic :: Software Development', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.5' 'Programming Language :: Python :: 3.6', ] )
"""bibpy module setup script for distribution.""" from __future__ import with_statement import os import distutils.core def get_version(filename): with open(filename) as fh: for line in fh: if line.startswith('__version__'): return line.split('=')[-1].strip()[1:-1] distutils.core.setup( name='bibpy', version=get_version(os.path.join('bibpy', '__init__.py')), author='Alexander Asp Bock', author_email='[email protected]', platforms='All', description=('Bib(la)tex parsing and useful tools'), license='MIT', keywords='bibpy, bibtex, biblatex, parser', url='https://github.com/MisanthropicBit/bibpy', packages=['bibpy', 'bibpy.entry', 'bibpy.lexers', 'bibpy.doi'], long_description=open('README.md').read(), scripts=['bin/bibgrep', 'bin/bibformat', 'bin/bibstats'], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Topic :: Utilities', 'Topic :: Software Development', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.5' 'Programming Language :: Python :: 3.6', ] )
Remove 'bibpy.parsers' from package list
Remove 'bibpy.parsers' from package list
Python
mit
MisanthropicBit/bibpy,MisanthropicBit/bibpy
ce73590211ce31a2a26255b7e4ea873c7b8cd843
setup.py
setup.py
from setuptools import setup setup(name="save_skype", version="0.1", description="Extract and save Skype chats.", url="https://github.com/thismachinechills/save_skype", author="thismachinechills (Alex)", license="AGPL 3.0", packages=['save_skype'], zip_safe=True, install_requires=["click", "html_wrapper"], keywords="skype main.db extract chats".split(' '), entry_points={"console_scripts": ["save_skype = save_skype.extract:cmd"]})
from setuptools import setup with open('requirements.txt', 'r') as file: requirements = file.readlines() setup(name="save_skype", version="0.1.1", description="Extract and save Skype chats.", url="https://github.com/thismachinechills/save_skype", author="thismachinechills (Alex)", license="AGPL 3.0", packages=['save_skype'], zip_safe=True, install_requires=requirements, keywords="skype main.db extract chats".split(' '), entry_points={"console_scripts": ["save_skype = save_skype.extract:cmd"]})
Use requirements.txt entries to populate package requirements
Use requirements.txt entries to populate package requirements
Python
agpl-3.0
thismachinechills/save_skype
ea91df1d178031bccc29dbf4a17cdd02fbb05953
setup.py
setup.py
from setuptools import setup, find_packages with open('README.md') as f: long_description = f.read() setup( name='jiradoc', version='0.1', description='A JIRAdoc parser', long_description=long_description, url='https://github.com/lucianovdveekens/jiradoc', author='Luciano van der Veekens', author_email='[email protected]', packages=find_packages(), install_requires=['argparse', 'ply'], package_data={ 'jiradoc': ['data/test.jiradoc'] }, entry_points={ 'console_scripts': [ 'jiradoc=jiradoc.__main__:main', ], }, )
from setuptools import setup, find_packages with open('README.md') as f: long_description = f.read() setup( name='jiradoc', version='0.1', description='A JIRAdoc parser', long_description=long_description, url='https://github.com/lucianovdveekens/jiradoc', author='Luciano van der Veekens', author_email='[email protected]', packages=find_packages(), install_requires=['ply'], package_data={ 'jiradoc': ['data/test.jiradoc'] }, entry_points={ 'console_scripts': [ 'jiradoc=jiradoc.__main__:main', ], }, )
Remove an install_requires library that has been part of Python since 2.7
Remove an install_requires library that has been part of Python since 2.7
Python
mit
lucianovdveekens/jiradoc
b2026158e0aec2c79bba9e61ff02c14d42166b20
setup.py
setup.py
from setuptools import setup, find_packages from ckanext.qa import __version__ setup( name='ckanext-qa', version=__version__, description='Quality Assurance plugin for CKAN', long_description='', classifiers=[], keywords='', author='Open Knowledge Foundation', author_email='[email protected]', url='http://ckan.org/wiki/Extensions', license='mit', packages=find_packages(exclude=['tests']), namespace_packages=['ckanext', 'ckanext.qa'], include_package_data=True, zip_safe=False, install_requires=[ 'celery==2.4.5', 'kombu==1.5.1', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests==0.6.4', ], tests_require=[ 'nose', 'mock', ], entry_points=''' [paste.paster_command] qa=ckanext.qa.commands:QACommand [ckan.plugins] qa=ckanext.qa.plugin:QAPlugin [ckan.celery_task] tasks=ckanext.qa.celery_import:task_imports ''', )
from setuptools import setup, find_packages from ckanext.qa import __version__ setup( name='ckanext-qa', version=__version__, description='Quality Assurance plugin for CKAN', long_description='', classifiers=[], keywords='', author='Open Knowledge Foundation', author_email='[email protected]', url='http://ckan.org/wiki/Extensions', license='mit', packages=find_packages(exclude=['tests']), namespace_packages=['ckanext', 'ckanext.qa'], include_package_data=True, zip_safe=False, install_requires=[ 'celery==2.4.2', 'kombu==2.1.3', 'kombu-sqlalchemy==1.1.0', 'SQLAlchemy>=0.6.6', 'requests==0.6.4', ], tests_require=[ 'nose', 'mock', ], entry_points=''' [paste.paster_command] qa=ckanext.qa.commands:QACommand [ckan.plugins] qa=ckanext.qa.plugin:QAPlugin [ckan.celery_task] tasks=ckanext.qa.celery_import:task_imports ''', )
Change celery and kombu requirements to match ckanext-datastorer
Change celery and kombu requirements to match ckanext-datastorer
Python
mit
ckan/ckanext-qa,ckan/ckanext-qa,ckan/ckanext-qa
73f7502f1fda11bc23469c6f3e8f79b0e375c928
setup.py
setup.py
#!/usr/bin/env python from distutils.core import setup setup(name='redis-dump-load', version='0.2.0', description='Dump and load redis databases', author='Oleg Pudeyev', author_email='[email protected]', url='http://github.com/p/redis-dump-load', py_modules=['redisdl'], )
#!/usr/bin/env python from distutils.core import setup setup(name='redis-dump-load', version='0.2.0', description='Dump and load redis databases', author='Oleg Pudeyev', author_email='[email protected]', url='http://github.com/p/redis-dump-load', py_modules=['redisdl'], data_files=['LICENSE', 'README.rst'], )
Add license and readme to the packages
Add license and readme to the packages
Python
bsd-2-clause
hyunchel/redis-dump-load,p/redis-dump-load,p/redis-dump-load,hyunchel/redis-dump-load
6d8616ff6d163716366e506d5a7c50a5466987bf
setup.py
setup.py
from setuptools import setup, find_packages from Cython.Build import cythonize setup( name='python-wallpaper', version='0.2.1', url='https://github.com/ondergetekende/python-wallpaper', description=( 'python-wallpaper generates pseudorandom abstract wallpapers' ), author='Koert van der Veer', author_email='[email protected]', packages=find_packages(), ext_modules=cythonize("wallpaper/*.pyx"), install_requires=[], classifiers=[ 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', ], entry_points={ 'console_scripts': [ 'generate-wallpaper = wallpapers:cmdline' ] }, )
from setuptools import setup, find_packages from Cython.Build import cythonize setup( name='python-wallpaper', version='0.2.2', url='https://github.com/ondergetekende/python-wallpaper', description=( 'python-wallpaper generates pseudorandom abstract wallpapers' ), author='Koert van der Veer', author_email='[email protected]', packages=find_packages(), ext_modules=cythonize("wallpaper/*.pyx"), install_requires=["cython"], classifiers=[ 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', ], entry_points={ 'console_scripts': [ 'generate-wallpaper = wallpapers:cmdline' ] }, )
Add cython as a dependency - v0.2.2
Add cython as a dependency - v0.2.2
Python
mit
ondergetekende/python-panavatar
2a842d4b68bf2b8084852d4418e84beb00b5f0f0
setup.py
setup.py
from setuptools import setup long_description = "".join(open("README.rst").readlines()) setup( name="pytest-testmon", description="selects tests affected by changed files and methods", long_description=long_description, version="1.1.2", license="AGPL", platforms=["linux", "osx", "win32"], packages=[ "testmon", ], url="https://testmon.org", author_email="[email protected]", author="Tibor Arpas, Tomas Matlovic, Daniel Hahler, Martin Racak", entry_points={ "pytest11": [ "testmon = testmon.pytest_testmon", ], "tox": [ "testmon = testmon.tox_testmon", ], }, python_requires=">=3.6", install_requires=["pytest>=5,<7", "coverage>=4,<6"], classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "Operating System :: POSIX", "Operating System :: Microsoft :: Windows", "Operating System :: MacOS :: MacOS X", "Topic :: Software Development :: Testing", "Topic :: Software Development :: Libraries", "Topic :: Utilities", "Programming Language :: Python", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3 :: Only", ], )
from setuptools import setup long_description = "".join(open("README.rst").readlines()) setup( name="pytest-testmon", description="selects tests affected by changed files and methods", long_description=long_description, version="1.1.2", license="AGPL", platforms=["linux", "osx", "win32"], packages=[ "testmon", ], url="https://testmon.org", author_email="[email protected]", author="Tibor Arpas, Tomas Matlovic, Daniel Hahler, Martin Racak", entry_points={ "pytest11": [ "testmon = testmon.pytest_testmon", ], "tox": [ "testmon = testmon.tox_testmon", ], }, python_requires=">=3.6", install_requires=["pytest>=5,<7", "coverage>=4,<7"], classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "Operating System :: POSIX", "Operating System :: Microsoft :: Windows", "Operating System :: MacOS :: MacOS X", "Topic :: Software Development :: Testing", "Topic :: Software Development :: Libraries", "Topic :: Utilities", "Programming Language :: Python", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3 :: Only", ], )
Allow coverage version 6+.
Allow coverage version 6+.
Python
agpl-3.0
tarpas/pytest-testmon
6e0dd9111083258023758fbec2d597d898603ba4
setup.py
setup.py
from setuptools import setup import proxyprefix setup( name='proxyprefix', version=proxyprefix.__version__, description='Prefix SCRIPT_NAME with X-Forwarded-Prefix header', long_description=proxyprefix.__doc__, author='Yola', author_email='[email protected]', license='MIT (Expat)', url='https://github.com/yola/proxyprefix', packages=['proxyprefix'], test_suite='nose.collector', classifiers=[ 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware', ], extras_require = { 'djproxy': ['djproxy>=2.0.0'], }, )
from setuptools import find_packages, setup import proxyprefix setup( name='proxyprefix', version=proxyprefix.__version__, description='Prefix SCRIPT_NAME with X-Forwarded-Prefix header', long_description=proxyprefix.__doc__, author='Yola', author_email='[email protected]', license='MIT (Expat)', url='https://github.com/yola/proxyprefix', packages=find_packages(exclude=['tests', 'tests.*']), test_suite='nose.collector', classifiers=[ 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware', ], extras_require = { 'djproxy': ['djproxy>=2.0.0'], }, )
Include contrib module in installed package
Include contrib module in installed package See https://github.com/yola/yolacom/pull/1775#issuecomment-76513787
Python
mit
yola/proxyprefix
55ca47133f1cae601daf76484a12f13168083f18
setup.py
setup.py
import os from setuptools import setup def get_version(): """ Get the version from version module without importing more than necessary. """ version_module_path = os.path.join(os.path.dirname(__file__), "eliot", "_version.py") # The version module contains a variable called __version__ with open(version_module_path) as version_module: exec(version_module.read()) return locals()["__version__"] def read(path): """ Read the contents of a file. """ with open(path) as f: return f.read() setup( classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], name='eliot', version=get_version(), description="Logging as Storytelling", install_requires=["six"], keywords="logging", license="APL2", packages=["eliot", "eliot.tests"], url="https://github.com/HybridLogic/eliot/", maintainer='Itamar Turner-Trauring', maintainer_email='[email protected]', long_description=read('README.rst'), )
import os from setuptools import setup def get_version(): """ Get the version from version module without importing more than necessary. """ version_module_path = os.path.join(os.path.dirname(__file__), "eliot", "_version.py") # The version module contains a variable called __version__ with open(version_module_path) as version_module: exec(version_module.read()) return locals()["__version__"] def read(path): """ Read the contents of a file. """ with open(path) as f: return f.read() setup( classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', ], name='eliot', version=get_version(), description="Logging as Storytelling", install_requires=["six"], keywords="logging", license="APL2", packages=["eliot", "eliot.tests"], url="https://github.com/HybridLogic/eliot/", maintainer='Itamar Turner-Trauring', maintainer_email='[email protected]', long_description=read('README.rst'), )
Add Python 3 classifiers so users know this supports Python 3.
Add Python 3 classifiers so users know this supports Python 3.
Python
apache-2.0
ScatterHQ/eliot,iffy/eliot,ScatterHQ/eliot,ScatterHQ/eliot,ClusterHQ/eliot
b6c6d2fc69560a5dbab33fd24d61ecaf827710f9
setup.py
setup.py
from setuptools import setup, find_packages

setup(
    name="punk",
    version="1.0.1",
    description="Primitives for Uncovering New Knowledge.",
    long_description="Machine Learning pipeline elements.",
    url="https://github.com/NewKnowledge/punk",
    author="New Knowledge",
    author_email="[email protected]",
    license="MIT",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
        # Pick your license as you wish (should match "license" above)
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3.6",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
    keywords="TA1 primitive, feature selection, novelty detection",
    packages=find_packages(exclude=['tests']),
    install_requires=["numpy", "scikit-learn", "scipy"],
)
from setuptools import setup, find_packages

setup(
    name="punk",
    version="1.0.2",
    description="Primitives for Uncovering New Knowledge.",
    long_description="Machine Learning pipeline elements.",
    url="https://github.com/NewKnowledge/punk",
    author="New Knowledge",
    author_email="[email protected]",
    license="MIT",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
        # Pick your license as you wish (should match "license" above)
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3.6",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
    keywords="TA1 primitive, feature selection, novelty detection",
    packages=find_packages(exclude=['tests']),
    install_requires=["numpy", "scikit-learn", "scipy"],
)
Change author email and version
Change author email and version
Python
mit
NewKnowledge/punk,NewKnowledge/punk
bda028a2c5905ee42a07741c111ce1acc4ed433d
setup.py
setup.py
from setuptools import setup

setup(
    name = "ironic-discoverd",
    version = "0.2.0",
    description = "Simple hardware discovery for OpenStack Ironic",
    author = "Dmitry Tantsur",
    author_email = "[email protected]",
    url = "https://github.com/Divius/ironic-discoverd/",
    packages = ['ironic_discoverd'],
    install_requires = ['Flask', 'python-ironicclient', 'eventlet',
                        'python-keystoneclient'],
    entry_points = {'console_scripts': ["ironic-discoverd = ironic_discoverd.main:main"]},
)
from setuptools import setup

setup(
    name = "ironic-discoverd",
    version = "0.2.0",
    description = "Simple hardware discovery for OpenStack Ironic",
    author = "Dmitry Tantsur",
    author_email = "[email protected]",
    url = "https://github.com/Divius/ironic-discoverd/",
    packages = ['ironic_discoverd'],
    install_requires = ['Flask', 'python-ironicclient', 'eventlet',
                        'python-keystoneclient', 'requests', 'six'],
    entry_points = {'console_scripts': ["ironic-discoverd = ironic_discoverd.main:main"]},
)
Add requests and six as explicit dependencies
Add requests and six as explicit dependencies
Python
apache-2.0
rdo-management/ironic-discoverd,openstack/ironic-inspector,openstack/ironic-inspector,rdo-management/ironic-discoverd
ba9a0c496d5fb345b93f08e62dc095bc6c4d3d33
setup.py
setup.py
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

config = {
    'name' : 'Redis Grepper',
    'description' : 'Perform regex searches through Redis values',
    'author' : 'Ionut G. Stan',
    'author_email' : '[email protected]',
    'url' : 'http://github.com/igstan/regis-grep',
    'download_url' : 'http://github.com/igstan/redis-grep/zipball/0.1.1',
    'version' : '0.1.1',
    'install_requires' : ['redis'],
    'py_modules' : ['redisgrep'],
    'scripts' : ['redis-grep'],
}

setup(**config)
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

config = {
    'name' : 'Redis Grepper',
    'description' : 'Perform regex searches through Redis values',
    'author' : 'Ionut G. Stan',
    'author_email' : '[email protected]',
    'url' : 'http://github.com/igstan/redis-grep',
    'download_url' : 'http://github.com/igstan/redis-grep/zipball/0.1.1',
    'version' : '0.1.1',
    'install_requires' : ['redis'],
    'py_modules' : ['redisgrep'],
    'scripts' : ['redis-grep'],
}

setup(**config)
Fix broken GitHub repository URL
Fix broken GitHub repository URL
Python
bsd-2-clause
igstan/redis-grep
3ef9fc3c0f2c0210d41db1abb3ae4120fef298cf
setup.py
setup.py
#!/usr/bin/env python

import os
from setuptools import setup

# Utility function to read the README file.
def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

setup(
    name = 'voltha',
    version = '1.3.0',
    author = 'Open Networking Foundation, et al',
    author_email = '[email protected]',
    description = ('Virtual Optical Line Terminal (OLT) Hardware Abstraction'),
    license = 'Apache License 2.0',
    keywords = 'volt gpon cord',
    url = 'https://gerrit.opencord.org/#/q/project:voltha',
    packages=['voltha', 'tests'],
    long_description=read('README.md'),
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Topic :: System :: Networking',
        'Programming Language :: Python',
        'License :: OSI Approved :: Apache License 2.0',
    ],
)
#!/usr/bin/env python

import os
from setuptools import setup

# Utility function to read the README file.
def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

setup(
    name = 'voltha',
    version = '2.0.0-dev',
    author = 'Open Networking Foundation, et al',
    author_email = '[email protected]',
    description = ('Virtual Optical Line Terminal (OLT) Hardware Abstraction'),
    license = 'Apache License 2.0',
    keywords = 'volt gpon cord',
    url = 'https://gerrit.opencord.org/#/q/project:voltha',
    packages=['voltha', 'tests'],
    long_description=read('README.md'),
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Topic :: System :: Networking',
        'Programming Language :: Python',
        'License :: OSI Approved :: Apache License 2.0',
    ],
)
Bump package version forward to next development version
Bump package version forward to next development version

Change-Id: Ia04ceb0e83d4927e75a863252571ed76f83b2ef1
Python
apache-2.0
opencord/voltha,opencord/voltha,opencord/voltha,opencord/voltha,opencord/voltha
dc7153da6ee0bad749f24fdd93fec5aeb66596a5
setup.py
setup.py
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path

here = path.abspath(path.dirname(__file__))

# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    # Application name:
    name="blynkapi",

    # Version number (initial):
    version="0.1.6",

    description="This is a simple blynk HTTP/HTTPS API wrapper.",
    long_description=long_description,

    #URL
    url='https://github.com/xandr2/blynkapi',
    download_url = 'https://github.com/xandr2/blynkapi/archive/0.1.6.tar.gz',

    # Application author details:
    author="Alexandr Borysov",
    author_email="[email protected]",

    # License
    license='MIT',

    keywords=['python', 'blynk', 'HTTP/HTTPS', 'API', 'wrapper'],

    # Packages
    packages=["blynkapi"],

    # Include additional files into the package
    #include_package_data=True,

    #
    # license="LICENSE.txt",
    # long_description=open("README.txt").read(),

    # Dependent packages (distributions)
    #install_requires=[
    #    "urllib2",
    #],

    classifiers = [],
)
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path

here = path.abspath(path.dirname(__file__))

# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    # Application name:
    name="blynkapi",

    # Version number (initial):
    version="0.1.7",

    description="This is a simple blynk HTTP/HTTPS API wrapper.",
    long_description=long_description,

    #URL
    url='https://github.com/xandr2/blynkapi',
    download_url = 'https://github.com/xandr2/blynkapi/archive/0.1.7.tar.gz',

    # Application author details:
    author="Alexandr Borysov",
    author_email="[email protected]",

    # License
    license='MIT',

    keywords=['python', 'blynk', 'HTTP/HTTPS', 'API', 'wrapper'],

    # Packages
    packages=["blynkapi"],

    # Include additional files into the package
    #include_package_data=True,

    #
    # license="LICENSE.txt",
    # long_description=open("README.txt").read(),

    # Dependent packages (distributions)
    #install_requires=[
    #    "urllib2",
    #],

    classifiers = [],
)
Add old method for setting val to pin named set_val_old
Add old method for setting val to pin named set_val_old
Python
mit
xandr2/blynkapi
be3f74bcd4d6fed5aaec3d8df62854a1078a1383
setup.py
setup.py
from distutils.core import setup

import skyfield  # to learn the version

setup(
    name='skyfield',
    version=skyfield.__version__,
    description=skyfield.__doc__,
    long_description=open('README.rst').read(),
    license='MIT',
    author='Brandon Rhodes',
    author_email='[email protected]',
    url='http://github.com/brandon-rhodes/python-skyfield/',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Topic :: Scientific/Engineering :: Astronomy',
    ],
    packages=[ 'skyfield', 'skyfield.tests' ],
    package_data = {'skyfield': ['documentation/*.rst']},
    install_requires=['de421==2008', 'jplephem', 'numpy', 'sgp4',
                      'requests==1.2.3'],
)
from distutils.core import setup

import skyfield  # to learn the version

setup(
    name='skyfield',
    version=skyfield.__version__,
    description=skyfield.__doc__,
    long_description=open('README.rst').read(),
    license='MIT',
    author='Brandon Rhodes',
    author_email='[email protected]',
    url='http://github.com/brandon-rhodes/python-skyfield/',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Topic :: Scientific/Engineering :: Astronomy',
    ],
    packages=[ 'skyfield', 'skyfield.tests' ],
    package_data = {'skyfield': ['documentation/*.rst']},
    install_requires=[
        'de421==2008.1',
        'jplephem>=1.2',
        'numpy',
        'requests>=1.2.3',
        'sgp4>=1.1',
    ])
Add ">=" numbers to jplephem and sgp4 dependencies
Add ">=" numbers to jplephem and sgp4 dependencies

And, use de421's new pip-friendly version number "2008.1". And, move
"requests" to a ">=" inequality.
Python
mit
exoanalytic/python-skyfield,skyfielders/python-skyfield,skyfielders/python-skyfield,ozialien/python-skyfield,GuidoBR/python-skyfield,exoanalytic/python-skyfield,ozialien/python-skyfield,GuidoBR/python-skyfield
0f4fa47d1460889dac184a139c3ae4af3ed21214
setup.py
setup.py
#!/usr/bin/env python3
from setuptools import find_packages, setup

setup(
    name='django-afip',
    description='AFIP integration for django',
    author='Hugo Osvaldo Barrera',
    author_email='[email protected]',
    url='https://gitlab.com/hobarrera/django-afip',
    license='ISC',
    packages=find_packages(),
    include_package_data=True,
    long_description=open('README.rst').read(),
    install_requires=open('requirements.txt').read().splitlines() + [
        'Django>=1.8.4'
    ],
    extras_require={
        'docs': ['Sphinx', 'sphinx-autobuild']
    },
    use_scm_version={
        'version_scheme': 'post-release',
        'write_to': 'django_afip/version.py',
    },
    setup_requires=['setuptools_scm'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ]
)
#!/usr/bin/env python3
from setuptools import find_packages, setup

setup(
    name='django-afip',
    description='AFIP integration for django',
    author='Hugo Osvaldo Barrera',
    author_email='[email protected]',
    url='https://gitlab.com/hobarrera/django-afip',
    license='ISC',
    packages=find_packages(),
    include_package_data=True,
    long_description=open('README.rst').read(),
    install_requires=open('requirements.txt').read().splitlines() + [
        'Django>=1.8.4'
    ],
    extras_require={
        'docs': ['Sphinx', 'sphinx-autobuild']
    },
    use_scm_version={
        'version_scheme': 'post-release',
        'write_to': 'django_afip/version.py',
    },
    setup_requires=['setuptools_scm'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: ISC License (ISCL)',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ]
)
Add classifier for our licence
Add classifier for our licence
Python
isc
hobarrera/django-afip,hobarrera/django-afip
eb25e3158090261988d454812d2d35ec93891771
setup.py
setup.py
# -*- coding: utf-8 -*-
import sys
from setuptools import setup, find_packages

IS_PY3 = sys.version_info > (3,)

install_requires = (
    'cnx-archive',
    'cnx-epub',
    'openstax-accounts',
    'psycopg2',
    'pyramid',
    'pyramid_multiauth',
)
tests_require = [
    'webtest',
]
extras_require = {
    'test': tests_require,
}
description = """\
Application for accepting publication requests to the Connexions Archive."""

if not IS_PY3:
    tests_require.append('mock')

setup(
    name='cnx-publishing',
    version='0.1',
    author='Connexions team',
    author_email='[email protected]',
    url="https://github.com/connexions/cnx-publishing",
    license='LGPL, See also LICENSE.txt',
    description=description,
    install_requires=install_requires,
    tests_require=tests_require,
    extras_require=extras_require,
    test_suite='cnxpublishing.tests',
    packages=find_packages(),
    include_package_data=True,
    package_data={
        'cnxpublishing': ['sql/*.sql', 'sql/*/*.sql'],
    },
    entry_points="""\
[paste.app_factory]
main = cnxpublishing.main:main
[console_scripts]
cnx-publishing-initdb = cnxpublishing.scripts.initdb:main
""",
)
# -*- coding: utf-8 -*-
import sys
from setuptools import setup, find_packages

IS_PY3 = sys.version_info > (3,)

install_requires = (
    'cnx-archive',
    'cnx-epub',
    'openstax-accounts',
    'psycopg2',
    'pyramid>=1.5',
    'pyramid_multiauth',
)
tests_require = [
    'webtest',
]
extras_require = {
    'test': tests_require,
}
description = """\
Application for accepting publication requests to the Connexions Archive."""

if not IS_PY3:
    tests_require.append('mock')

setup(
    name='cnx-publishing',
    version='0.1',
    author='Connexions team',
    author_email='[email protected]',
    url="https://github.com/connexions/cnx-publishing",
    license='LGPL, See also LICENSE.txt',
    description=description,
    install_requires=install_requires,
    tests_require=tests_require,
    extras_require=extras_require,
    test_suite='cnxpublishing.tests',
    packages=find_packages(),
    include_package_data=True,
    package_data={
        'cnxpublishing': ['sql/*.sql', 'sql/*/*.sql'],
    },
    entry_points="""\
[paste.app_factory]
main = cnxpublishing.main:main
[console_scripts]
cnx-publishing-initdb = cnxpublishing.scripts.initdb:main
""",
)
Install pyramid 1.5 or newer which has the new SignedCookieSessionFactory
Install pyramid 1.5 or newer which has the new SignedCookieSessionFactory
Python
agpl-3.0
Connexions/cnx-publishing,Connexions/cnx-publishing,Connexions/cnx-publishing
c987ed375da13f53928157f14528bed0c148eeac
tasks.py
tasks.py
import asyncio
import threading


class Tasks:
    loop = asyncio.new_event_loop()

    @classmethod
    def _run(cls):
        try:
            cls.loop.run_forever()
        finally:
            cls.loop.close()

    @classmethod
    def do(cls, func, *args, **kwargs):
        cls.loop.call_soon(lambda: func(*args, **kwargs))
        cls.loop._write_to_self()

    @classmethod
    def later(cls, func, *args, after=None, **kwargs):
        cls.loop.call_later(after, lambda: func(*args, **kwargs))
        cls.loop._write_to_self()

    @classmethod
    def periodic(cls, func, *args, interval=None, **kwargs):
        @asyncio.coroutine
        def f():
            while True:
                yield from asyncio.sleep(interval)
                func(*args, **kwargs)

        cls.loop.create_task(f())
        cls.loop._write_to_self()


threading.Thread(name="tasks", target=Tasks._run, daemon=True).start()
import asyncio
import threading


class Tasks:
    loop = asyncio.new_event_loop()

    @classmethod
    def _run(cls):
        asyncio.set_event_loop(cls.loop)
        try:
            cls.loop.run_forever()
        finally:
            cls.loop.close()

    @classmethod
    def do(cls, func, *args, **kwargs):
        cls.loop.call_soon(lambda: func(*args, **kwargs))
        cls.loop._write_to_self()

    @classmethod
    def later(cls, func, *args, after=None, **kwargs):
        cls.loop.call_later(after, lambda: func(*args, **kwargs))
        cls.loop._write_to_self()

    @classmethod
    def periodic(cls, func, *args, interval=None, **kwargs):
        @asyncio.coroutine
        def f():
            while True:
                yield from asyncio.sleep(interval)
                func(*args, **kwargs)

        cls.loop.create_task(f())
        cls.loop._write_to_self()


threading.Thread(name="tasks", target=Tasks._run, daemon=True).start()
Set implicit loop for Python <3.6
Set implicit loop for Python <3.6
Python
apache-2.0
Charcoal-SE/SmokeDetector,Charcoal-SE/SmokeDetector
c5490366235d4e8d4f77b46707a424375f2dec29
tasks.py
tasks.py
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, coverage, integration, watch_tests
from invocations.packaging import vendorize, release

from invoke import Collection
from invoke.util import LOG_FORMAT


ns = Collection(
    test, coverage, integration, vendorize, release, www, docs, sites,
    watch_docs, watch_tests
)
ns.configure({
    'tests': {
        'logformat': LOG_FORMAT,
    },
})
from invocations.docs import docs, www, sites, watch_docs
from invocations.testing import test, coverage, integration, watch_tests
from invocations.packaging import vendorize, release

from invoke import Collection
from invoke.util import LOG_FORMAT


ns = Collection(
    test, coverage, integration, vendorize, release, www, docs, sites,
    watch_docs, watch_tests
)
ns.configure({
    'tests': {
        'logformat': LOG_FORMAT,
    },
    'packaging': {
        'sign': True,
        'wheel': True,
    },
})
Use config options for packaging
Use config options for packaging
Python
bsd-2-clause
pfmoore/invoke,pyinvoke/invoke,mkusz/invoke,pyinvoke/invoke,mkusz/invoke,pfmoore/invoke
7dcd2c2aa1e2fd8f17e0b564f9b77375675ccd9a
metakernel/pexpect.py
metakernel/pexpect.py
# Convenience imports from pexpect

from __future__ import absolute_import

from pexpect import spawn, which, EOF, TIMEOUT
# Convenience imports from pexpect

from __future__ import absolute_import

from pexpect import which as which_base, is_executable_file, EOF, TIMEOUT

import os

try:
    from pexpect import spawn
    import pty
except ImportError:
    pty = None


def which(filename):
    '''This takes a given filename; tries to find it in the environment path;
    then checks if it is executable. This returns the full path to the filename
    if found and executable. Otherwise this returns None.'''
    # Special case where filename contains an explicit path.
    if os.path.dirname(filename) != '' and is_executable_file(filename):
        return filename
    if 'PATH' not in os.environ or os.environ['PATH'] == '':
        p = os.defpath
    else:
        p = os.environ['PATH']
    pathlist = p.split(os.pathsep)
    for path in pathlist:
        ff = os.path.join(path, filename)
        if pty:
            if is_executable_file(ff):
                return ff
        else:
            pathext = os.environ.get('Pathext', '.exe;.com;.bat;.cmd')
            pathext = pathext.split(os.pathsep) + ['']
            for ext in pathext:
                if os.access(ff + ext, os.X_OK):
                    return ff + ext
    return None
Add handling of which on Windows
Add handling of which on Windows
Python
bsd-3-clause
Calysto/metakernel