zram-objs := zram_drv.o xvmalloc.o
obj-$(CONFIG_ZRAM) += zram.o
| {
"pile_set_name": "Github"
} |
var convert = require('./convert'),
func = convert('take', require('../take'));
func.placeholder = require('./placeholder');
module.exports = func;
| {
"pile_set_name": "Github"
} |
import Foundation
/// A Nimble matcher that succeeds when the actual sequence's last element
/// is equal to the expected value.
public func endWith<S: Sequence, T: Equatable>(_ endingElement: T) -> Predicate<S>
where S.Iterator.Element == T {
return Predicate.fromDeprecatedClosure { actualExpression, failureMessage in
failureMessage.postfixMessage = "end with <\(endingElement)>"
if let actualValue = try actualExpression.evaluate() {
var actualGenerator = actualValue.makeIterator()
var lastItem: T?
var item: T?
repeat {
lastItem = item
item = actualGenerator.next()
} while(item != nil)
return lastItem == endingElement
}
return false
}.requireNonNil
}
/// A Nimble matcher that succeeds when the actual collection's last element
/// is equal to the expected object.
public func endWith(_ endingElement: Any) -> Predicate<NMBOrderedCollection> {
return Predicate.fromDeprecatedClosure { actualExpression, failureMessage in
failureMessage.postfixMessage = "end with <\(endingElement)>"
guard let collection = try actualExpression.evaluate() else { return false }
guard collection.count > 0 else { return false }
#if os(Linux)
guard let collectionValue = collection.object(at: collection.count - 1) as? NSObject else {
return false
}
#else
let collectionValue = collection.object(at: collection.count - 1) as AnyObject
#endif
return collectionValue.isEqual(endingElement)
}.requireNonNil
}
/// A Nimble matcher that succeeds when the actual string contains the expected substring
/// where the expected substring's location is the actual string's length minus the
/// expected substring's length.
public func endWith(_ endingSubstring: String) -> Predicate<String> {
return Predicate.fromDeprecatedClosure { actualExpression, failureMessage in
failureMessage.postfixMessage = "end with <\(endingSubstring)>"
if let collection = try actualExpression.evaluate() {
return collection.hasSuffix(endingSubstring)
}
return false
}.requireNonNil
}
#if os(macOS) || os(iOS) || os(tvOS) || os(watchOS)
extension NMBObjCMatcher {
@objc public class func endWithMatcher(_ expected: Any) -> NMBObjCMatcher {
return NMBObjCMatcher(canMatchNil: false) { actualExpression, failureMessage in
let actual = try! actualExpression.evaluate()
if (actual as? String) != nil {
let expr = actualExpression.cast { $0 as? String }
return try! endWith(expected as! String).matches(expr, failureMessage: failureMessage)
} else {
let expr = actualExpression.cast { $0 as? NMBOrderedCollection }
return try! endWith(expected).matches(expr, failureMessage: failureMessage)
}
}
}
}
#endif
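// Usage sketch (illustrative): these matchers are normally invoked through
// Nimble's `expect(...).to(...)` DSL inside a test case, e.g.
//
//     expect([1, 2, 3]).to(endWith(3))            // sequence variant
//     expect("hello world").to(endWith("world"))  // string variant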
| {
"pile_set_name": "Github"
} |
Filter 1: ON PK Fc 26 Hz Gain 7.1 dB Q 1.50
Filter 2: ON PK Fc 3484 Hz Gain 8.5 dB Q 1.82
Filter 3: ON PK Fc 7544 Hz Gain 5.1 dB Q 4.00
Filter 4: ON PK Fc 9715 Hz Gain 10.1 dB Q 1.73
Filter 5: ON PK Fc 16758 Hz Gain -5.9 dB Q 0.09
Filter 6: ON PK Fc 130 Hz Gain -3.1 dB Q 1.37
Filter 7: ON PK Fc 715 Hz Gain -1.3 dB Q 0.26
Filter 8: ON PK Fc 1261 Hz Gain 3.0 dB Q 2.46
Filter 9: ON PK Fc 4780 Hz Gain 3.0 dB Q 1.68
Filter 10: ON PK Fc 5261 Hz Gain -7.0 dB Q 4.11
| {
"pile_set_name": "Github"
} |
package com.grace.zhihunews.deliveryLayer;
import android.content.Context;
/**
* Created by Administrator on 2016/9/1.
*/
/**
* Data delivery layer interface
*/
public interface INewsDetailProvider {
void getNewsDetail(int id);
}
| {
"pile_set_name": "Github"
} |
{
"CVE_data_meta": {
"ASSIGNER": "[email protected]",
"ID": "CVE-2018-17466",
"STATE": "PUBLIC"
},
"affects": {
"vendor": {
"vendor_data": [
{
"product": {
"product_data": [
{
"product_name": "Chrome",
"version": {
"version_data": [
{
"version_affected": "<",
"version_value": " 70.0.3538.67"
}
]
}
}
]
},
"vendor_name": "Google"
}
]
}
},
"data_format": "MITRE",
"data_type": "CVE",
"data_version": "4.0",
"description": {
"description_data": [
{
"lang": "eng",
"value": "Incorrect texture handling in Angle in Google Chrome prior to 70.0.3538.67 allowed a remote attacker to perform an out of bounds memory read via a crafted HTML page."
}
]
},
"problemtype": {
"problemtype_data": [
{
"description": [
{
"lang": "eng",
"value": "Out of bounds read"
}
]
}
]
},
"references": {
"reference_data": [
{
"name": "[debian-lts-announce] 20181213 [SECURITY] [DLA 1605-1] firefox-esr security update",
"refsource": "MLIST",
"url": "https://lists.debian.org/debian-lts-announce/2018/12/msg00002.html"
},
{
"name": "RHSA-2018:3833",
"refsource": "REDHAT",
"url": "https://access.redhat.com/errata/RHSA-2018:3833"
},
{
"name": "RHSA-2018:3831",
"refsource": "REDHAT",
"url": "https://access.redhat.com/errata/RHSA-2018:3831"
},
{
"name": "DSA-4362",
"refsource": "DEBIAN",
"url": "https://www.debian.org/security/2019/dsa-4362"
},
{
"name": "DSA-4330",
"refsource": "DEBIAN",
"url": "https://www.debian.org/security/2018/dsa-4330"
},
{
"name": "USN-3844-1",
"refsource": "UBUNTU",
"url": "https://usn.ubuntu.com/3844-1/"
},
{
"name": "106168",
"refsource": "BID",
"url": "http://www.securityfocus.com/bid/106168"
},
{
"name": "RHSA-2019:0159",
"refsource": "REDHAT",
"url": "https://access.redhat.com/errata/RHSA-2019:0159"
},
{
"name": "RHSA-2018:3004",
"refsource": "REDHAT",
"url": "https://access.redhat.com/errata/RHSA-2018:3004"
},
{
"name": "DSA-4354",
"refsource": "DEBIAN",
"url": "https://www.debian.org/security/2018/dsa-4354"
},
{
"name": "GLSA-201811-10",
"refsource": "GENTOO",
"url": "https://security.gentoo.org/glsa/201811-10"
},
{
"name": "USN-3868-1",
"refsource": "UBUNTU",
"url": "https://usn.ubuntu.com/3868-1/"
},
{
"name": "https://crbug.com/880906",
"refsource": "MISC",
"url": "https://crbug.com/880906"
},
{
"name": "https://chromereleases.googleblog.com/2018/10/stable-channel-update-for-desktop.html",
"refsource": "CONFIRM",
"url": "https://chromereleases.googleblog.com/2018/10/stable-channel-update-for-desktop.html"
},
{
"name": "105666",
"refsource": "BID",
"url": "http://www.securityfocus.com/bid/105666"
},
{
"name": "RHSA-2019:0160",
"refsource": "REDHAT",
"url": "https://access.redhat.com/errata/RHSA-2019:0160"
}
]
}
}
| {
"pile_set_name": "Github"
} |
// Copyright Aleksey Gurtovoy 2000-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// Preprocessed version of "boost/mpl/greater.hpp" header
// -- DO NOT modify by hand!
namespace boost { namespace mpl {
template<
typename Tag1
, typename Tag2
>
struct greater_impl
: if_c<
( BOOST_MPL_AUX_NESTED_VALUE_WKND(int, Tag1)
> BOOST_MPL_AUX_NESTED_VALUE_WKND(int, Tag2)
)
, aux::cast2nd_impl< greater_impl< Tag1,Tag1 >,Tag1, Tag2 >
, aux::cast1st_impl< greater_impl< Tag2,Tag2 >,Tag1, Tag2 >
>::type
{
};
/// for Digital Mars C++/compilers with no CTPS/TTP support
template<> struct greater_impl< na,na >
{
template< typename U1, typename U2 > struct apply
{
typedef apply type;
BOOST_STATIC_CONSTANT(int, value = 0);
};
};
template<> struct greater_impl< na,integral_c_tag >
{
template< typename U1, typename U2 > struct apply
{
typedef apply type;
BOOST_STATIC_CONSTANT(int, value = 0);
};
};
template<> struct greater_impl< integral_c_tag,na >
{
template< typename U1, typename U2 > struct apply
{
typedef apply type;
BOOST_STATIC_CONSTANT(int, value = 0);
};
};
template< typename T > struct greater_tag
{
typedef typename T::tag type;
};
template<
typename BOOST_MPL_AUX_NA_PARAM(N1)
, typename BOOST_MPL_AUX_NA_PARAM(N2)
>
struct greater
: greater_impl<
typename greater_tag<N1>::type
, typename greater_tag<N2>::type
>::template apply< N1,N2 >::type
{
BOOST_MPL_AUX_LAMBDA_SUPPORT(2, greater, (N1, N2))
};
BOOST_MPL_AUX_NA_SPEC2(2, 2, greater)
}}
namespace boost { namespace mpl {
template<>
struct greater_impl< integral_c_tag,integral_c_tag >
{
template< typename N1, typename N2 > struct apply
: bool_< ( BOOST_MPL_AUX_VALUE_WKND(N1)::value > BOOST_MPL_AUX_VALUE_WKND(N2)::value ) >
{
};
};
}}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html>
<!--
(C) by Michael Peter Christen, mc (at) yacy.net
licensed under a Creative Commons Attribution 2.0 Generic License (CC-BY 2.0)
-->
<head>
<title id="title"></title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<!-- scary MS stuff ahead to prevent IE from running in "IE=EmulateIE10" mode, which cannot run our JS code for some reason -->
<meta http-equiv="X-UA-Compatible" content="IE=edge"/>
<!--
if you don't see the glyphicons in firefox and you opened this file from a local filesystem,
then a CORS security setting in firefox caused that.
No other browser is that strict and there is no work around. To see the glyphicons in firefox,
open about:config and set security.fileuri.strict_origin_policy to ‘false.’
-->
<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico" />
<meta name="Content-Language" content="English, Englisch" />
<meta name="keywords" content="YaCy HTTP search engine spider indexer java network open free download Mac Windows Linux Software development" />
<meta name="description" content="Software HTTP Freeware Home Page" />
<meta name="copyright" content="Michael Christen et al." />
<!-- Ensure proper rendering and touch zooming on mobile devices -->
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- Bootstrap core CSS -->
<link href="../bootstrap/css/bootstrap.min.css" rel="stylesheet">
<script src="../bootstrap/js/jquery.min.js" type="text/javascript"></script>
<script src="../bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="navigation.js" type="text/javascript"></script>
<script src="../js/lib/underscore-min.js" type="text/javascript"></script>
<script src="../js/lib/backbone-min.js" type="text/javascript"></script>
<script src="../js/setup.js" type="text/javascript"></script> <!-- customization -->
<script src="../js/yacysearch.js" type="text/javascript"></script>
<!-- Custom styles for this template, i.e. navigation (move this to base.css) -->
<link href="../css/bootstrap-base.css" rel="stylesheet">
<!-- HTML5 shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!--[if lt IE 9]>
<script src="../bootstrap/js/html5shiv.js"></script>
<script src="../bootstrap/js/respond.min.js"></script>
<![endif]-->
<!-- old css styles -->
<link rel="stylesheet" type="text/css" media="all" href="../css/base.css" />
<link rel="stylesheet" type="text/css" media="screen" href="../css/style.css" />
<!--[if lt IE 6]>
<link rel="stylesheet" type="text/css" media="screen" href="../css/oldie.css" />
<![endif]-->
<!--[if lte IE 6.0]>
<link rel="stylesheet" type="text/css" media="screen" href="../css/ie6.css" />
<![endif]-->
<!--[if lte IE 7.0]>
<link rel="stylesheet" type="text/css" media="screen" href="../css/ie7.css" />
<![endif]-->
<script type="text/javascript">
function handleArrowKeys(evt) {
evt = (evt) ? evt : ((window.event) ? event : null);
if (evt) {
switch (evt.keyCode) {
case 9:
case 33:
window.location.href = document.getElementById("nextpage").href;
break;
case 34:
window.location.href = document.getElementById("prevpage").href;
break;
case 40:
}
}
}
document.onkeydown = handleArrowKeys;
</script>
<script type="text/javascript" src="../bootstrap/js/typeahead.jquery.min.js"></script>
<script type="text/javascript">
var suggestMatcher = function() {
return function opensearch(q, cb) {
$.getJSON(suggestUrl + "&q="+ q, function(data) {
var parsed = [];
for (var i = 0; i < data[0][1].length; i++) {
var row = data[0][1][i];
if (row) {
parsed[parsed.length] = {
data: [row],
value: row,
result: row
};
};
};
cb(parsed);
});
};
};
$(document).ready(function() {
$('#query').typeahead({hint:false,highlight:true,minLength:1}, {
name: 'states',
displayKey: 'value',
source: suggestMatcher()
});
});
</script>
<style type="text/css">.twitter-typeahead {margin: 0px;padding: 0px;top:2px;}</style> <!-- fix for input window -->
</head>
<body id="yacysearch" onLoad="document.searchform.query.focus();">
<!-- top navigation -->
<div class="navbar navbar-default navbar-fixed-top" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a id="homepage" class="navbar-brand" href="http://yacy.net" style="position:absolute;top:-6px;display:inline;white-space:nowrap;">
<img id="logo" class="yacylogo" src="../images/YaCyLogo2011_60.png" alt="YaCy" style="height:auto; width:auto; max-width:200px; max-height:32px;vertical-align:middle">
</a>
<span id="topmenu" style="position:absolute;top:12px;left:80px;display:inline;white-space:nowrap;font-size:2em;"></span>
</div>
<div class="navbar-collapse collapse">
<ul class="nav navbar-nav navbar-right">
<li id="header_help" class="dropdown">
<a href="#" data-toggle="dropdown" class="dropdown-toggle"><span class="glyphicon glyphicon-question-sign"></span></a>
<ul class="dropdown-menu">
<li id="header_search"><a href="index.html?">Search Page</a></li>
<li id="header_profile"><a href="about.html?">About This Page</a></li>
<li id="header_usage"><a href="usage.html?">Usage of this page templates</a></li>
<li id="header_tutorial"><a href="http://yacy.net/tutorials/">YaCy Tutorials</a></li>
<li class="divider"></li>
<li id="header_download"><a href="http://yacy.net" target="_blank"><i>external</i> Download YaCy</a></li>
<li id="header_community"><a href="http://forum.yacy.de" target="_blank"><i>external</i> Community (Web Forums)</a></li>
<li id="header_wiki"><a href="http://wiki.yacy.de" target="_blank"><i>external</i> Project Wiki</a></li>
<li id="header_git"><a href="https://github.com/yacy/yacy_search_server/commits/master" target="_blank"><i>external</i> Git Repository</a></li>
<li id="header_bugs"><a href="http://bugs.yacy.net" target="_blank"><i>external</i> Bugtracker</a></li>
</ul>
</li>
</ul>
</div>
</div>
</div>
<div class="container-fluid">
<div class="starter-template">
<!-- body -->
</div> <!-- close the starter-template, to open a row -->
<div class="row">
<div class="col-sm-8 col-sm-offset-4 col-md-9 col-md-offset-3 main" id="main">
<form class="search small" name="searchform" action="#" method="get" accept-charset="UTF-8" style="position:fixed;top:6px;z-index:1052;max-width:500px;">
<div class="input-group">
<input name="query" id="query" type="text" class="form-control searchinput typeahead" size="40" maxlength="200" placeholder="Document Retrieval" value="" autofocus="autofocus" onFocus="this.select()" onclick="document.getElementById('Enter').innerHTML = 'search'"/>
<div class="input-group-btn">
<button id="Enter" name="Enter" class="btn btn-default" type="submit">search</button>
</div>
</div>
<input type="hidden" name="nav" value="filetype,protocol,hosts,language,authors,namespace,topics" />
<input type="hidden" name="maximumRecords" id="maximumRecords" value="10" />
<input type="hidden" name="startRecord" id="startRecord" value="0" />
<input type="hidden" name="layout" id="layout" value="paragraph" />
<input type="hidden" name="contentdom" id="contentdom" value="text" />
<input id="timezoneOffset" type="hidden" name="timezoneOffset" value="">
<script>document.getElementById("timezoneOffset").value = new Date().getTimezoneOffset();</script>
</form>
<!-- type the number of results and navigation bar -->
<div id="results"></div>
<div class="progress" id="progress">
<div class="progress-bar progress-bar-info" id="progressbar" role="progressbar" style="width:0%;">
<span style="position:absolute;padding-left:10px;display:block;text-align:left;width:100%;color:black;" id="progressbar_text"></span>
</div>
</div>
<script>
if (document.getElementById("progressbar").getAttribute('class') != "progress-bar progress-bar-success") {
document.getElementById("progressbar").setAttribute('style',"width:100%");
document.getElementById("progressbar").setAttribute('style',"transition:transform 0s;-webkit-transition:-webkit-transform 0s;");
document.getElementById("progressbar").setAttribute('class',"progress-bar progress-bar-success");
window.setTimeout(fadeOutBar, 500);
}
</script>
<!-- linklist begin -->
<div id="downloadscript" style="clear:both;"></div>
<div id="searchresults" style="clear:both;"></div>
<!-- linklist end -->
<span id="resNav" class="col-sm-12 col-md-12" style="display: inline;"></span>
</div> <!-- close main -->
<div class="col-sm-4 col-md-3 sidebar" id="sidebar">
<!-- navigation begin -->
<p class="navbutton"></p>
<div class="btn-group btn-group-justified">
<div class="btn-group btn-group-xs"><button type="button" id="sort_button_context" class="btn btn-default" onclick="document.getElementById('query').value=document.getElementById('query').value.replace(' /date','');document.searchform.submit();">Context Ranking</button></div>
<div class="btn-group btn-group-xs"><button type="button" id="sort_button_date" class="btn btn-default" onclick="document.getElementById('query').value=document.getElementById('query').value + ' /date';document.searchform.submit();">Sort by Date</button></div>
</div>
<div id="searchnavigation"></div>
<div id="downloadbutton"></div>
<!-- navigation end -->
</div> <!-- close sidebar -->
</div> <!-- close row -->
</div> <!-- close content container -->
<!-- modify content according to query url attributes -->
<script type="text/javascript">
var rowCollection;
function makeDownloadScript() {
document.getElementById("downloadscript").innerHTML = "<div><pre>" + rowCollection.resultScript() + "</pre><br/></div>";
document.getElementById("downloadbutton").innerHTML = "<input id=\"downloadbutton\" type=\"button\" value=\"hide the download script\" onClick=\"hideDownloadScript();\"/>";
}
function hideDownloadScript() {
document.getElementById("downloadscript").innerHTML = "";
var dlb = document.getElementById("downloadbutton");
if (dlb) dlb.innerHTML = "<input type=\"button\" value=\"create a download script\" onClick=\"makeDownloadScript();\"/>";
}
function hideScriptButton() {
document.getElementById("downloadscript").innerHTML = "";
var dlb = document.getElementById("downloadbutton");
if (dlb) dlb.innerHTML = "";
}
document.getElementById("title").appendChild(document.createTextNode(headline));
document.getElementById("topmenu").appendChild(document.createTextNode(headline));
document.getElementById("homepage").setAttribute("href", homepage);
document.getElementById("logo").setAttribute("src", logo);
document.getElementById("query").setAttribute("placeholder", queryplaceholder);
// read command line options if present
var query = new RegExp("[\\?&]query=([^&#]*)").exec(window.location.href);
if (query == null || query.length < 2 || query[1].length == 0) {
// remove progress and sidebar if there is no result
document.getElementById("sidebar").innerHTML = "";
document.getElementById("progress").remove();
} else {
query = decodeURIComponent(query[1].replace(/\+/g, '%20'));
var startRecord = new RegExp("[\\?&]startRecord=([^&#]*)").exec(window.location.href);
if (startRecord == null) startRecord = 0; else startRecord = startRecord[1];
var maximumRecords = new RegExp("[\\?&]maximumRecords=([^&#]*)").exec(window.location.href);
if (maximumRecords == null) maximumRecords = 10; else maximumRecords = maximumRecords[1];
var layout = new RegExp("[\\?&]layout=([^&#]*)").exec(window.location.href);
if (layout == null) layout = "paragraph"; else layout = layout[1];
var contentdom = new RegExp("[\\?&]contentdom=([^&#]*)").exec(window.location.href);
if (contentdom == null) contentdom = "text"; else contentdom = contentdom[1];
document.getElementById("query").value=query;
document.getElementById("maximumRecords").value=maximumRecords;
document.getElementById("startRecord").value=startRecord;
document.getElementById("layout").value=layout;
//search for query
var resulthtml = document.getElementById("searchresults");
document.getElementById("progressbar_text").innerHTML = "loading...";
if (maximumRecords == "") maximumRecords = 10;
if (startRecord == "") startRecord = 0;
if (query == null) query = "";
var topicsModel = new ModifierModel({key:'topics:',query:query});
var siteModel = new ModifierModel({key:'site:',query:query});
var authorModel = new ModifierModel({key:'author:',query:query});
var filetypeModel = new ModifierModel({key:'filetype:',query:query});
var ext = filetypeModel.attributes.value;
var hl = (layout=="paragraph") ? 'true' : 'false';
var searchResult = new SearchModel({hl:hl,query:query,start:startRecord,rows:maximumRecords,servlet:"index.html",layout:layout,contentdom:contentdom});
searchResult.fetch({
timeout:10000,
success:function(searchResult) {
document.getElementById("progressbar_text").innerHTML = "parsing result...";
rowCollection = new RowCollection({servlet:searchResult.attributes.servlet});
rowCollection.add(searchResult.attributes.items);
var totalResults = searchResult.attributes.totalResults.replace(/[,.]/,"");
var navigation = searchResult.navigationCollection();
// update navigation
var topicsfacet = navigation.facet("topics");
var topics = topicsfacet ? topicsfacet.facetElements() : {};
var authorfacet = navigation.facet("authors");
var author = authorfacet ? authorfacet.facetElements() : {};
for (var key in author) {if (query.indexOf("author:(" + key + ")") >= 0) delete author[key];}
var sitefacet = navigation.facet("domains");
var site = sitefacet ? sitefacet.facetElements() : {};
for (var key in site) {if (query.indexOf("site:" + key) >= 0) delete site[key];}
var filetypefacet = navigation.facet("filetypes");
var filetypes = filetypefacet ? filetypefacet.facetElements() : {};
for (var key in filetypes) {if (query.indexOf("filetype:" + key) >= 0) delete filetypes[key];}
if (layout == "images") {
if (totalResults == 0)
document.getElementById("progressbar_text").innerHTML = "no images found";
else
document.getElementById("progressbar_text").innerHTML = "found " + totalResults + " images, preparing...";
resulthtml.innerHTML = rowCollection.resultImages();
hideDownloadScript();
} else if (layout == "paragraph") {
if (totalResults == 0)
document.getElementById("progressbar_text").innerHTML = "no documents found";
else
document.getElementById("progressbar_text").innerHTML = "found " + totalResults + " documents, preparing result list...";
resulthtml.innerHTML = rowCollection.resultList();
hideScriptButton();
} else {
if (totalResults == 0)
document.getElementById("progressbar_text").innerHTML = "no documents found";
else
document.getElementById("progressbar_text").innerHTML = "found " + totalResults + " documents, preparing table...";
resulthtml.innerHTML = rowCollection.resultTable();
hideDownloadScript();
}
var searchnavigation = document.getElementById("searchnavigation");
searchnavigation.innerHTML += searchResult.renderNavigation("Result Layout");
if (topicsfacet) searchnavigation.innerHTML += topicsfacet.tagCloud(searchResult.attributes.servlet, topicsModel.attributes.key, topicsModel.attributes.value, 8, searchResult);
if (filetypefacet) searchnavigation.innerHTML += filetypefacet.facetBox(searchResult.attributes.servlet, filetypeModel.attributes.key, filetypeModel.attributes.value, 8, searchResult);
if (sitefacet) searchnavigation.innerHTML += sitefacet.facetBox(searchResult.attributes.servlet, siteModel.attributes.key, siteModel.attributes.value, 16, searchResult);
if (authorfacet) searchnavigation.innerHTML += authorfacet.facetBox(searchResult.attributes.servlet, authorModel.attributes.key, authorModel.attributes.value, 16, searchResult);
// generic facets
var genericfacets = navigation.genericfacets();
for (var i = 0; i < genericfacets.length; i++) {
var genericfacet = genericfacets[i];
var gModel = new ModifierModel({key:"/vocabulary/" + genericfacet + "/", query:query});
var gfacet = navigation.facet(genericfacet);
var g = gfacet ? gfacet.facetElements() : {};
for (var key in gfacet) {
if (query.indexOf("/vocabulary/" + genericfacet + "/" + key) >= 0) delete gfacet[key];
}
searchnavigation.innerHTML += gfacet.facetBox(searchResult.attributes.servlet, gModel.attributes.key, gModel.attributes.value, 16, searchResult);
}
// update progress and navigation
statistics(startRecord, maximumRecords, totalResults, window.location.href.replace(/&startRecord=[^&#]*/g,"")); //startRecord, maximumRecords, totalcount, navurlbase
},
error:function(searchResult) {
document.getElementById("progressbar_text").innerHTML = "The search service is not available right now. Please try again.";
}
});
// set buttons
if (document.getElementById('query').value.indexOf(" /date") == -1) {
document.getElementById("sort_button_context").setAttribute("class","btn btn-default active");
document.getElementById("sort_button_context").setAttribute("onclick","");
} else {
document.getElementById("sort_button_date").setAttribute("class","btn btn-default active");
document.getElementById("sort_button_date").setAttribute("onclick","");
}
}
</script>
</body>
</html>
| {
"pile_set_name": "Github"
} |
<?php
/**
* Copyright © Magento, Inc. All rights reserved.
* See COPYING.txt for license details.
*/
declare(strict_types=1);
use Magento\Bundle\Model\Product\Price;
use Magento\Catalog\Api\Data\ProductInterfaceFactory;
use Magento\Catalog\Api\ProductRepositoryInterface;
use Magento\Catalog\Model\Product\Attribute\Source\Status;
use Magento\Catalog\Model\Product\Type;
use Magento\Catalog\Model\Product\Type\AbstractType;
use Magento\Catalog\Model\Product\Visibility;
use Magento\Store\Api\WebsiteRepositoryInterface;
use Magento\TestFramework\Bundle\Model\PrepareBundleLinks;
use Magento\TestFramework\Helper\Bootstrap;
use Magento\TestFramework\Workaround\Override\Fixture\Resolver;
Resolver::getInstance()->requireDataFixture('Magento/Bundle/_files/multiple_products.php');
$objectManager = Bootstrap::getObjectManager();
/** @var PrepareBundleLinks $prepareBundleLinks */
$prepareBundleLinks = $objectManager->get(PrepareBundleLinks::class);
/** @var ProductRepositoryInterface $productRepository */
$productRepository = $objectManager->create(ProductRepositoryInterface::class);
/** @var ProductInterfaceFactory $productFactory */
$productFactory = $objectManager->get(ProductInterfaceFactory::class);
/** @var WebsiteRepositoryInterface $websiteRepository */
$websiteRepository = $objectManager->get(WebsiteRepositoryInterface::class);
$defaultWebsiteId = $websiteRepository->get('base')->getId();
$bundleProduct = $productFactory->create();
$bundleProduct->setTypeId(Type::TYPE_BUNDLE)
->setAttributeSetId($bundleProduct->getDefaultAttributeSetId())
->setWebsiteIds([$defaultWebsiteId])
->setName('Bundle Product')
->setSku('fixed_bundle_product_with_special_price')
->setVisibility(Visibility::VISIBILITY_BOTH)
->setStatus(Status::STATUS_ENABLED)
->setStockData(
[
'use_config_manage_stock' => 1,
'qty' => 100,
'is_qty_decimal' => 0,
'is_in_stock' => 1,
]
)
->setPriceView(1)
->setSkuType(1)
->setWeightType(1)
->setPriceType(Price::PRICE_TYPE_FIXED)
->setPrice(50.0)
->setSpecialPrice(80)
->setShipmentType(AbstractType::SHIPMENT_TOGETHER);
$bundleOptionsData = [
[
'title' => 'Option 1',
'default_title' => 'Option 1',
'type' => 'radio',
'required' => 1,
'delete' => '',
],
];
$bundleSelectionsData = [
[
'sku' => 'simple1',
'selection_qty' => 1,
'selection_price_value' => 10,
'selection_price_type' => 0,
'selection_can_change_qty' => 1,
],
[
'sku' => 'simple2',
'selection_qty' => 1,
'selection_price_value' => 25,
'selection_price_type' => 1,
'selection_can_change_qty' => 1,
],
[
'sku' => 'simple3',
'selection_qty' => 1,
'selection_price_value' => 25,
'selection_price_type' => 0,
'selection_can_change_qty' => 1,
],
];
$bundleProduct = $prepareBundleLinks->execute($bundleProduct, $bundleOptionsData, [$bundleSelectionsData]);
$productRepository->save($bundleProduct);
| {
"pile_set_name": "Github"
} |
dueboot
=======
Based on https://github.com/neykov/armboot, this is a template for Arduino Due projects.
Compiling
---------
Modify the Rakefile with your paths and ports, then run "rake burn" to upload to the Arduino.
Structure
---------
core.rs - sample program (blinks the LED of the Arduino board)
arduino.rs - extern stubs for the core Arduino libraries
hardware/ - from a random Arduino IDE for OS X
Credits
-------
- armboot: https://github.com/neykov/armboot
- zero.rs: https://github.com/pcwalton/zero.rs
| {
"pile_set_name": "Github"
} |
#ifndef VCTRS_SLICE_ASSIGN_H
#define VCTRS_SLICE_ASSIGN_H
#include "owned.h"
struct vec_assign_opts {
bool assign_names;
bool ignore_outer_names;
struct vctrs_arg* x_arg;
struct vctrs_arg* value_arg;
};
SEXP vec_assign_opts(SEXP x, SEXP index, SEXP value,
const struct vec_assign_opts* opts);
SEXP vec_proxy_assign_opts(SEXP proxy, SEXP index, SEXP value,
const enum vctrs_owned owned,
const struct vec_assign_opts* opts);
SEXP chr_assign(SEXP out, SEXP index, SEXP value, const enum vctrs_owned owned);
SEXP list_assign(SEXP out, SEXP index, SEXP value, const enum vctrs_owned owned);
SEXP df_assign(SEXP x, SEXP index, SEXP value,
const enum vctrs_owned owned,
const struct vec_assign_opts* opts);
SEXP vec_assign_shaped(SEXP proxy, SEXP index, SEXP value,
const enum vctrs_owned owned,
const struct vec_assign_opts* opts);
#endif
| {
"pile_set_name": "Github"
} |
//
// ParseEnabled.h
// Monal
//
// Created by Anurodh Pokharel on 2/2/15.
// Copyright (c) 2015 Monal.im. All rights reserved.
//
#import "XMPPParser.h"
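/**
 Parses the stream management <enabled/> element (XEP-0198) that the server
 sends once stream management has been enabled for the current stream.
 */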
@interface ParseEnabled : XMPPParser
/**
supports resume on server
*/
@property (nonatomic, assign, readonly) BOOL resume;
@property (nonatomic, copy, readonly) NSString *streamID;
/**
server's max resumption time
*/
@property (nonatomic, copy, readonly) NSNumber *max;
/**
where to reconnect to -- not implemented
*/
@property (nonatomic, copy, readonly) NSString *location;
@end
| {
"pile_set_name": "Github"
} |
<?php
/**
* Magento
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://opensource.org/licenses/osl-3.0.php
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to [email protected] so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade Magento to newer
* versions in the future. If you wish to customize Magento for your
* needs please refer to http://www.magento.com for more information.
*
* @category Tests
* @package Tests_Functional
* @copyright Copyright (c) 2006-2020 Magento, Inc. (http://www.magento.com)
* @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0)
*/
namespace Mage\Admin\Test\Constraint;
use Mage\Admin\Test\Fixture\User;
use Magento\Mtf\Constraint\AbstractConstraint;
use Mage\Admin\Test\Page\Adminhtml\UserIndex;
/**
* Asserts that user is present in User Grid.
*/
class AssertUserInGrid extends AbstractConstraint
{
/**
* Constraint severeness.
*
* @var string
*/
protected $severeness = 'low';
/**
* Asserts that user is present in User Grid.
*
* @param UserIndex $userIndex
* @param User $user
* @param User $customAdmin
* @return void
*/
public function processAssert(
UserIndex $userIndex,
User $user,
User $customAdmin = null
) {
$adminUser = ($user->hasData('password') || $user->hasData('username')) ? $user : $customAdmin;
$filter = ['username' => $adminUser->getUsername()];
$userIndex->open();
\PHPUnit_Framework_Assert::assertTrue(
$userIndex->getUserGrid()->isRowVisible($filter),
'User with name \'' . $adminUser->getUsername() . '\' is absent in User grid.'
);
}
/**
* Return string representation of object.
*
* @return string
*/
public function toString()
{
return 'User is present in Users grid.';
}
}
| {
"pile_set_name": "Github"
} |
{
"_from": "react",
"_id": "[email protected]",
"_inBundle": false,
"_integrity": "sha1-uqhDTsZ4C96ZfNw4C3nNM7ljk98=",
"_location": "/react",
"_phantomChildren": {},
"_requested": {
"type": "tag",
"registry": true,
"raw": "react",
"name": "react",
"escapedName": "react",
"rawSpec": "",
"saveSpec": null,
"fetchSpec": "latest"
},
"_requiredBy": [
"#USER",
"/"
],
"_resolved": "https://registry.npmjs.org/react/-/react-0.14.9.tgz",
"_shasum": "baa8434ec6780bde997cdc380b79cd33b96393df",
"_spec": "react",
"_where": "/home/thomas/Code/dynamic-cdn-webpack-plugin/test/fixtures/multiple",
"browserify": {
"transform": [
"loose-envify"
]
},
"bugs": {
"url": "https://github.com/facebook/react/issues"
},
"bundleDependencies": false,
"dependencies": {
"create-react-class": "^15.6.0",
"fbjs": "^0.8.9",
"loose-envify": "^1.1.0",
"object-assign": "^4.1.0",
"prop-types": "^15.5.10"
},
"deprecated": false,
"description": "React is a JavaScript library for building user interfaces.",
"engines": {
"node": ">=0.10.0"
},
"files": [
"LICENSE",
"PATENTS",
"addons.js",
"react.js",
"addons/",
"dist/",
"lib/"
],
"homepage": "https://facebook.github.io/react/",
"keywords": [
"react"
],
"license": "BSD-3-Clause",
"main": "react.js",
"name": "react",
"repository": {
"type": "git",
"url": "git+https://github.com/facebook/react.git"
},
"version": "0.14.9"
}
| {
"pile_set_name": "Github"
} |
name: "BigPanda"
url: "https://bigpanda.io/"
| {
"pile_set_name": "Github"
} |
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# RAPPOR simulation library. Contains code for encoding simulated data and
# creating the map used to encode and decode reports.
library(glmnet)
library(parallel) # mclapply
library(Matrix) # sparseMatrix, rBind
SetOfStrings <- function(num_strings = 100) {
# Generates a set of strings for simulation purposes.
strs <- paste0("V_", as.character(1:num_strings))
strs
}
GetSampleProbs <- function(params) {
# Generate different underlying distributions for simulations purposes.
# Args:
# - params: a list describing the shape of the true distribution:
# c(num_strings, prop_nonzero_strings, decay_type,
# rate_exponential, background).
nstrs <- params[[1]]
nonzero <- params[[2]]
decay <- params[[3]]
expo <- params[[4]]
background <- params[[5]]
probs <- rep(0, nstrs)
ind <- floor(nstrs * nonzero)
if (decay == "Linear") {
probs[1:ind] <- (ind:1) / sum(1:ind)
} else if (decay == "Constant") {
probs[1:ind] <- 1 / ind
} else if (decay == "Exponential") {
temp <- seq(0, nonzero, length.out = ind)
temp <- exp(-temp * expo)
temp <- temp + background
temp <- temp / sum(temp)
probs[1:ind] <- temp
} else {
stop('params[[3]] must be in c("Linear", "Exponential", "Constant")')
}
probs
}
EncodeAll <- function(x, cohorts, map, params, num_cores = 1) {
# Encodes the ground truth into RAPPOR reports.
#
# Args:
# x: Observed strings for each report, Nx1 vector
# cohort: Cohort assignment for each report, Nx1 vector
# map: list of matrices encoding locations of hashes for each
# string, for each cohort
# params: System parameters
#
# Returns:
# RAPPOR reports for each piece of data.
p <- params$p
q <- params$q
f <- params$f
k <- params$k
qstar <- (1 - f / 2) * q + (f / 2) * p
pstar <- (1 - f / 2) * p + (f / 2) * q
candidates <- colnames(map[[1]])
if (!all(x %in% candidates)) {
stop("Some strings are not in the map. set(X) - set(candidates): ",
paste(setdiff(unique(x), candidates), collapse=" "), "\n")
}
bfs <- mapply(function(x, y) y[, x], x, map[cohorts], SIMPLIFY = FALSE,
USE.NAMES = FALSE)
reports <- mclapply(bfs, function(x) {
noise <- sample(0:1, k, replace = TRUE, prob = c(1 - pstar, pstar))
ind <- which(x)
noise[ind] <- sample(0:1, length(ind), replace = TRUE,
prob = c(1 - qstar, qstar))
noise
}, mc.cores = num_cores)
reports
}
CreateMap <- function(strs, params, generate_pos = TRUE, basic = FALSE) {
# Creates a list of 0/1 matrices corresponding to mapping between the strs and
# Bloom filters for each instance of the RAPPOR.
# Ex. for 3 strings, 2 instances, 1 hash function and Bloom filter of size 4,
# the result could look this:
# [[1]]
# 1 0 0 0
# 0 1 0 0
# 0 0 0 1
# [[2]]
# 0 1 0 0
# 0 0 0 1
# 0 0 1 0
#
# Args:
# strs: a vector of strings
# params: a list of parameters in the following format:
# (k, h, m, p, q, f).
# generate_pos: Tells whether to generate an object storing the
# positions of the nonzeros in the matrix
# basic: Tells whether to use basic RAPPOR (only works if h=1).
M <- length(strs)
map_by_cohort <- list()
k <- params$k
h <- params$h
m <- params$m
for (i in 1:m) {
if (basic && (h == 1) && (k == M)) {
ones <- 1:M
} else {
ones <- sample(1:k, M * h, replace = TRUE)
}
cols <- rep(1:M, each = h)
map_by_cohort[[i]] <- sparseMatrix(ones, cols, dims = c(k, M))
colnames(map_by_cohort[[i]]) <- strs
}
all_cohorts_map <- do.call("rBind", map_by_cohort)
if (generate_pos) {
map_pos <- t(apply(all_cohorts_map, 2, function(x) {
ind <- which(x == 1)
n <- length(ind)
if (n < h * m) {
ind <- c(ind, rep(NA, h * m - n))
}
ind
}))
} else {
map_pos <- NULL
}
list(map_by_cohort = map_by_cohort, all_cohorts_map = all_cohorts_map,
map_pos = map_pos)
}
GetSample <- function(N, strs, probs) {
# Sample for the strs population with distribution probs.
sample(strs, N, replace = TRUE, prob = probs)
}
GetTrueBits <- function(samp, map, params) {
# Convert sample generated by GetSample() to Bloom filters where mapping
# is defined in map.
# Output:
# - reports: a matrix of size [num_instances x size] where each row
# represents the number of times each bit in the Bloom filter
# was set for a particular instance.
# Note: reports[, 1] contains the sample size for each instance.
N <- length(samp)
k <- params$k
m <- params$m
strs <- colnames(map[[1]])
reports <- matrix(0, m, k + 1)
inst <- sample(1:m, N, replace = TRUE)
for (i in 1:m) {
tab <- table(samp[inst == i])
tab2 <- rep(0, length(strs))
tab2[match(names(tab), strs)] <- tab
counts <- apply(map[[i]], 1, function(x) x * tab2)
# cat(length(tab2), dim(map[[i]]), dim(counts), "\n")
reports[i, ] <- c(sum(tab2), apply(counts, 2, sum))
}
reports
}
GetNoisyBits <- function(truth, params) {
# Applies RAPPOR to the Bloom filters.
# Args:
# - truth: a matrix generated by GetTrueBits().
k <- params$k
p <- params$p
q <- params$q
f <- params$f
rappors <- apply(truth, 1, function(x) {
# The following samples considering 4 cases:
# 1. Signal and we lie on the bit.
# 2. Signal and we tell the truth.
# 3. Noise and we lie.
# 4. Noise and we tell the truth.
# Lies when signal sampled from the binomial distribution.
lied_signal <- rbinom(k, x[-1], f)
# Remaining must be the non-lying bits when signal. Sampled with q.
truth_signal <- x[-1] - lied_signal
# Lies when there is no signal which happens x[1] - x[-1] times.
lied_nosignal <- rbinom(k, x[1] - x[-1], f)
# Truth when there's no signal. These are sampled with p.
truth_nosignal <- x[1] - x[-1] - lied_nosignal
# Total lies and sampling lies with 50/50 for either p or q.
lied <- lied_signal + lied_nosignal
lied_p <- rbinom(k, lied, .5)
lied_q <- lied - lied_p
# Generating the report where sampling of either p or q occurs.
rbinom(k, lied_q + truth_signal, q) + rbinom(k, lied_p + truth_nosignal, p)
})
cbind(truth[, 1], t(rappors))
}
GenerateSamples <- function(N = 10^5, params, pop_params, alpha = .05,
prop_missing = 0,
correction = "Bonferroni") {
# Simulate N reports with pop_params describing the population and
# params describing the RAPPOR configuration.
num_strings = pop_params[[1]]
strs <- SetOfStrings(num_strings)
probs <- GetSampleProbs(pop_params)
samp <- GetSample(N, strs, probs)
map <- CreateMap(strs, params)
truth <- GetTrueBits(samp, map$map_by_cohort, params)
rappors <- GetNoisyBits(truth, params)
strs_apprx <- strs
map_apprx <- map$all_cohorts_map
# Remove % of strings to simulate missing variables.
if (prop_missing > 0) {
ind <- which(probs > 0)
removed <- sample(ind, ceiling(prop_missing * length(ind)))
map_apprx <- map$all_cohorts_map[, -removed]
strs_apprx <- strs[-removed]
}
# Randomize the columns.
ind <- sample(1:length(strs_apprx), length(strs_apprx))
map_apprx <- map_apprx[, ind]
strs_apprx <- strs_apprx[ind]
fit <- Decode(rappors, map_apprx, params, alpha = alpha,
correction = correction)
# Add truth column.
fit$fit$Truth <- table(samp)[fit$fit$string]
fit$fit$Truth[is.na(fit$fit$Truth)] <- 0
fit$map <- map$map_by_cohort
fit$truth <- truth
fit$strs <- strs
fit$probs <- probs
fit
}
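# Illustrative usage sketch: the parameter values below are arbitrary examples,
# and Decode() is assumed to be provided elsewhere in the package (it is called
# by GenerateSamples() but not defined in this file).
#
#   params <- list(k = 32, h = 2, m = 16, p = 0.5, q = 0.75, f = 0.5)
#   pop_params <- list(100, 0.2, "Exponential", 10, 0.001)  # num_strings, prop_nonzero,
#                                                           # decay, expo, background
#   fit <- GenerateSamples(N = 10000, params = params, pop_params = pop_params)
#   head(fit$fit)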
| {
"pile_set_name": "Github"
} |
import re
def parseDeviceId(id):
match = re.search('(#|\\\\)vid_([a-f0-9]{4})&pid_([a-f0-9]{4})(&|#|\\\\)', id, re.IGNORECASE)
return [int(match.group(i), 16) if match else None for i in [2, 3]]
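# parseDeviceId() extracts the USB vendor and product IDs (VID/PID) from a
# Windows device instance ID. Illustrative examples (the ID strings are made up):
#
#   parseDeviceId(r'USB\VID_046D&PID_C52B\5&123ABC&0&2')  # -> [1133, 50475] (0x046D, 0xC52B)
#   parseDeviceId('no-ids-here')                          # -> [None, None]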
| {
"pile_set_name": "Github"
} |
# repeating [![Build Status](https://travis-ci.org/sindresorhus/repeating.svg?branch=master)](https://travis-ci.org/sindresorhus/repeating)
> Repeat a string - fast
## Install
```
$ npm install --save repeating
```
## Usage
```js
const repeating = require('repeating');
repeating('unicorn ', 100);
//=> 'unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn '
```
## Related
- [repeating-cli](https://github.com/sindresorhus/repeating-cli) - CLI for this module
## License
MIT © [Sindre Sorhus](https://sindresorhus.com)
| {
"pile_set_name": "Github"
} |
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
#pragma once
#include "SystemComponentFixture.h"
#include <Tests/TestAssetCode/AnimGraphFactory.h>
namespace EMotionFX
{
class Actor;
class ActorInstance;
class AnimGraph;
class AnimGraphInstance;
class AnimGraphMotionNode;
class AnimGraphStateMachine;
class AnimGraphStateTransition;
class AnimGraphObject;
class MotionSet;
class AnimGraphTransitionConditionFixture
: public SystemComponentFixture
{
public:
void SetUp() override;
void TearDown() override;
virtual void AddNodesToAnimGraph()
{
}
TwoMotionNodeAnimGraph* GetAnimGraph() const
{
return m_animGraph.get();
}
AnimGraphInstance* GetAnimGraphInstance() const
{
return m_animGraphInstance;
}
protected:
AnimGraphStateMachine* m_stateMachine = nullptr;
AnimGraphInstance* m_animGraphInstance = nullptr;
AnimGraphMotionNode* m_motionNodeA = nullptr;
AnimGraphMotionNode* m_motionNodeB = nullptr;
AnimGraphStateTransition* m_transition = nullptr;
AZStd::unique_ptr<Actor> m_actor;
AZStd::unique_ptr<TwoMotionNodeAnimGraph> m_animGraph;
MotionSet* m_motionSet = nullptr;
ActorInstance* m_actorInstance = nullptr;
};
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="System.Reactive" version="4.0.0-preview00001" targetFramework="net462" />
<package id="System.Reactive.Core" version="4.0.0-preview00001" targetFramework="net462" />
<package id="System.Reactive.Interfaces" version="4.0.0-preview00001" targetFramework="net462" />
<package id="System.Reactive.Linq" version="4.0.0-preview00001" targetFramework="net462" />
<package id="System.Reactive.PlatformServices" version="4.0.0-preview00001" targetFramework="net462" />
<package id="System.Reactive.Windows.Threading" version="4.0.0-preview00001" targetFramework="net462" />
<package id="WhiteTie" version="1.3.16" targetFramework="net461" developmentDependency="true" />
</packages>
| {
"pile_set_name": "Github"
} |
//
// TWTRAPIClient.h
//
// Copyright (c) 2015 Twitter. All rights reserved.
//
#import "TWTRDefines.h"
NS_ASSUME_NONNULL_BEGIN
FOUNDATION_EXPORT NSString * const TWTRTweetsNotLoadedKey;
@class TWTRUser;
@class TWTRTweet;
@class TWTRAuthConfig;
@class TWTRGuestSession;
@protocol TWTRAuthSession;
/**
* @name Completion Block Types
*/
/**
* Completion block called when the load user request succeeds or fails.
*
* @param user The Twitter User.
* @param error Error that will be set if the API request failed.
*/
typedef void (^TWTRLoadUserCompletion)(TWTRUser * __twtr_nullable user, NSError * __twtr_nullable error);
/**
* Completion block called when the load Tweet request succeeds or fails.
*
* @param tweet The Twitter Tweet.
* @param error Error that will be set if the API request failed.
*/
typedef void (^TWTRLoadTweetCompletion)(TWTRTweet * __twtr_nullable tweet, NSError * __twtr_nullable error);
/**
* Completion block called when the load Tweets request succeeds or fails.
*
* @param tweets Tweets that were successfully retrieved.
* @param error Error that will be set if the API request failed.
*/
typedef void (^TWTRLoadTweetsCompletion)(NSArray * __twtr_nullable tweets, NSError * __twtr_nullable error);
/**
* Completion block called when the network request succeeds or fails.
*
* @param response Metadata associated with the response to a URL load request.
* @param data Content data of the response.
* @param connectionError Error object describing the network error that occurred.
*/
typedef void (^TWTRNetworkCompletion)(NSURLResponse * __twtr_nullable response, NSData * __twtr_nullable data, NSError * __twtr_nullable connectionError);
/**
* Completion block called when a JSON request to the Twitter API succeeds or fails.
*
* @param response Metadata associated with the response to a URL load request.
* @param responseObject Content data of the response.
* @param error Error object describing the network error that occurred.
*/
typedef void (^TWTRJSONRequestCompletion)(NSURLResponse * __twtr_nullable response, id __twtr_nullable responseObject, NSError * __twtr_nullable error);
/**
* Client for consuming the Twitter REST API. Provides methods for common API requests, as well as the ability to create and send custom requests.
*/
@interface TWTRAPIClient : NSObject
/**
* @name Initialization
*/
- (instancetype)init __attribute__((unavailable(("Use one of the other `-init...` methods that allow you to provide signing parameters"))));
/**
* This method is deprecated since TwitterKit v1.4.0. To get an API client, use the one provided by the `Twitter` class.
*/
- (instancetype)initWithConsumerKey:(NSString *)consumerKey consumerSecret:(NSString *)consumerSecret __attribute__((deprecated));
/**
* @name Making Requests
*/
/**
* Returns a signed URL request.
*
* @param method Request method, GET, POST, PUT, DELETE, etc.
* @param URL Request URL. This is the full Twitter API URL. E.g. https://api.twitter.com/1.1/statuses/user_timeline.json
* @param parameters Request parameters.
* @param error Error that will be set if there was an error signing the request.
*/
- (NSURLRequest *)URLRequestWithMethod:(NSString *)method URL:(NSString *)URLString parameters:(NSDictionary *)parameters error:(NSError **)error;
/**
* Sends a Twitter request.
*
* @param request The request that will be sent asynchronously.
* @param completion Completion block to be called on response. Called on main queue.
*/
- (void)sendTwitterRequest:(NSURLRequest *)request completion:(TWTRNetworkCompletion)completion;
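// Illustrative sketch of sending a custom request (the endpoint is the example
// from the doc comment above; `client` and the parameter values are assumed):
//
//   NSError *signingError = nil;
//   NSURLRequest *request = [client URLRequestWithMethod:@"GET"
//                                                     URL:@"https://api.twitter.com/1.1/statuses/user_timeline.json"
//                                              parameters:@{@"screen_name": @"twitterapi"}
//                                                   error:&signingError];
//   [client sendTwitterRequest:request completion:^(NSURLResponse *response, NSData *data, NSError *connectionError) {
//       // Handle `data` (JSON payload) or `connectionError` here.
//   }];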
/**
* @name Common API Actions
*/
/**
* Loads a Twitter User.
*
* @param userIDString The Twitter user ID of the desired user.
* @param completion Completion block to be called on response. Called on main queue.
*/
- (void)loadUserWithID:(NSString *)userIDString completion:(TWTRLoadUserCompletion)completion;
/**
* Loads a single Tweet from the network or cache.
*
* @param tweetIDString The ID of the desired Tweet.
* @param completion Completion bock to be called on response. Called on main queue.
*/
- (void)loadTweetWithID:(NSString *)tweetIDString completion:(TWTRLoadTweetCompletion)completion;
/**
* Loads a series of Tweets in a batch. The completion block will be passed an array of zero or more
* Tweets that loaded successfully. If some Tweets fail to load the array will contain less Tweets than
* number of requested IDs. If any Tweets fail to load, the IDs of the Tweets that did not load will
* be provided in the userInfo dictionary property of the error parameter under `TWTRTweetsNotLoadedKey`.
*
* @param tweetIDStrings An array of Tweet IDs.
* @param completion Completion block to be called on response. Called on main queue.
*/
- (void)loadTweetsWithIDs:(NSArray *)tweetIDStrings completion:(TWTRLoadTweetsCompletion)completion;
@end
NS_ASSUME_NONNULL_END
| {
"pile_set_name": "Github"
} |
"""An observation wrapper that augments observations by pixel values."""
import collections
import copy
import numpy as np
from gym import spaces
from gym import ObservationWrapper
STATE_KEY = 'state'
class PixelObservationWrapper(ObservationWrapper):
"""Augment observations by pixel values."""
def __init__(self,
env,
pixels_only=True,
render_kwargs=None,
pixel_keys=('pixels', )):
"""Initializes a new pixel Wrapper.
Args:
env: The environment to wrap.
pixels_only: If `True` (default), the original observation returned
by the wrapped environment will be discarded, and a dictionary
observation will only include pixels. If `False`, the
observation dictionary will contain both the original
observations and the pixel observations.
render_kwargs: Optional `dict` containing keyword arguments passed
to the `self.render` method.
pixel_keys: Optional custom string specifying the pixel
observation's key in the `OrderedDict` of observations.
Defaults to 'pixels'.
Raises:
ValueError: If `env`'s observation spec is not compatible with the
wrapper. Supported formats are a single array, or a dict of
arrays.
ValueError: If `env`'s observation already contains any of the
specified `pixel_keys`.
"""
super(PixelObservationWrapper, self).__init__(env)
if render_kwargs is None:
render_kwargs = {}
for key in pixel_keys:
render_kwargs.setdefault(key, {})
render_mode = render_kwargs[key].pop('mode', 'rgb_array')
assert render_mode == 'rgb_array', render_mode
render_kwargs[key]['mode'] = 'rgb_array'
wrapped_observation_space = env.observation_space
if isinstance(wrapped_observation_space, spaces.Box):
self._observation_is_dict = False
invalid_keys = set([STATE_KEY])
elif isinstance(wrapped_observation_space,
(spaces.Dict, collections.MutableMapping)):
self._observation_is_dict = True
invalid_keys = set(wrapped_observation_space.spaces.keys())
else:
raise ValueError("Unsupported observation space structure.")
if not pixels_only:
# Make sure that none of the `pixel_keys` overlap with the
# wrapped observation's keys.
overlapping_keys = set(pixel_keys) & set(invalid_keys)
if overlapping_keys:
raise ValueError("Duplicate or reserved pixel keys {!r}."
.format(overlapping_keys))
if pixels_only:
self.observation_space = spaces.Dict()
elif self._observation_is_dict:
self.observation_space = copy.deepcopy(wrapped_observation_space)
else:
self.observation_space = spaces.Dict()
self.observation_space.spaces[STATE_KEY] = wrapped_observation_space
# Extend observation space with pixels.
pixels_spaces = {}
for pixel_key in pixel_keys:
pixels = self.env.render(**render_kwargs[pixel_key])
if np.issubdtype(pixels.dtype, np.integer):
low, high = (0, 255)
elif np.issubdtype(pixels.dtype, np.float):
low, high = (-float('inf'), float('inf'))
else:
raise TypeError(pixels.dtype)
pixels_space = spaces.Box(
shape=pixels.shape, low=low, high=high, dtype=pixels.dtype)
pixels_spaces[pixel_key] = pixels_space
self.observation_space.spaces.update(pixels_spaces)
self._env = env
self._pixels_only = pixels_only
self._render_kwargs = render_kwargs
self._pixel_keys = pixel_keys
def observation(self, observation):
pixel_observation = self._add_pixel_observation(observation)
return pixel_observation
def _add_pixel_observation(self, wrapped_observation):
if self._pixels_only:
observation = collections.OrderedDict()
elif self._observation_is_dict:
observation = type(wrapped_observation)(wrapped_observation)
else:
observation = collections.OrderedDict()
observation[STATE_KEY] = wrapped_observation
pixel_observations = {
pixel_key: self.env.render(**self._render_kwargs[pixel_key])
for pixel_key in self._pixel_keys
}
observation.update(pixel_observations)
return observation
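# Illustrative usage sketch (assumes an environment whose render() supports
# mode='rgb_array', e.g. many classic-control gym environments):
#
#   import gym
#   env = PixelObservationWrapper(gym.make('CartPole-v1'), pixels_only=False)
#   obs = env.reset()
#   # obs is an OrderedDict: the original observation under 'state' and the
#   # rendered frame under 'pixels'.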
| {
"pile_set_name": "Github"
} |
apiVersion: v1
kind: Pod
metadata:
name: kube-controller-manager
namespace: kube-system
spec:
containers:
- command:
- /bin/sh
- -c
- /usr/local/bin/kube-controller-manager
--master=127.0.0.1:8080
--cluster-name=${INSTANCE_PREFIX}
--cluster-cidr=${CLUSTER_IP_RANGE}
--service-cluster-ip-range="${SERVICE_CLUSTER_IP_RANGE}"
--allocate-node-cidrs=true
--cloud-provider=gce
--service-account-private-key-file=/srv/kubernetes/server.key
--v=2
--root-ca-file=/srv/kubernetes/ca.crt
1>>/var/log/kube-controller-manager.log 2>&1
image: gcr.io/google_containers/kube-controller-manager:${KUBE_CONTROLLER_MANAGER_DOCKER_TAG}
imagePullPolicy: IfNotPresent
livenessProbe:
httpGet:
host: 127.0.0.1
path: /healthz
port: 10252
scheme: HTTP
initialDelaySeconds: 15
timeoutSeconds: 15
name: kube-controller-manager
resources:
limits:
cpu: 200m
requests:
cpu: 200m
volumeMounts:
- mountPath: /srv/kubernetes
name: srvkube
readOnly: true
- mountPath: /var/log/kube-controller-manager.log
name: logfile
- mountPath: /etc/ssl
name: etcssl
readOnly: true
- mountPath: /usr/share/ca-certificates
name: usrsharecacerts
readOnly: true
dnsPolicy: ClusterFirst
hostNetwork: true
restartPolicy: Always
terminationGracePeriodSeconds: 30
volumes:
- hostPath:
path: /srv/kubernetes
name: srvkube
- hostPath:
path: /var/log/kube-controller-manager.log
name: logfile
- hostPath:
path: /etc/ssl
name: etcssl
- hostPath:
path: /usr/share/ca-certificates
name: usrsharecacerts
| {
"pile_set_name": "Github"
} |
// Go support for Protocol Buffers - Google's data interchange format
//
// Copyright 2010 The Go Authors. All rights reserved.
// https://github.com/golang/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package proto
/*
* Support for message sets.
*/
import (
"errors"
)
// errNoMessageTypeID occurs when a protocol buffer does not have a message type ID.
// A message type ID is required for storing a protocol buffer in a message set.
var errNoMessageTypeID = errors.New("proto does not have a message type ID")
// The first two types (_MessageSet_Item and messageSet)
// model what the protocol compiler produces for the following protocol message:
// message MessageSet {
// repeated group Item = 1 {
// required int32 type_id = 2;
// required string message = 3;
// };
// }
// That is the MessageSet wire format. We can't use a proto to generate these
// because that would introduce a circular dependency between it and this package.
type _MessageSet_Item struct {
TypeId *int32 `protobuf:"varint,2,req,name=type_id"`
Message []byte `protobuf:"bytes,3,req,name=message"`
}
type messageSet struct {
Item []*_MessageSet_Item `protobuf:"group,1,rep"`
XXX_unrecognized []byte
// TODO: caching?
}
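// The sketch below is an editorial illustration (not part of the original
// file): it hand-encodes a single _MessageSet_Item in the wire format
// documented above, using the package's own EncodeVarint helper. The function
// name and the type_id value are made-up examples, not an upstream API.
func exampleMessageSetItemBytes(msg []byte) []byte {
    b := []byte{0x0b}                                // field 1, start-group tag ((1<<3)|3)
    b = append(b, 0x10)                              // field 2, varint tag for type_id
    b = append(b, EncodeVarint(12345)...)            // example type_id = 12345
    b = append(b, 0x1a)                              // field 3, length-delimited tag for message
    b = append(b, EncodeVarint(uint64(len(msg)))...) // message length
    b = append(b, msg...)                            // message bytes
    b = append(b, 0x0c)                              // field 1, end-group tag ((1<<3)|4)
    return b
}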
// Make sure messageSet is a Message.
var _ Message = (*messageSet)(nil)
// messageTypeIder is an interface satisfied by a protocol buffer type
// that may be stored in a MessageSet.
type messageTypeIder interface {
MessageTypeId() int32
}
func (ms *messageSet) find(pb Message) *_MessageSet_Item {
mti, ok := pb.(messageTypeIder)
if !ok {
return nil
}
id := mti.MessageTypeId()
for _, item := range ms.Item {
if *item.TypeId == id {
return item
}
}
return nil
}
func (ms *messageSet) Has(pb Message) bool {
return ms.find(pb) != nil
}
func (ms *messageSet) Unmarshal(pb Message) error {
if item := ms.find(pb); item != nil {
return Unmarshal(item.Message, pb)
}
if _, ok := pb.(messageTypeIder); !ok {
return errNoMessageTypeID
}
return nil // TODO: return error instead?
}
func (ms *messageSet) Marshal(pb Message) error {
msg, err := Marshal(pb)
if err != nil {
return err
}
if item := ms.find(pb); item != nil {
// reuse existing item
item.Message = msg
return nil
}
mti, ok := pb.(messageTypeIder)
if !ok {
return errNoMessageTypeID
}
mtid := mti.MessageTypeId()
ms.Item = append(ms.Item, &_MessageSet_Item{
TypeId: &mtid,
Message: msg,
})
return nil
}
func (ms *messageSet) Reset() { *ms = messageSet{} }
func (ms *messageSet) String() string { return CompactTextString(ms) }
func (*messageSet) ProtoMessage() {}
// Support for the message_set_wire_format message option.
func skipVarint(buf []byte) []byte {
i := 0
for ; buf[i]&0x80 != 0; i++ {
}
return buf[i+1:]
}
// unmarshalMessageSet decodes the extension map encoded in buf in the message set wire format.
// It is called by Unmarshal methods on protocol buffer messages with the message_set_wire_format option.
func unmarshalMessageSet(buf []byte, exts interface{}) error {
var m map[int32]Extension
switch exts := exts.(type) {
case *XXX_InternalExtensions:
m = exts.extensionsWrite()
case map[int32]Extension:
m = exts
default:
return errors.New("proto: not an extension map")
}
ms := new(messageSet)
if err := Unmarshal(buf, ms); err != nil {
return err
}
for _, item := range ms.Item {
id := *item.TypeId
msg := item.Message
// Restore wire type and field number varint, plus length varint.
// Be careful to preserve duplicate items.
b := EncodeVarint(uint64(id)<<3 | WireBytes)
if ext, ok := m[id]; ok {
// Existing data; rip off the tag and length varint
// so we join the new data correctly.
// We can assume that ext.enc is set because we are unmarshaling.
o := ext.enc[len(b):] // skip wire type and field number
_, n := DecodeVarint(o) // calculate length of length varint
o = o[n:] // skip length varint
msg = append(o, msg...) // join old data and new data
}
b = append(b, EncodeVarint(uint64(len(msg)))...)
b = append(b, msg...)
m[id] = Extension{enc: b}
}
return nil
}
| {
"pile_set_name": "Github"
} |
/*
* This file is part of the CmBacktrace Library.
*
* Copyright (c) 2016-2017, Armink, <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* 'Software'), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
 * Function: Initialization and other general functions.
* Created on: 2016-12-15
*/
#include "utils/debug/CmBacktrace/cm_backtrace.h"
#include <stdbool.h>
#include <string.h>
#include <stdio.h>
#if __STDC_VERSION__ < 199901L
#error "must be C99 or higher. try to add '-std=c99' to compile parameters"
#endif
#if defined(__CC_ARM)
#define SECTION_START(_name_) _name_##$$Base
#define SECTION_END(_name_) _name_##$$Limit
#define IMAGE_SECTION_START(_name_) Image$$##_name_##$$Base
#define IMAGE_SECTION_END(_name_) Image$$##_name_##$$Limit
#define CSTACK_BLOCK_START(_name_) SECTION_START(_name_)
#define CSTACK_BLOCK_END(_name_) SECTION_END(_name_)
#define CODE_SECTION_START(_name_) IMAGE_SECTION_START(_name_)
#define CODE_SECTION_END(_name_) IMAGE_SECTION_END(_name_)
extern const int CSTACK_BLOCK_START(CMB_CSTACK_BLOCK_NAME);
extern const int CSTACK_BLOCK_END(CMB_CSTACK_BLOCK_NAME);
extern const int CODE_SECTION_START(CMB_CODE_SECTION_NAME);
extern const int CODE_SECTION_END(CMB_CODE_SECTION_NAME);
#elif defined(__ICCARM__)
#pragma section=CMB_CSTACK_BLOCK_NAME
#pragma section=CMB_CODE_SECTION_NAME
#elif defined(__GNUC__)
extern const int CMB_CSTACK_BLOCK_START;
extern const int CMB_CSTACK_BLOCK_END;
extern const int CMB_CODE_SECTION_START;
extern const int CMB_CODE_SECTION_END;
#else
#error "not supported compiler"
#endif
enum {
PRINT_FIRMWARE_INFO,
PRINT_ASSERT_ON_THREAD,
PRINT_ASSERT_ON_HANDLER,
PRINT_THREAD_STACK_INFO,
PRINT_MAIN_STACK_INFO,
PRINT_THREAD_STACK_OVERFLOW,
PRINT_MAIN_STACK_OVERFLOW,
PRINT_CALL_STACK_INFO,
PRINT_CALL_STACK_ERR,
PRINT_FAULT_ON_THREAD,
PRINT_FAULT_ON_HANDLER,
PRINT_REGS_TITLE,
PRINT_HFSR_VECTBL,
PRINT_MFSR_IACCVIOL,
PRINT_MFSR_DACCVIOL,
PRINT_MFSR_MUNSTKERR,
PRINT_MFSR_MSTKERR,
PRINT_MFSR_MLSPERR,
PRINT_BFSR_IBUSERR,
PRINT_BFSR_PRECISERR,
PRINT_BFSR_IMPREISERR,
PRINT_BFSR_UNSTKERR,
PRINT_BFSR_STKERR,
PRINT_BFSR_LSPERR,
PRINT_UFSR_UNDEFINSTR,
PRINT_UFSR_INVSTATE,
PRINT_UFSR_INVPC,
PRINT_UFSR_NOCP,
PRINT_UFSR_UNALIGNED,
PRINT_UFSR_DIVBYZERO0,
PRINT_DFSR_HALTED,
PRINT_DFSR_BKPT,
PRINT_DFSR_DWTTRAP,
PRINT_DFSR_VCATCH,
PRINT_DFSR_EXTERNAL,
PRINT_MMAR,
PRINT_BFAR,
};
static const char * const print_info[] = {
#if (CMB_PRINT_LANGUAGE == CMB_PRINT_LANGUAGE_ENGLISH)
[PRINT_FIRMWARE_INFO] = "Firmware name: %s, hardware version: %s, software version: %s",
[PRINT_ASSERT_ON_THREAD] = "Assert on thread %s",
[PRINT_ASSERT_ON_HANDLER] = "Assert on interrupt or bare metal(no OS) environment",
[PRINT_THREAD_STACK_INFO] = "===== Thread stack information =====",
[PRINT_MAIN_STACK_INFO] = "====== Main stack information ======",
        [PRINT_THREAD_STACK_OVERFLOW] = "Error: Thread stack(%08x) has overflowed",
        [PRINT_MAIN_STACK_OVERFLOW]   = "Error: Main stack(%08x) has overflowed",
        [PRINT_CALL_STACK_INFO]       = "Show more call stack info by running: addr2line -e %s%s -a -f %.*s",
[PRINT_CALL_STACK_ERR] = "Dump call stack has an error",
[PRINT_FAULT_ON_THREAD] = "Fault on thread %s",
[PRINT_FAULT_ON_HANDLER] = "Fault on interrupt or bare metal(no OS) environment",
[PRINT_REGS_TITLE] = "=================== Registers information ====================",
[PRINT_HFSR_VECTBL] = "Hard fault is caused by failed vector fetch",
[PRINT_MFSR_IACCVIOL] = "Memory management fault is caused by instruction access violation",
[PRINT_MFSR_DACCVIOL] = "Memory management fault is caused by data access violation",
[PRINT_MFSR_MUNSTKERR] = "Memory management fault is caused by unstacking error",
[PRINT_MFSR_MSTKERR] = "Memory management fault is caused by stacking error",
[PRINT_MFSR_MLSPERR] = "Memory management fault is caused by floating-point lazy state preservation",
[PRINT_BFSR_IBUSERR] = "Bus fault is caused by instruction access violation",
[PRINT_BFSR_PRECISERR] = "Bus fault is caused by precise data access violation",
[PRINT_BFSR_IMPREISERR] = "Bus fault is caused by imprecise data access violation",
[PRINT_BFSR_UNSTKERR] = "Bus fault is caused by unstacking error",
[PRINT_BFSR_STKERR] = "Bus fault is caused by stacking error",
[PRINT_BFSR_LSPERR] = "Bus fault is caused by floating-point lazy state preservation",
[PRINT_UFSR_UNDEFINSTR] = "Usage fault is caused by attempts to execute an undefined instruction",
[PRINT_UFSR_INVSTATE] = "Usage fault is caused by attempts to switch to an invalid state (e.g., ARM)",
[PRINT_UFSR_INVPC] = "Usage fault is caused by attempts to do an exception with a bad value in the EXC_RETURN number",
[PRINT_UFSR_NOCP] = "Usage fault is caused by attempts to execute a coprocessor instruction",
        [PRINT_UFSR_UNALIGNED]        = "Usage fault is caused by an unaligned memory access",
        [PRINT_UFSR_DIVBYZERO0]       = "Usage fault is caused by a divide by zero (can be set only if DIV_0_TRP is set)",
[PRINT_DFSR_HALTED] = "Debug fault is caused by halt requested in NVIC",
[PRINT_DFSR_BKPT] = "Debug fault is caused by BKPT instruction executed",
        [PRINT_DFSR_DWTTRAP]          = "Debug fault is caused by a DWT watchpoint match",
        [PRINT_DFSR_VCATCH]           = "Debug fault is caused by a vector catch",
[PRINT_DFSR_EXTERNAL] = "Debug fault is caused by EDBGRQ signal asserted",
[PRINT_MMAR] = "The memory management fault occurred address is %08x",
[PRINT_BFAR] = "The bus fault occurred address is %08x",
#elif (CMB_PRINT_LANGUAGE == CMB_PRINT_LANGUAGE_CHINESE)
[PRINT_FIRMWARE_INFO] = "固件名称:%s,硬件版本号:%s,软件版本号:%s",
[PRINT_ASSERT_ON_THREAD] = "在线程(%s)中发生断言",
[PRINT_ASSERT_ON_HANDLER] = "在中断或裸机环境下发生断言",
[PRINT_THREAD_STACK_INFO] = "=========== 线程堆栈信息 ===========",
[PRINT_MAIN_STACK_INFO] = "============ 主堆栈信息 ============",
[PRINT_THREAD_STACK_OVERFLOW] = "错误:线程栈(%08x)发生溢出",
[PRINT_MAIN_STACK_OVERFLOW] = "错误:主栈(%08x)发生溢出",
[PRINT_CALL_STACK_INFO] = "查看更多函数调用栈信息,请运行:addr2line -e %s%s -a -f %.*s",
[PRINT_CALL_STACK_ERR] = "获取函数调用栈失败",
[PRINT_FAULT_ON_THREAD] = "在线程(%s)中发生错误异常",
[PRINT_FAULT_ON_HANDLER] = "在中断或裸机环境下发生错误异常",
[PRINT_REGS_TITLE] = "========================= 寄存器信息 =========================",
[PRINT_HFSR_VECTBL] = "发生硬错误,原因:取中断向量时出错",
[PRINT_MFSR_IACCVIOL] = "发生存储器管理错误,原因:企图从不允许访问的区域取指令",
[PRINT_MFSR_DACCVIOL] = "发生存储器管理错误,原因:企图从不允许访问的区域读、写数据",
[PRINT_MFSR_MUNSTKERR] = "发生存储器管理错误,原因:出栈时企图访问不被允许的区域",
[PRINT_MFSR_MSTKERR] = "发生存储器管理错误,原因:入栈时企图访问不被允许的区域",
[PRINT_MFSR_MLSPERR] = "发生存储器管理错误,原因:惰性保存浮点状态时发生错误",
[PRINT_BFSR_IBUSERR] = "发生总线错误,原因:指令总线错误",
[PRINT_BFSR_PRECISERR] = "发生总线错误,原因:精确的数据总线错误",
[PRINT_BFSR_IMPREISERR] = "发生总线错误,原因:不精确的数据总线错误",
[PRINT_BFSR_UNSTKERR] = "发生总线错误,原因:出栈时发生错误",
[PRINT_BFSR_STKERR] = "发生总线错误,原因:入栈时发生错误",
[PRINT_BFSR_LSPERR] = "发生总线错误,原因:惰性保存浮点状态时发生错误",
[PRINT_UFSR_UNDEFINSTR] = "发生用法错误,原因:企图执行未定义指令",
[PRINT_UFSR_INVSTATE] = "发生用法错误,原因:试图切换到 ARM 状态",
[PRINT_UFSR_INVPC] = "发生用法错误,原因:无效的异常返回码",
[PRINT_UFSR_NOCP] = "发生用法错误,原因:企图执行协处理器指令",
[PRINT_UFSR_UNALIGNED] = "发生用法错误,原因:企图执行非对齐访问",
[PRINT_UFSR_DIVBYZERO0] = "发生用法错误,原因:企图执行除 0 操作",
[PRINT_DFSR_HALTED] = "发生调试错误,原因:NVIC 停机请求",
[PRINT_DFSR_BKPT] = "发生调试错误,原因:执行 BKPT 指令",
[PRINT_DFSR_DWTTRAP] = "发生调试错误,原因:数据监测点匹配",
[PRINT_DFSR_VCATCH] = "发生调试错误,原因:发生向量捕获",
[PRINT_DFSR_EXTERNAL] = "发生调试错误,原因:外部调试请求",
[PRINT_MMAR] = "发生存储器管理错误的地址:%08x",
[PRINT_BFAR] = "发生总线错误的地址:%08x",
#else
#error "CMB_PRINT_LANGUAGE defined error in 'cmb_cfg.h'"
#endif
};
static char fw_name[CMB_NAME_MAX] = {0};
static char hw_ver[CMB_NAME_MAX] = {0};
static char sw_ver[CMB_NAME_MAX] = {0};
static uint32_t main_stack_start_addr = 0;
static size_t main_stack_size = 0;
static uint32_t code_start_addr = 0;
static size_t code_size = 0;
static bool init_ok = false;
static char call_stack_info[CMB_CALL_STACK_MAX_DEPTH * (8 + 1)] = { 0 };
static bool on_fault = false;
static bool stack_is_overflow = false;
static struct cmb_hard_fault_regs regs;
#if (CMB_CPU_PLATFORM_TYPE == CMB_CPU_ARM_CORTEX_M4) || (CMB_CPU_PLATFORM_TYPE == CMB_CPU_ARM_CORTEX_M7)
static bool stack_has_fpu_regs = false;
#endif
static bool on_thread_before_fault = false;
/**
* library initialize
*/
void cm_backtrace_init(const char *firmware_name, const char *hardware_ver, const char *software_ver) {
strncpy(fw_name, firmware_name, CMB_NAME_MAX);
strncpy(hw_ver, hardware_ver, CMB_NAME_MAX);
strncpy(sw_ver, software_ver, CMB_NAME_MAX);
#if defined(__CC_ARM)
main_stack_start_addr = (uint32_t)&CSTACK_BLOCK_START(CMB_CSTACK_BLOCK_NAME);
main_stack_size = (uint32_t)&CSTACK_BLOCK_END(CMB_CSTACK_BLOCK_NAME) - main_stack_start_addr;
code_start_addr = (uint32_t)&CODE_SECTION_START(CMB_CODE_SECTION_NAME);
code_size = (uint32_t)&CODE_SECTION_END(CMB_CODE_SECTION_NAME) - code_start_addr;
#elif defined(__ICCARM__)
main_stack_start_addr = (uint32_t)__section_begin(CMB_CSTACK_BLOCK_NAME);
main_stack_size = (uint32_t)__section_end(CMB_CSTACK_BLOCK_NAME) - main_stack_start_addr;
code_start_addr = (uint32_t)__section_begin(CMB_CODE_SECTION_NAME);
code_size = (uint32_t)__section_end(CMB_CODE_SECTION_NAME) - code_start_addr;
#elif defined(__GNUC__)
main_stack_start_addr = (uint32_t)(&CMB_CSTACK_BLOCK_START);
main_stack_size = (uint32_t)(&CMB_CSTACK_BLOCK_END) - main_stack_start_addr;
code_start_addr = (uint32_t)(&CMB_CODE_SECTION_START);
code_size = (uint32_t)(&CMB_CODE_SECTION_END) - code_start_addr;
#else
#error "not supported compiler"
#endif
init_ok = true;
}
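/*
 * Editorial usage sketch (not part of the original file): the library is
 * normally initialized once, early in main(), before any assert or fault can
 * occur. The firmware name and version strings below are example values.
 *
 *     int main(void) {
 *         cm_backtrace_init("my_firmware", "V1.0.0", "V1.0.0");
 *         // ... rest of application setup ...
 *     }
 */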
/**
* print firmware information, such as: firmware name, hardware version, software version
*/
void cm_backtrace_firmware_info(void) {
cmb_println(print_info[PRINT_FIRMWARE_INFO], fw_name, hw_ver, sw_ver);
}
#ifdef CMB_USING_OS_PLATFORM
/**
* Get current thread stack information
*
* @param sp stack current pointer
* @param start_addr stack start address
* @param size stack size
*/
static void get_cur_thread_stack_info(uint32_t sp, uint32_t *start_addr, size_t *size) {
CMB_ASSERT(start_addr);
CMB_ASSERT(size);
#if (CMB_OS_PLATFORM_TYPE == CMB_OS_PLATFORM_RTT)
*start_addr = (uint32_t) rt_thread_self()->stack_addr;
*size = rt_thread_self()->stack_size;
#elif (CMB_OS_PLATFORM_TYPE == CMB_OS_PLATFORM_UCOSII)
extern OS_TCB *OSTCBCur;
*start_addr = (uint32_t) OSTCBCur->OSTCBStkBottom;
*size = OSTCBCur->OSTCBStkSize * sizeof(OS_STK);
#elif (CMB_OS_PLATFORM_TYPE == CMB_OS_PLATFORM_UCOSIII)
extern OS_TCB *OSTCBCurPtr;
*start_addr = (uint32_t) OSTCBCurPtr->StkBasePtr;
*size = OSTCBCurPtr->StkSize * sizeof(CPU_STK_SIZE);
#elif (CMB_OS_PLATFORM_TYPE == CMB_OS_PLATFORM_FREERTOS)
*start_addr = (uint32_t)vTaskStackAddr();
*size = vTaskStackSize() * sizeof( StackType_t );
#endif
}
/**
* Get current thread name
*/
static const char *get_cur_thread_name(void) {
#if (CMB_OS_PLATFORM_TYPE == CMB_OS_PLATFORM_RTT)
return rt_thread_self()->name;
#elif (CMB_OS_PLATFORM_TYPE == CMB_OS_PLATFORM_UCOSII)
extern OS_TCB *OSTCBCur;
#if OS_TASK_NAME_SIZE > 0 || OS_TASK_NAME_EN > 0
return (const char *)OSTCBCur->OSTCBTaskName;
#else
return NULL;
#endif /* OS_TASK_NAME_SIZE > 0 || OS_TASK_NAME_EN > 0 */
#elif (CMB_OS_PLATFORM_TYPE == CMB_OS_PLATFORM_UCOSIII)
extern OS_TCB *OSTCBCurPtr;
return (const char *)OSTCBCurPtr->NamePtr;
#elif (CMB_OS_PLATFORM_TYPE == CMB_OS_PLATFORM_FREERTOS)
return vTaskName();
#endif
}
#endif /* CMB_USING_OS_PLATFORM */
#ifdef CMB_USING_DUMP_STACK_INFO
/**
* dump current stack information
*/
static void dump_stack(uint32_t stack_start_addr, size_t stack_size, uint32_t *stack_pointer) {
if (stack_is_overflow) {
if (on_thread_before_fault) {
cmb_println(print_info[PRINT_THREAD_STACK_OVERFLOW], stack_pointer);
} else {
cmb_println(print_info[PRINT_MAIN_STACK_OVERFLOW], stack_pointer);
}
if ((uint32_t) stack_pointer < stack_start_addr) {
stack_pointer = (uint32_t *) stack_start_addr;
} else if ((uint32_t) stack_pointer > stack_start_addr + stack_size) {
stack_pointer = (uint32_t *) (stack_start_addr + stack_size);
}
}
cmb_println(print_info[PRINT_THREAD_STACK_INFO]);
for (; (uint32_t) stack_pointer < stack_start_addr + stack_size; stack_pointer++) {
cmb_println(" addr: %08x data: %08x", stack_pointer, *stack_pointer);
}
cmb_println("====================================");
}
#endif /* CMB_USING_DUMP_STACK_INFO */
/**
* backtrace function call stack
*
* @param buffer call stack buffer
* @param size buffer size
* @param sp stack pointer
*
* @return depth
*/
size_t cm_backtrace_call_stack(uint32_t *buffer, size_t size, uint32_t sp) {
uint32_t stack_start_addr = main_stack_start_addr, pc;
size_t depth = 0, stack_size = main_stack_size;
bool regs_saved_lr_is_valid = false;
if (on_fault) {
if (!stack_is_overflow) {
/* first depth is PC */
buffer[depth++] = regs.saved.pc;
            /* the second depth comes from LR, so subtract one word to get the PC */
pc = regs.saved.lr - sizeof(size_t);
if ((pc >= code_start_addr) && (pc <= code_start_addr + code_size) && (depth < CMB_CALL_STACK_MAX_DEPTH)
&& (depth < size)) {
buffer[depth++] = pc;
regs_saved_lr_is_valid = true;
}
}
#ifdef CMB_USING_OS_PLATFORM
/* program is running on thread before fault */
if (on_thread_before_fault) {
get_cur_thread_stack_info(sp, &stack_start_addr, &stack_size);
}
} else {
/* OS environment */
if (cmb_get_sp() == cmb_get_psp()) {
get_cur_thread_stack_info(sp, &stack_start_addr, &stack_size);
}
#endif /* CMB_USING_OS_PLATFORM */
}
if (stack_is_overflow) {
if (sp < stack_start_addr) {
sp = stack_start_addr;
} else if (sp > stack_start_addr + stack_size) {
sp = stack_start_addr + stack_size;
}
}
/* copy called function address */
for (; sp < stack_start_addr + stack_size; sp += sizeof(size_t)) {
        /* the *sp value may be an LR, so subtract one word to get the candidate PC */
pc = *((uint32_t *) sp) - sizeof(size_t);
        /* Cortex-M uses Thumb instructions, so a valid PC must be an odd number */
if (pc % 2 == 0) {
continue;
}
if ((pc >= code_start_addr) && (pc <= code_start_addr + code_size) \
&& (depth < CMB_CALL_STACK_MAX_DEPTH) && (depth < size)) {
            /* the second depth may already have been saved from LR, so skip the duplicate */
if ((depth == 2) && regs_saved_lr_is_valid && (pc == buffer[1])) {
continue;
}
buffer[depth++] = pc;
}
}
return depth;
}
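/*
 * Editorial usage sketch (not part of the original file): capturing the
 * current call stack outside of a fault context, e.g. for logging. Using
 * cmb_get_sp() for the third argument is just one reasonable choice.
 *
 *     uint32_t addrs[CMB_CALL_STACK_MAX_DEPTH] = {0};
 *     size_t depth = cm_backtrace_call_stack(addrs, CMB_CALL_STACK_MAX_DEPTH, cmb_get_sp());
 *     for (size_t i = 0; i < depth; i++) {
 *         cmb_println("%08x", addrs[i]);
 *     }
 */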
/**
* dump function call stack
*
* @param sp stack pointer
*/
static void print_call_stack(uint32_t sp) {
size_t i, cur_depth = 0;
uint32_t call_stack_buf[CMB_CALL_STACK_MAX_DEPTH] = {0};
cur_depth = cm_backtrace_call_stack(call_stack_buf, CMB_CALL_STACK_MAX_DEPTH, sp);
for (i = 0; i < cur_depth; i++) {
sprintf(call_stack_info + i * (8 + 1), "%08lx", call_stack_buf[i]);
call_stack_info[i * (8 + 1) + 8] = ' ';
}
if (cur_depth) {
cmb_println(print_info[PRINT_CALL_STACK_INFO], fw_name, CMB_ELF_FILE_EXTENSION_NAME, cur_depth * (8 + 1),
call_stack_info);
} else {
cmb_println(print_info[PRINT_CALL_STACK_ERR]);
}
}
/**
* backtrace for assert
*
 * @param sp the stack pointer when the assert occurred
*/
void cm_backtrace_assert(uint32_t sp) {
CMB_ASSERT(init_ok);
#ifdef CMB_USING_OS_PLATFORM
uint32_t cur_stack_pointer = cmb_get_sp();
#endif
cmb_println("");
cm_backtrace_firmware_info();
#ifdef CMB_USING_OS_PLATFORM
/* OS environment */
if (cur_stack_pointer == cmb_get_msp()) {
cmb_println(print_info[PRINT_ASSERT_ON_HANDLER]);
#ifdef CMB_USING_DUMP_STACK_INFO
dump_stack(main_stack_start_addr, main_stack_size, (uint32_t *) sp);
#endif /* CMB_USING_DUMP_STACK_INFO */
} else if (cur_stack_pointer == cmb_get_psp()) {
cmb_println(print_info[PRINT_ASSERT_ON_THREAD], get_cur_thread_name());
#ifdef CMB_USING_DUMP_STACK_INFO
uint32_t stack_start_addr;
size_t stack_size;
get_cur_thread_stack_info(sp, &stack_start_addr, &stack_size);
dump_stack(stack_start_addr, stack_size, (uint32_t *) sp);
#endif /* CMB_USING_DUMP_STACK_INFO */
}
#else
/* bare metal(no OS) environment */
#ifdef CMB_USING_DUMP_STACK_INFO
dump_stack(main_stack_start_addr, main_stack_size, (uint32_t *) sp);
#endif /* CMB_USING_DUMP_STACK_INFO */
#endif /* CMB_USING_OS_PLATFORM */
print_call_stack(sp);
}
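/*
 * Editorial usage sketch (not part of the original file): a project-level
 * assert macro might forward failed checks here with the current stack
 * pointer. The macro name is a made-up example.
 *
 *     #define MY_ASSERT(expr)                          \
 *         do {                                         \
 *             if (!(expr)) {                           \
 *                 cm_backtrace_assert(cmb_get_sp());   \
 *                 while (1);                           \
 *             }                                        \
 *         } while (0)
 */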
#if (CMB_CPU_PLATFORM_TYPE != CMB_CPU_ARM_CORTEX_M0)
/**
* fault diagnosis then print cause of fault
*/
static void fault_diagnosis(void) {
if (regs.hfsr.bits.VECTBL) {
cmb_println(print_info[PRINT_HFSR_VECTBL]);
}
if (regs.hfsr.bits.FORCED) {
/* Memory Management Fault */
if (regs.mfsr.value) {
if (regs.mfsr.bits.IACCVIOL) {
cmb_println(print_info[PRINT_MFSR_IACCVIOL]);
}
if (regs.mfsr.bits.DACCVIOL) {
cmb_println(print_info[PRINT_MFSR_DACCVIOL]);
}
if (regs.mfsr.bits.MUNSTKERR) {
cmb_println(print_info[PRINT_MFSR_MUNSTKERR]);
}
if (regs.mfsr.bits.MSTKERR) {
cmb_println(print_info[PRINT_MFSR_MSTKERR]);
}
#if (CMB_CPU_PLATFORM_TYPE == CMB_CPU_ARM_CORTEX_M4) || (CMB_CPU_PLATFORM_TYPE == CMB_CPU_ARM_CORTEX_M7)
if (regs.mfsr.bits.MLSPERR) {
cmb_println(print_info[PRINT_MFSR_MLSPERR]);
}
#endif
if (regs.mfsr.bits.MMARVALID) {
if (regs.mfsr.bits.IACCVIOL || regs.mfsr.bits.DACCVIOL) {
cmb_println(print_info[PRINT_MMAR], regs.mmar);
}
}
}
/* Bus Fault */
if (regs.bfsr.value) {
if (regs.bfsr.bits.IBUSERR) {
cmb_println(print_info[PRINT_BFSR_IBUSERR]);
}
if (regs.bfsr.bits.PRECISERR) {
cmb_println(print_info[PRINT_BFSR_PRECISERR]);
}
if (regs.bfsr.bits.IMPREISERR) {
cmb_println(print_info[PRINT_BFSR_IMPREISERR]);
}
if (regs.bfsr.bits.UNSTKERR) {
cmb_println(print_info[PRINT_BFSR_UNSTKERR]);
}
if (regs.bfsr.bits.STKERR) {
cmb_println(print_info[PRINT_BFSR_STKERR]);
}
#if (CMB_CPU_PLATFORM_TYPE == CMB_CPU_ARM_CORTEX_M4) || (CMB_CPU_PLATFORM_TYPE == CMB_CPU_ARM_CORTEX_M7)
if (regs.bfsr.bits.LSPERR) {
cmb_println(print_info[PRINT_BFSR_LSPERR]);
}
#endif
if (regs.bfsr.bits.BFARVALID) {
if (regs.bfsr.bits.PRECISERR) {
cmb_println(print_info[PRINT_BFAR], regs.bfar);
}
}
}
/* Usage Fault */
if (regs.ufsr.value) {
if (regs.ufsr.bits.UNDEFINSTR) {
cmb_println(print_info[PRINT_UFSR_UNDEFINSTR]);
}
if (regs.ufsr.bits.INVSTATE) {
cmb_println(print_info[PRINT_UFSR_INVSTATE]);
}
if (regs.ufsr.bits.INVPC) {
cmb_println(print_info[PRINT_UFSR_INVPC]);
}
if (regs.ufsr.bits.NOCP) {
cmb_println(print_info[PRINT_UFSR_NOCP]);
}
if (regs.ufsr.bits.UNALIGNED) {
cmb_println(print_info[PRINT_UFSR_UNALIGNED]);
}
if (regs.ufsr.bits.DIVBYZERO0) {
cmb_println(print_info[PRINT_UFSR_DIVBYZERO0]);
}
}
}
/* Debug Fault */
if (regs.hfsr.bits.DEBUGEVT) {
if (regs.dfsr.value) {
if (regs.dfsr.bits.HALTED) {
cmb_println(print_info[PRINT_DFSR_HALTED]);
}
if (regs.dfsr.bits.BKPT) {
cmb_println(print_info[PRINT_DFSR_BKPT]);
}
if (regs.dfsr.bits.DWTTRAP) {
cmb_println(print_info[PRINT_DFSR_DWTTRAP]);
}
if (regs.dfsr.bits.VCATCH) {
cmb_println(print_info[PRINT_DFSR_VCATCH]);
}
if (regs.dfsr.bits.EXTERNAL) {
cmb_println(print_info[PRINT_DFSR_EXTERNAL]);
}
}
}
}
#endif /* (CMB_CPU_PLATFORM_TYPE != CMB_CPU_ARM_CORTEX_M0) */
#if (CMB_CPU_PLATFORM_TYPE == CMB_CPU_ARM_CORTEX_M4) || (CMB_CPU_PLATFORM_TYPE == CMB_CPU_ARM_CORTEX_M7)
static uint32_t stack_del_fpu_regs(uint32_t fault_handler_lr, uint32_t sp) {
    stack_has_fpu_regs = (fault_handler_lr & (1UL << 4)) == 0 ? true : false;
    /* when stack_has_fpu_regs is true the frame also holds S0~S15, FPSCR and a reserved word (18 words), double word aligned */
    return stack_has_fpu_regs == true ? sp + sizeof(size_t) * 18 : sp;
}
#endif
/**
* backtrace for fault
* @note only call once
*
* @param fault_handler_lr the LR register value on fault handler
* @param fault_handler_sp the stack pointer on fault handler
*/
void cm_backtrace_fault(uint32_t fault_handler_lr, uint32_t fault_handler_sp) {
uint32_t stack_pointer = fault_handler_sp, saved_regs_addr = stack_pointer;
const char *regs_name[] = { "R0 ", "R1 ", "R2 ", "R3 ", "R12", "LR ", "PC ", "PSR" };
#ifdef CMB_USING_DUMP_STACK_INFO
uint32_t stack_start_addr = main_stack_start_addr;
size_t stack_size = main_stack_size;
#endif
CMB_ASSERT(init_ok);
/* only call once */
CMB_ASSERT(!on_fault);
on_fault = true;
cmb_println("");
cm_backtrace_firmware_info();
#ifdef CMB_USING_OS_PLATFORM
on_thread_before_fault = fault_handler_lr & (1UL << 2);
/* check which stack was used before (MSP or PSP) */
if (on_thread_before_fault) {
cmb_println(print_info[PRINT_FAULT_ON_THREAD], get_cur_thread_name() != NULL ? get_cur_thread_name() : "NO_NAME");
saved_regs_addr = stack_pointer = cmb_get_psp();
#ifdef CMB_USING_DUMP_STACK_INFO
get_cur_thread_stack_info(stack_pointer, &stack_start_addr, &stack_size);
#endif /* CMB_USING_DUMP_STACK_INFO */
} else {
cmb_println(print_info[PRINT_FAULT_ON_HANDLER]);
}
#else
/* bare metal(no OS) environment */
cmb_println(print_info[PRINT_FAULT_ON_HANDLER]);
#endif /* CMB_USING_OS_PLATFORM */
    /* skip the stacked R0~R3, R12, LR, PC, xPSR registers (8 words) */
stack_pointer += sizeof(size_t) * 8;
#if (CMB_CPU_PLATFORM_TYPE == CMB_CPU_ARM_CORTEX_M4) || (CMB_CPU_PLATFORM_TYPE == CMB_CPU_ARM_CORTEX_M7)
    stack_pointer = stack_del_fpu_regs(fault_handler_lr, stack_pointer);
#endif /* (CMB_CPU_PLATFORM_TYPE == CMB_CPU_ARM_CORTEX_M4) || (CMB_CPU_PLATFORM_TYPE == CMB_CPU_ARM_CORTEX_M7) */
#ifdef CMB_USING_DUMP_STACK_INFO
/* check stack overflow */
if (stack_pointer < stack_start_addr || stack_pointer > stack_start_addr + stack_size) {
stack_is_overflow = true;
}
/* dump stack information */
dump_stack(stack_start_addr, stack_size, (uint32_t *) stack_pointer);
#endif /* CMB_USING_DUMP_STACK_INFO */
    /* the stack frame may not be recoverable when the stack has overflowed */
if (!stack_is_overflow) {
/* dump register */
cmb_println(print_info[PRINT_REGS_TITLE]);
regs.saved.r0 = ((uint32_t *)saved_regs_addr)[0]; // Register R0
regs.saved.r1 = ((uint32_t *)saved_regs_addr)[1]; // Register R1
regs.saved.r2 = ((uint32_t *)saved_regs_addr)[2]; // Register R2
regs.saved.r3 = ((uint32_t *)saved_regs_addr)[3]; // Register R3
regs.saved.r12 = ((uint32_t *)saved_regs_addr)[4]; // Register R12
regs.saved.lr = ((uint32_t *)saved_regs_addr)[5]; // Link register LR
regs.saved.pc = ((uint32_t *)saved_regs_addr)[6]; // Program counter PC
regs.saved.psr.value = ((uint32_t *)saved_regs_addr)[7]; // Program status word PSR
cmb_println(" %s: %08x %s: %08x %s: %08x %s: %08x", regs_name[0], regs.saved.r0,
regs_name[1], regs.saved.r1,
regs_name[2], regs.saved.r2,
regs_name[3], regs.saved.r3);
cmb_println(" %s: %08x %s: %08x %s: %08x %s: %08x", regs_name[4], regs.saved.r12,
regs_name[5], regs.saved.lr,
regs_name[6], regs.saved.pc,
regs_name[7], regs.saved.psr.value);
cmb_println("==============================================================");
}
    /* the Cortex-M0 does not support fault diagnosis */
#if (CMB_CPU_PLATFORM_TYPE != CMB_CPU_ARM_CORTEX_M0)
regs.syshndctrl.value = CMB_SYSHND_CTRL; // System Handler Control and State Register
regs.mfsr.value = CMB_NVIC_MFSR; // Memory Fault Status Register
regs.mmar = CMB_NVIC_MMAR; // Memory Management Fault Address Register
regs.bfsr.value = CMB_NVIC_BFSR; // Bus Fault Status Register
    regs.bfar = CMB_NVIC_BFAR;                            // Bus Fault Address Register
regs.ufsr.value = CMB_NVIC_UFSR; // Usage Fault Status Register
regs.hfsr.value = CMB_NVIC_HFSR; // Hard Fault Status Register
regs.dfsr.value = CMB_NVIC_DFSR; // Debug Fault Status Register
regs.afsr = CMB_NVIC_AFSR; // Auxiliary Fault Status Register
fault_diagnosis();
#endif
print_call_stack(stack_pointer);
}
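/*
 * Editorial usage sketch (not part of the original file): cm_backtrace_fault()
 * is intended to be called from the hard fault handler with the handler's LR
 * (the EXC_RETURN value) and the stack pointer at fault entry, roughly:
 *
 *     HardFault_Handler:
 *         MOV   r0, lr            ; EXC_RETURN -> fault_handler_lr
 *         MOV   r1, sp            ; SP at fault -> fault_handler_sp
 *         BL    cm_backtrace_fault
 *     Fault_Loop:
 *         B     Fault_Loop        ; do not return after dumping
 *
 * The label names are example choices; the upstream CmBacktrace project ships
 * equivalent ready-made fault-handler assembly for common toolchains.
 */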
| {
"pile_set_name": "Github"
} |
.form-component {
display: flex;
flex-direction: column;
margin: var(--spacing-half) 0;
& > *:not(:last-child) {
margin-bottom: var(--spacing);
}
hr {
width: 100%;
border: none;
height: 1px;
border-bottom: var(--base-border);
}
}
| {
"pile_set_name": "Github"
} |
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>WebGL Shader Common Function Conformance Tests</title>
<link rel="stylesheet" href="../../../resources/js-test-style.css"/>
<script src="../../../js/js-test-pre.js"></script>
<script src="../../../js/webgl-test-utils.js"></script>
<script src="../../../closure-library/closure/goog/base.js"></script>
<script src="../../deqp-deps.js"></script>
<script>goog.require('functional.gles3.es3fShaderCommonFunctionTests');</script>
</head>
<body>
<div id="description"></div>
<div id="console"></div>
<canvas id="canvas" width="256" height="256"> </canvas>
<script>
var wtu = WebGLTestUtils;
var gl = wtu.create3DContext('canvas', null, 2);
functional.gles3.es3fShaderCommonFunctionTests.run(gl);
</script>
</body>
</html>
| {
"pile_set_name": "Github"
} |
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!29 &1
OcclusionCullingSettings:
m_ObjectHideFlags: 0
serializedVersion: 2
m_OcclusionBakeSettings:
smallestOccluder: 5
smallestHole: 0.25
backfaceThreshold: 100
m_SceneGUID: 00000000000000000000000000000000
m_OcclusionCullingData: {fileID: 0}
--- !u!104 &2
RenderSettings:
m_ObjectHideFlags: 0
serializedVersion: 8
m_Fog: 0
m_FogColor: {r: 0.5, g: 0.5, b: 0.5, a: 1}
m_FogMode: 3
m_FogDensity: 0.01
m_LinearFogStart: 0
m_LinearFogEnd: 300
m_AmbientSkyColor: {r: 0.212, g: 0.227, b: 0.259, a: 1}
m_AmbientEquatorColor: {r: 0.114, g: 0.125, b: 0.133, a: 1}
m_AmbientGroundColor: {r: 0.047, g: 0.043, b: 0.035, a: 1}
m_AmbientIntensity: 1
m_AmbientMode: 0
m_SubtractiveShadowColor: {r: 0.42, g: 0.478, b: 0.627, a: 1}
m_SkyboxMaterial: {fileID: 10304, guid: 0000000000000000f000000000000000, type: 0}
m_HaloStrength: 0.5
m_FlareStrength: 1
m_FlareFadeSpeed: 3
m_HaloTexture: {fileID: 0}
m_SpotCookie: {fileID: 10001, guid: 0000000000000000e000000000000000, type: 0}
m_DefaultReflectionMode: 0
m_DefaultReflectionResolution: 128
m_ReflectionBounces: 1
m_ReflectionIntensity: 1
m_CustomReflection: {fileID: 0}
m_Sun: {fileID: 0}
m_IndirectSpecularColor: {r: 0, g: 0, b: 0, a: 1}
--- !u!157 &3
LightmapSettings:
m_ObjectHideFlags: 0
serializedVersion: 11
m_GIWorkflowMode: 1
m_GISettings:
serializedVersion: 2
m_BounceScale: 1
m_IndirectOutputScale: 1
m_AlbedoBoost: 1
m_TemporalCoherenceThreshold: 1
m_EnvironmentLightingMode: 0
m_EnableBakedLightmaps: 1
m_EnableRealtimeLightmaps: 1
m_LightmapEditorSettings:
serializedVersion: 9
m_Resolution: 2
m_BakeResolution: 40
m_TextureWidth: 1024
m_TextureHeight: 1024
m_AO: 0
m_AOMaxDistance: 1
m_CompAOExponent: 1
m_CompAOExponentDirect: 0
m_Padding: 2
m_LightmapParameters: {fileID: 0}
m_LightmapsBakeMode: 1
m_TextureCompression: 1
m_FinalGather: 0
m_FinalGatherFiltering: 1
m_FinalGatherRayCount: 256
m_ReflectionCompression: 2
m_MixedBakeMode: 1
m_BakeBackend: 0
m_PVRSampling: 1
m_PVRDirectSampleCount: 32
m_PVRSampleCount: 500
m_PVRBounces: 2
m_PVRFilterTypeDirect: 0
m_PVRFilterTypeIndirect: 0
m_PVRFilterTypeAO: 0
m_PVRFilteringMode: 0
m_PVRCulling: 1
m_PVRFilteringGaussRadiusDirect: 1
m_PVRFilteringGaussRadiusIndirect: 5
m_PVRFilteringGaussRadiusAO: 2
m_PVRFilteringAtrousPositionSigmaDirect: 0.5
m_PVRFilteringAtrousPositionSigmaIndirect: 2
m_PVRFilteringAtrousPositionSigmaAO: 1
m_LightingDataAsset: {fileID: 0}
m_UseShadowmask: 0
--- !u!196 &4
NavMeshSettings:
serializedVersion: 2
m_ObjectHideFlags: 0
m_BuildSettings:
serializedVersion: 2
agentTypeID: 0
agentRadius: 0.5
agentHeight: 2
agentSlope: 45
agentClimb: 0.4
ledgeDropHeight: 0
maxJumpAcrossDistance: 0
minRegionArea: 2
manualCellSize: 0
cellSize: 0.16666667
manualTileSize: 0
tileSize: 256
accuratePlacement: 0
debug:
m_Flags: 0
m_NavMeshData: {fileID: 0}
--- !u!1 &137962126
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 137962127}
- component: {fileID: 137962129}
- component: {fileID: 137962128}
m_Layer: 0
m_Name: Default
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &137962127
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 137962126}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0.014, y: 0.361, z: -0.55}
m_LocalScale: {x: 0.014285714, y: 0.014285714, z: 0.049999997}
m_Children: []
m_Father: {fileID: 1541181469}
m_RootOrder: 3
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!102 &137962128
TextMesh:
serializedVersion: 3
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 137962126}
m_Text: Tap/Hold
m_OffsetZ: 0
m_CharacterSize: 1
m_LineSpacing: 1
m_Anchor: 1
m_Alignment: 1
m_TabSize: 4
m_FontSize: 64
m_FontStyle: 0
m_RichText: 1
m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0}
m_Color:
serializedVersion: 2
rgba: 4286444430
--- !u!23 &137962129
MeshRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 137962126}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 10100, guid: 0000000000000000e000000000000000, type: 0}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!1001 &235810537
Prefab:
m_ObjectHideFlags: 0
serializedVersion: 2
m_Modification:
m_TransformParent: {fileID: 0}
m_Modifications:
- target: {fileID: 4541142303025740, guid: d29bc40b7f3df26479d6a0aac211c355, type: 2}
propertyPath: m_LocalPosition.x
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4541142303025740, guid: d29bc40b7f3df26479d6a0aac211c355, type: 2}
propertyPath: m_LocalPosition.y
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4541142303025740, guid: d29bc40b7f3df26479d6a0aac211c355, type: 2}
propertyPath: m_LocalPosition.z
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4541142303025740, guid: d29bc40b7f3df26479d6a0aac211c355, type: 2}
propertyPath: m_LocalRotation.x
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4541142303025740, guid: d29bc40b7f3df26479d6a0aac211c355, type: 2}
propertyPath: m_LocalRotation.y
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4541142303025740, guid: d29bc40b7f3df26479d6a0aac211c355, type: 2}
propertyPath: m_LocalRotation.z
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4541142303025740, guid: d29bc40b7f3df26479d6a0aac211c355, type: 2}
propertyPath: m_LocalRotation.w
value: 1
objectReference: {fileID: 0}
- target: {fileID: 4541142303025740, guid: d29bc40b7f3df26479d6a0aac211c355, type: 2}
propertyPath: m_RootOrder
value: 0
objectReference: {fileID: 0}
m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: d29bc40b7f3df26479d6a0aac211c355, type: 2}
m_IsPrefabParent: 0
--- !u!1001 &279023640
Prefab:
m_ObjectHideFlags: 0
serializedVersion: 2
m_Modification:
m_TransformParent: {fileID: 0}
m_Modifications:
- target: {fileID: 4000011792100794, guid: a611e772ef8ddf64d8106a9cbb70f31c, type: 2}
propertyPath: m_LocalPosition.x
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4000011792100794, guid: a611e772ef8ddf64d8106a9cbb70f31c, type: 2}
propertyPath: m_LocalPosition.y
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4000011792100794, guid: a611e772ef8ddf64d8106a9cbb70f31c, type: 2}
propertyPath: m_LocalPosition.z
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4000011792100794, guid: a611e772ef8ddf64d8106a9cbb70f31c, type: 2}
propertyPath: m_LocalRotation.x
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4000011792100794, guid: a611e772ef8ddf64d8106a9cbb70f31c, type: 2}
propertyPath: m_LocalRotation.y
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4000011792100794, guid: a611e772ef8ddf64d8106a9cbb70f31c, type: 2}
propertyPath: m_LocalRotation.z
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4000011792100794, guid: a611e772ef8ddf64d8106a9cbb70f31c, type: 2}
propertyPath: m_LocalRotation.w
value: 1
objectReference: {fileID: 0}
- target: {fileID: 4000011792100794, guid: a611e772ef8ddf64d8106a9cbb70f31c, type: 2}
propertyPath: m_RootOrder
value: 2
objectReference: {fileID: 0}
m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: a611e772ef8ddf64d8106a9cbb70f31c, type: 2}
m_IsPrefabParent: 0
--- !u!114 &279023641 stripped
MonoBehaviour:
m_PrefabParentObject: {fileID: 114611684728110934, guid: a611e772ef8ddf64d8106a9cbb70f31c,
type: 2}
m_PrefabInternal: {fileID: 279023640}
m_Script: {fileID: 11500000, guid: 0decd33ba8702954885a62b5bc1a778e, type: 3}
--- !u!1001 &322386181
Prefab:
m_ObjectHideFlags: 0
serializedVersion: 2
m_Modification:
m_TransformParent: {fileID: 0}
m_Modifications:
- target: {fileID: 4000011656901714, guid: 3eddd1c29199313478dd3f912bfab2ab, type: 2}
propertyPath: m_LocalPosition.x
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4000011656901714, guid: 3eddd1c29199313478dd3f912bfab2ab, type: 2}
propertyPath: m_LocalPosition.y
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4000011656901714, guid: 3eddd1c29199313478dd3f912bfab2ab, type: 2}
propertyPath: m_LocalPosition.z
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4000011656901714, guid: 3eddd1c29199313478dd3f912bfab2ab, type: 2}
propertyPath: m_LocalRotation.x
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4000011656901714, guid: 3eddd1c29199313478dd3f912bfab2ab, type: 2}
propertyPath: m_LocalRotation.y
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4000011656901714, guid: 3eddd1c29199313478dd3f912bfab2ab, type: 2}
propertyPath: m_LocalRotation.z
value: 0
objectReference: {fileID: 0}
- target: {fileID: 4000011656901714, guid: 3eddd1c29199313478dd3f912bfab2ab, type: 2}
propertyPath: m_LocalRotation.w
value: 1
objectReference: {fileID: 0}
- target: {fileID: 4000011656901714, guid: 3eddd1c29199313478dd3f912bfab2ab, type: 2}
propertyPath: m_RootOrder
value: 1
objectReference: {fileID: 0}
- target: {fileID: 114742747811649402, guid: 3eddd1c29199313478dd3f912bfab2ab,
type: 2}
propertyPath: Cursor
value:
objectReference: {fileID: 279023641}
m_RemovedComponents: []
m_ParentPrefab: {fileID: 100100000, guid: 3eddd1c29199313478dd3f912bfab2ab, type: 2}
m_IsPrefabParent: 0
--- !u!1 &481182369
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 481182370}
- component: {fileID: 481182373}
- component: {fileID: 481182372}
- component: {fileID: 481182371}
- component: {fileID: 481182374}
m_Layer: 0
m_Name: Timer
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &481182370
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 481182369}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: -0.15, z: -0.52}
m_LocalScale: {x: 0, y: 0.05, z: 1}
m_Children: []
m_Father: {fileID: 1541181469}
m_RootOrder: 5
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!23 &481182371
MeshRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 481182369}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: 685f5d363eec7354b8d5dfbddbc86e28, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!64 &481182372
MeshCollider:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 481182369}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Convex: 0
m_InflateMesh: 0
m_SkinWidth: 0.01
m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &481182373
MeshFilter:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 481182369}
m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0}
--- !u!114 &481182374
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 481182369}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 5e011ee45226e0c43b09a26d1c475cce, type: 3}
m_Name:
m_EditorClassIdentifier:
Button: {fileID: 1541181465}
--- !u!1 &500814168
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 500814169}
- component: {fileID: 500814172}
- component: {fileID: 500814171}
- component: {fileID: 500814170}
- component: {fileID: 500814173}
m_Layer: 0
m_Name: Effect
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &500814169
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 500814168}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0.05}
m_LocalScale: {x: 1.1, y: 1.1, z: 1}
m_Children: []
m_Father: {fileID: 565675569}
m_RootOrder: 4
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!23 &500814170
MeshRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 500814168}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: 685f5d363eec7354b8d5dfbddbc86e28, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!65 &500814171
BoxCollider:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 500814168}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 0
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!33 &500814172
MeshFilter:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 500814168}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!114 &500814173
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 500814168}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: a60b4c2407cc2d848be7c75de1e789e0, type: 3}
m_Name:
m_EditorClassIdentifier:
InteractiveHost: {fileID: 0}
EffectColors:
- {r: 0.05490196, g: 0.65882355, b: 0.6, a: 1}
- {r: 0.9019608, g: 0.9019608, b: 0.9019608, a: 1}
EffectScale:
- {x: 1.1, y: 1.1, z: 1}
- {x: 1.15, y: 1.15, z: 1}
EffectPosition:
- {x: 0, y: 0, z: 0.05}
- {x: 0, y: 0, z: 0.6}
--- !u!1 &554237388
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 554237389}
- component: {fileID: 554237391}
- component: {fileID: 554237390}
m_Layer: 0
m_Name: InteractiveToggle
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &554237389
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 554237388}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0.01399998, y: 0.7714286, z: -0.5500001}
m_LocalScale: {x: 0.014285712, y: 0.014285712, z: 0.049999997}
m_Children: []
m_Father: {fileID: 565675569}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!102 &554237390
TextMesh:
serializedVersion: 3
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 554237388}
m_Text: InteractiveToggle
m_OffsetZ: 0
m_CharacterSize: 1
m_LineSpacing: 1
m_Anchor: 1
m_Alignment: 1
m_TabSize: 4
m_FontSize: 84
m_FontStyle: 0
m_RichText: 1
m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0}
m_Color:
serializedVersion: 2
rgba: 4293322470
--- !u!23 &554237391
MeshRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 554237388}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 10100, guid: 0000000000000000e000000000000000, type: 0}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!1 &565675565
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 565675569}
- component: {fileID: 565675568}
- component: {fileID: 565675567}
- component: {fileID: 565675566}
- component: {fileID: 565675571}
m_Layer: 0
m_Name: InteractableToggle
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!23 &565675566
MeshRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 565675565}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: a01f482e2ab5c894ba4e1e52a3cf351c, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!65 &565675567
BoxCollider:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 565675565}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!33 &565675568
MeshFilter:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 565675565}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!4 &565675569
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 565675565}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0.22, y: 0, z: 1}
m_LocalScale: {x: 0.35, y: 0.35, z: 0.1}
m_Children:
- {fileID: 554237389}
- {fileID: 574450914}
- {fileID: 683248269}
- {fileID: 1315320361}
- {fileID: 500814169}
m_Father: {fileID: 1819360065}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &565675571
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 565675565}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: ab5a4b612c7bbb847b8e66c900eaa96e, type: 3}
m_Name:
m_EditorClassIdentifier:
ParentObject: {fileID: 0}
IsEnabled: 1
DetectHold: 0
HoldTime: 0.02
RollOffTime: 0.02
Keyword:
KeywordRequiresGaze: 1
OnSelectEvents:
m_PersistentCalls:
m_Calls: []
m_TypeName: UnityEngine.Events.UnityEvent, UnityEngine.CoreModule, Version=0.0.0.0,
Culture=neutral, PublicKeyToken=null
OnDownEvent:
m_PersistentCalls:
m_Calls: []
m_TypeName: UnityEngine.Events.UnityEvent, UnityEngine.CoreModule, Version=0.0.0.0,
Culture=neutral, PublicKeyToken=null
OnHoldEvent:
m_PersistentCalls:
m_Calls: []
m_TypeName: UnityEngine.Events.UnityEvent, UnityEngine.CoreModule, Version=0.0.0.0,
Culture=neutral, PublicKeyToken=null
AllowSelection: 1
AllowDeselect: 1
HasSelection: 0
PassiveMode: 0
OnSelection:
m_PersistentCalls:
m_Calls:
- m_Target: {fileID: 574450915}
m_MethodName: set_text
m_Mode: 5
m_Arguments:
m_ObjectArgument: {fileID: 0}
m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
m_IntArgument: 0
m_FloatArgument: 0
m_StringArgument: Is Selected
m_BoolArgument: 0
m_CallState: 2
m_TypeName: UnityEngine.Events.UnityEvent, UnityEngine.CoreModule, Version=0.0.0.0,
Culture=neutral, PublicKeyToken=null
OnDeselection:
m_PersistentCalls:
m_Calls:
- m_Target: {fileID: 574450915}
m_MethodName: set_text
m_Mode: 5
m_Arguments:
m_ObjectArgument: {fileID: 0}
m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
m_IntArgument: 0
m_FloatArgument: 0
m_StringArgument: Not Selected
m_BoolArgument: 0
m_CallState: 2
m_TypeName: UnityEngine.Events.UnityEvent, UnityEngine.CoreModule, Version=0.0.0.0,
Culture=neutral, PublicKeyToken=null
--- !u!1 &574450913
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 574450914}
- component: {fileID: 574450916}
- component: {fileID: 574450915}
m_Layer: 0
m_Name: Selected
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &574450914
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 574450913}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0.014, y: 0.361, z: -0.55}
m_LocalScale: {x: 0.014285714, y: 0.014285714, z: 0.049999997}
m_Children: []
m_Father: {fileID: 565675569}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!102 &574450915
TextMesh:
serializedVersion: 3
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 574450913}
m_Text: Not Selected
m_OffsetZ: 0
m_CharacterSize: 1
m_LineSpacing: 1
m_Anchor: 1
m_Alignment: 1
m_TabSize: 4
m_FontSize: 64
m_FontStyle: 0
m_RichText: 1
m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0}
m_Color:
serializedVersion: 2
rgba: 4286444430
--- !u!23 &574450916
MeshRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 574450913}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 10100, guid: 0000000000000000e000000000000000, type: 0}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!1 &588563753
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 588563754}
- component: {fileID: 588563756}
- component: {fileID: 588563755}
m_Layer: 0
m_Name: Interactive
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &588563754
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 588563753}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0.013999981, y: 0.7714286, z: -0.5500001}
m_LocalScale: {x: 0.014285712, y: 0.014285712, z: 0.049999997}
m_Children: []
m_Father: {fileID: 1541181469}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!102 &588563755
TextMesh:
serializedVersion: 3
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 588563753}
m_Text: Interactive
m_OffsetZ: 0
m_CharacterSize: 1
m_LineSpacing: 1
m_Anchor: 1
m_Alignment: 1
m_TabSize: 4
m_FontSize: 84
m_FontStyle: 0
m_RichText: 1
m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0}
m_Color:
serializedVersion: 2
rgba: 4293322470
--- !u!23 &588563756
MeshRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 588563753}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 10100, guid: 0000000000000000e000000000000000, type: 0}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!1 &683248268
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 683248269}
- component: {fileID: 683248271}
- component: {fileID: 683248270}
- component: {fileID: 683248272}
m_Layer: 0
m_Name: Enabled
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &683248269
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 683248268}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0.014, y: -0.297, z: -0.55}
m_LocalScale: {x: 0.014285714, y: 0.014285714, z: 0.049999997}
m_Children: []
m_Father: {fileID: 565675569}
m_RootOrder: 2
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!102 &683248270
TextMesh:
serializedVersion: 3
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 683248268}
m_Text: (Enabled)
m_OffsetZ: 0
m_CharacterSize: 1
m_LineSpacing: 1
m_Anchor: 1
m_Alignment: 1
m_TabSize: 4
m_FontSize: 64
m_FontStyle: 0
m_RichText: 1
m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0}
m_Color:
serializedVersion: 2
rgba: 4286444687
--- !u!23 &683248271
MeshRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 683248268}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 10100, guid: 0000000000000000e000000000000000, type: 0}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!114 &683248272
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 683248268}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 01cf07a5255bd1f4ea26c9d43d714019, type: 3}
m_Name:
m_EditorClassIdentifier:
InteractiveHost: {fileID: 0}
TextField: {fileID: 683248270}
--- !u!1 &1079276087
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 1079276088}
- component: {fileID: 1079276091}
- component: {fileID: 1079276090}
- component: {fileID: 1079276089}
m_Layer: 0
m_Name: State
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &1079276088
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1079276087}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0.013999981, y: 0.067, z: -0.5500001}
m_LocalScale: {x: 0.014285713, y: 0.014285713, z: 0.049999997}
m_Children: []
m_Father: {fileID: 1541181469}
m_RootOrder: 2
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &1079276089
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1079276087}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 9f48cf8bb87d0e4499282ba6c784e03a, type: 3}
m_Name:
m_EditorClassIdentifier:
InteractiveHost: {fileID: 0}
TextField: {fileID: 1079276090}
--- !u!102 &1079276090
TextMesh:
serializedVersion: 3
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1079276087}
m_Text: Default
m_OffsetZ: 0
m_CharacterSize: 1
m_LineSpacing: 1
m_Anchor: 1
m_Alignment: 1
m_TabSize: 4
m_FontSize: 84
m_FontStyle: 0
m_RichText: 1
m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0}
m_Color:
serializedVersion: 2
rgba: 4293322470
--- !u!23 &1079276091
MeshRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1079276087}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 10100, guid: 0000000000000000e000000000000000, type: 0}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!1 &1133174989
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 1133174990}
- component: {fileID: 1133174992}
- component: {fileID: 1133174991}
- component: {fileID: 1133174993}
m_Layer: 0
m_Name: Enabled
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &1133174990
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1133174989}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0.014, y: -0.297, z: -0.55}
m_LocalScale: {x: 0.014285714, y: 0.014285714, z: 0.049999997}
m_Children: []
m_Father: {fileID: 1541181469}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!102 &1133174991
TextMesh:
serializedVersion: 3
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1133174989}
m_Text: (Enabled)
m_OffsetZ: 0
m_CharacterSize: 1
m_LineSpacing: 1
m_Anchor: 1
m_Alignment: 1
m_TabSize: 4
m_FontSize: 64
m_FontStyle: 0
m_RichText: 1
m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0}
m_Color:
serializedVersion: 2
rgba: 4286444687
--- !u!23 &1133174992
MeshRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1133174989}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 10100, guid: 0000000000000000e000000000000000, type: 0}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!114 &1133174993
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1133174989}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 01cf07a5255bd1f4ea26c9d43d714019, type: 3}
m_Name:
m_EditorClassIdentifier:
InteractiveHost: {fileID: 0}
TextField: {fileID: 1133174991}
--- !u!1 &1315320360
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 1315320361}
- component: {fileID: 1315320363}
- component: {fileID: 1315320362}
- component: {fileID: 1315320364}
m_Layer: 0
m_Name: Default
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &1315320361
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1315320360}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0.014, y: 0.067, z: -0.55}
m_LocalScale: {x: 0.014285714, y: 0.014285714, z: 0.049999997}
m_Children: []
m_Father: {fileID: 565675569}
m_RootOrder: 3
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!102 &1315320362
TextMesh:
serializedVersion: 3
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1315320360}
m_Text: Default
m_OffsetZ: 0
m_CharacterSize: 1
m_LineSpacing: 1
m_Anchor: 1
m_Alignment: 1
m_TabSize: 4
m_FontSize: 84
m_FontStyle: 0
m_RichText: 1
m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0}
m_Color:
serializedVersion: 2
rgba: 4293322470
--- !u!23 &1315320363
MeshRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1315320360}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 10100, guid: 0000000000000000e000000000000000, type: 0}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!114 &1315320364
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1315320360}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 9f48cf8bb87d0e4499282ba6c784e03a, type: 3}
m_Name:
m_EditorClassIdentifier:
InteractiveHost: {fileID: 0}
TextField: {fileID: 1315320362}
--- !u!1 &1541181464
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 1541181469}
- component: {fileID: 1541181468}
- component: {fileID: 1541181467}
- component: {fileID: 1541181466}
- component: {fileID: 1541181465}
m_Layer: 0
m_Name: Interactable
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &1541181465
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1541181464}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 4ffc62be7bfe7064782e89be291ed835, type: 3}
m_Name:
m_EditorClassIdentifier:
ParentObject: {fileID: 0}
IsEnabled: 1
DetectHold: 1
HoldTime: 0.8
RollOffTime: 0.02
Keyword:
KeywordRequiresGaze: 1
OnSelectEvents:
m_PersistentCalls:
m_Calls:
- m_Target: {fileID: 137962128}
m_MethodName: set_text
m_Mode: 5
m_Arguments:
m_ObjectArgument: {fileID: 0}
m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
m_IntArgument: 0
m_FloatArgument: 0
m_StringArgument: Tap
m_BoolArgument: 0
m_CallState: 2
m_TypeName: UnityEngine.Events.UnityEvent, UnityEngine.CoreModule, Version=0.0.0.0,
Culture=neutral, PublicKeyToken=null
OnDownEvent:
m_PersistentCalls:
m_Calls: []
m_TypeName: UnityEngine.Events.UnityEvent, UnityEngine.CoreModule, Version=0.0.0.0,
Culture=neutral, PublicKeyToken=null
OnHoldEvent:
m_PersistentCalls:
m_Calls:
- m_Target: {fileID: 137962128}
m_MethodName: set_text
m_Mode: 5
m_Arguments:
m_ObjectArgument: {fileID: 0}
m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
m_IntArgument: 0
m_FloatArgument: 0
m_StringArgument: Hold
m_BoolArgument: 0
m_CallState: 2
m_TypeName: UnityEngine.Events.UnityEvent, UnityEngine.CoreModule, Version=0.0.0.0,
Culture=neutral, PublicKeyToken=null
--- !u!23 &1541181466
MeshRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1541181464}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: a01f482e2ab5c894ba4e1e52a3cf351c, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!65 &1541181467
BoxCollider:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1541181464}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 1
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!33 &1541181468
MeshFilter:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1541181464}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
--- !u!4 &1541181469
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1541181464}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: -0.22, y: 0, z: 1}
m_LocalScale: {x: 0.35, y: 0.35, z: 0.1}
m_Children:
- {fileID: 588563754}
- {fileID: 1133174990}
- {fileID: 1079276088}
- {fileID: 137962127}
- {fileID: 1828910919}
- {fileID: 481182370}
m_Father: {fileID: 1819360065}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &1774033262
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 1774033264}
- component: {fileID: 1774033263}
m_Layer: 0
m_Name: Directional Light
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!108 &1774033263
Light:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1774033262}
m_Enabled: 1
serializedVersion: 8
m_Type: 1
m_Color: {r: 1, g: 0.95686275, b: 0.8392157, a: 1}
m_Intensity: 1
m_Range: 10
m_SpotAngle: 30
m_CookieSize: 10
m_Shadows:
m_Type: 2
m_Resolution: -1
m_CustomResolution: -1
m_Strength: 1
m_Bias: 0.05
m_NormalBias: 0.4
m_NearPlane: 0.2
m_Cookie: {fileID: 0}
m_DrawHalo: 0
m_Flare: {fileID: 0}
m_RenderMode: 0
m_CullingMask:
serializedVersion: 2
m_Bits: 4294967295
m_Lightmapping: 4
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0
m_ShadowAngle: 0
--- !u!4 &1774033264
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1774033262}
m_LocalRotation: {x: 0.40821788, y: -0.23456968, z: 0.10938163, w: 0.8754261}
m_LocalPosition: {x: 0, y: 3, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 3
m_LocalEulerAnglesHint: {x: 50, y: -30, z: 0}
--- !u!1 &1819360063
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 1819360065}
- component: {fileID: 1819360064}
m_Layer: 0
m_Name: SceneContent
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &1819360064
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1819360063}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 906323c940a3fad4f8f7e9e4fcd747f4, type: 3}
m_Name:
m_EditorClassIdentifier:
--- !u!4 &1819360065
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1819360063}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children:
- {fileID: 565675569}
- {fileID: 1541181469}
m_Father: {fileID: 0}
m_RootOrder: 4
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &1828910918
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 1828910919}
- component: {fileID: 1828910923}
- component: {fileID: 1828910922}
- component: {fileID: 1828910921}
- component: {fileID: 1828910920}
m_Layer: 0
m_Name: Effect
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &1828910919
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1828910918}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0.05}
m_LocalScale: {x: 1.1, y: 1.1, z: 1}
m_Children: []
m_Father: {fileID: 1541181469}
m_RootOrder: 4
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &1828910920
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1828910918}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: a60b4c2407cc2d848be7c75de1e789e0, type: 3}
m_Name:
m_EditorClassIdentifier:
InteractiveHost: {fileID: 0}
EffectColors:
- {r: 0.05490196, g: 0.65882355, b: 0.6, a: 1}
- {r: 0.9019608, g: 0.9019608, b: 0.9019608, a: 1}
EffectScale:
- {x: 1.1, y: 1.1, z: 1}
- {x: 1.15, y: 1.15, z: 1}
EffectPosition:
- {x: 0, y: 0, z: 0.05}
- {x: 0, y: 0, z: 0.6}
--- !u!23 &1828910921
MeshRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1828910918}
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_DynamicOccludee: 1
m_MotionVectors: 1
m_LightProbeUsage: 1
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: 685f5d363eec7354b8d5dfbddbc86e28, type: 2}
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 1
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_StitchLightmapSeams: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!65 &1828910922
BoxCollider:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1828910918}
m_Material: {fileID: 0}
m_IsTrigger: 0
m_Enabled: 0
serializedVersion: 2
m_Size: {x: 1, y: 1, z: 1}
m_Center: {x: 0, y: 0, z: 0}
--- !u!33 &1828910923
MeshFilter:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1828910918}
m_Mesh: {fileID: 10202, guid: 0000000000000000e000000000000000, type: 0}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="ascii"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<title>plaintext</title>
<link rel="stylesheet" href="epydoc.css" type="text/css" />
<script type="text/javascript" src="epydoc.js"></script>
</head>
<body bgcolor="white" text="black" link="blue" vlink="#204080"
alink="#204080">
<h1 class="toc">Module plaintext</h1>
<hr />
<h2 class="toc">Classes</h2>
<a target="mainFrame" href="epydoc.markup.plaintext.ParsedPlaintextDocstring-class.html"
>ParsedPlaintextDocstring</a><br /> <h2 class="toc">Functions</h2>
<a target="mainFrame" href="epydoc.markup.plaintext-module.html#parse_docstring"
>parse_docstring</a><br /><hr />
<span class="options">[<a href="javascript:void(0);" class="privatelink"
onclick="toggle_private();">hide private</a>]</span>
<script type="text/javascript">
<!--
// Private objects are initially displayed (because if
// javascript is turned off then we want them to be
// visible); but by default, we want to hide them. So hide
// them unless we have a cookie that says to show them.
checkCookie();
// -->
</script>
</body>
</html>
| {
"pile_set_name": "Github"
} |
<?include get_cfg_var("cartulary_conf").'/includes/env.php';?>
<?include "$confroot/$templates/php_cgi_init.php"?>
<?
// Json header
header("Cache-control: no-cache, must-revalidate");
header("Content-Type: application/json");
// Globals
$jsondata = array();
//debug request
//loggit(3, "DEBUG: ".print_r($_REQUEST, TRUE));
//Check that s3 is enabled
if( !s3_is_enabled($uid) && !sys_s3_is_enabled() ) {
//Log it
loggit(2,"User didn't have s3 enabled for opml save: [$uid].");
$jsondata['status'] = "false";
$jsondata['description'] = "Configure s3 in the prefs to enable saving.";
echo json_encode($jsondata);
exit(1);
}
//Get the title
$title = "";
if ( isset($_REQUEST['title']) ) {
$title = $_REQUEST['title'];
}
//Opml type
if(isset($_REQUEST['type']) && is_numeric($_REQUEST['type'])) {
$type = $_REQUEST['type'];
if( $type == 1 ) loggit(3, "DEBUG: RSS file from editor.");
} else {
$type = 0;
}
//Render the title?
$rendertitle = TRUE;
if ( isset($_REQUEST['rendertitle']) && $_REQUEST['rendertitle'] == "false" ) {
$rendertitle = FALSE;
}
loggit(3, "DEBUG: [".$_REQUEST['rendertitle']."]");
//Get the redirect source
$rhost = "";
if ( isset($_REQUEST['redirect']) && !empty($_REQUEST['redirect']) ) {
$rhost = $_REQUEST['redirect'];
if($rhost == $system_fqdn) {
//Log it
loggit(2,"User tried to set a document redirect to the system FQDN: [$uid|$rhost].");
$jsondata['status'] = "false";
$jsondata['description'] = "You can't use that host name as a redirect.";
echo json_encode($jsondata);
exit(1);
}
}
//Get disqus bool
$disqus = FALSE;
if ( isset($_REQUEST['disqus']) && $_REQUEST['disqus'] == "true" ) {
$disqus = TRUE;
}
//Get wysiwyg bool
$wysiwyg = FALSE;
if ( isset($_REQUEST['wysiwyg']) && $_REQUEST['wysiwyg'] == "true" ) {
$wysiwyg = TRUE;
}
//Is this an article that was edited
$aid = "";
if ( isset($_REQUEST['aid']) && $_REQUEST['aid'] != "false" ) {
$aid = $_REQUEST['aid'];
}
//Do we need to overwrite the existing article
$articleoverwrite = "";
if ( isset($_REQUEST['articleoverwrite']) && $_REQUEST['articleoverwrite'] != "false" ) {
$articleoverwrite = $_REQUEST['articleoverwrite'];
}
//Get watched bool
$watched = FALSE;
if ( isset($_REQUEST['watched']) && $_REQUEST['watched'] == "true" ) {
$watched = TRUE;
}
//Get locked bool
$locked = FALSE;
if ( isset($_REQUEST['locked']) && $_REQUEST['locked'] == "true" ) {
$locked = TRUE;
}
//Get private bool
$private = FALSE;
if ( isset($_REQUEST['private']) && $_REQUEST['private'] == "true" ) {
$private = TRUE;
}
//Get private token
$privtoken = "";
if ( isset($_REQUEST['privtoken']) && !empty($_REQUEST['privtoken']) ) {
$privtoken = $_REQUEST['privtoken'];
}
//Get variables if any
$variables = [];
if ( isset($_REQUEST['variables']) && !empty($_REQUEST['variables'])) {
$variables = $_REQUEST['variables'];
loggit(3, print_r($variables, TRUE));
}
$templateid = "";
if ( isset($_REQUEST['templateid']) && !empty($_REQUEST['templateid'])) {
$templateid = $_REQUEST['templateid'];
loggit(3, "Template id: [$templateid]");
}
//Get a template name if set
$templatename = "";
if ( isset($_REQUEST['templatename']) && !empty($_REQUEST['templatename']) ) {
$templatename = $_REQUEST['templatename'];
loggit(3, "Template name: [$templatename]");
}
//If the outline is private make sure we have a token
if( $private && empty($privtoken) ) {
$privtoken = time().random_gen(64);
}
//Make sure we have a filename to use
if ( isset($_REQUEST['filename']) ) {
$filename = $_REQUEST['filename'];
} else {
//Log it
loggit(2,"No filename was set for this opml save.");
$jsondata['status'] = "false";
$jsondata['description'] = "No filename given.";
echo json_encode($jsondata);
exit(1);
};
//Do we have an old filename? If so, this is a file name change
$oldfilename = "";
if ( isset($_REQUEST['oldfilename']) ) {
$oldfilename = $_REQUEST['oldfilename'];
};
//Get the opml data
if ( isset($_REQUEST['opml']) ) {
$opml = $_REQUEST['opml'];
} else {
//Log it
loggit(2,"No opml data was set for this opml save.");
$jsondata['status'] = "false";
$jsondata['description'] = "No opml data given.";
echo json_encode($jsondata);
exit(1);
};
//We're going to need the S3 url of this file to continue
$s3url = get_s3_url($uid, "/opml/", $filename);
//Before we do anything we need to confirm that this user id has permissions to update this file. If it's a new
//file, we need to allow for that
if( !empty($templateid) ) {
if( !user_can_edit_recent_file_by_id($uid, $templateid) ) {
//Log it
loggit(2,"User: [$uid] tried to save variables for a template id they don't own.");
$jsondata['status'] = "false";
$jsondata['description'] = "You don't have permission to work with this template.";
echo json_encode($jsondata);
exit(1);
}
}
//Put the opml file in S3
$s3info = get_s3_info($uid);
if(!$private) {
$s3res = putInS3(gzencode($opml), $filename, $s3info['bucket']."/opml", $s3info['key'], $s3info['secret'], array(
'Content-Type' => 'text/xml',
'Content-Encoding' => 'gzip'
), $private);
if(!$s3res) {
loggit(2, "Could not create S3 file: [$filename] for user: [$uid].");
loggit(3, "Could not create S3 file: [$filename] for user: [$uid].");
//Log it
$jsondata['status'] = "false";
$jsondata['description'] = "Error writing to S3.";
echo json_encode($jsondata);
exit(1);
} else {
loggit(1, "Wrote opml to S3 at url: [$s3url].");
}
} else {
//Delete the opml file from S3 if the outline is marked private since it's not even usable
loggit(3, "Deleting private outline OPML in S3: [".$s3info['bucket']."/opml"." | ".$filename."]");
$s3res = deleteFromS3($filename, $s3info['bucket']."/opml", $s3info['key'], $s3info['secret']);
}
//Put the opml content in IPFS
$opmlhash = add_content_to_ipfs($opml);
//loggit(3, "DEBUG: [".print_r($opmlhash, TRUE)."]");
//Assemble an old url if we had an old filename
$s3oldurl = "";
if ( !empty($oldfilename) ) {
$s3oldurl = get_s3_url($uid, "/opml/", $oldfilename);
};
//Put the html file in S3
$htmlfilename = str_replace('.opml', '.html', $filename);
$s3htmlauthurl = "";
$s3htmlurl = get_s3_url($uid, "/html/", $htmlfilename);
$htmldata = process_opml_to_html($opml, $title, $uid, $disqus, $s3url, $rendertitle, $s3htmlurl);
$s3res = putInS3(gzencode($htmldata), $htmlfilename, $s3info['bucket']."/html", $s3info['key'], $s3info['secret'], array(
'Content-Type' => 'text/html',
'Cache-Control' => 'max-age=31556926',
'Content-Encoding' => 'gzip'
), $private);
if(!$s3res) {
loggit(2, "Could not create S3 file: [$htmlfilename] for user: [$uid].");
loggit(3, "Could not create S3 file: [$htmlfilename] for user: [$uid].");
//Log it
$jsondata['status'] = "false";
$jsondata['description'] = "Error writing HTML to S3.";
echo json_encode($jsondata);
exit(1);
} else {
$s3html = get_s3_url($uid, "/html/", $htmlfilename);
loggit(1, "Wrote html to S3 at url: [$s3html].");
//If the HTML file was private we need to create an authenticated url for accessing it that lasts for the maximum time of 7 days
if($private) {
//$s3htmlauthurl = get_s3_authenticated_url($s3info['key'], $s3info['secret'], $s3info['bucket'], "html/$htmlfilename", 10080);
}
}
//Put the html content in ipfs
$htmlhash = add_content_to_ipfs($htmldata);
//Is this an RSS file type?
$s3json = "";
if( $type == 1 ) {
//Put the RSS in S3
$rssfilename = str_replace('.opml', '.xml', $filename);
$s3rssurl = get_s3_url($uid, "/rss/", $rssfilename);
$rssdata = convert_opml_to_rss($opml, $uid);
if($rssdata == FALSE || (is_numeric($rssdata) && $rssdata < 0)) {
loggit(2, "RSS Error code: [$rssdata]");
$jsondata['status'] = "false";
if($rssdata == -5) {
$jsondata['description'] = "One of the enclosures in this feed returned a bad response code. Make sure the url for the enclosure file is valid.";
} else
if($rssdata == -4) {
$jsondata['description'] = "One of the items has both a blank title and description. At least one is required.";
} else
if($rssdata == -3) {
$jsondata['description'] = "A non-specific exception occured while building the rss feed.";
} else
if($rssdata == -2) {
$jsondata['description'] = "There were no 'item' nodes found while building the rss feed.";
} else {
$jsondata['description'] = "An unknown error occurred during OPML to RSS conversion.";
}
echo json_encode($jsondata);
exit(1);
}
$s3res = putInS3(gzencode($rssdata), $rssfilename, $s3info['bucket']."/rss", $s3info['key'], $s3info['secret'], array(
'Content-Type' => 'application/rss+xml',
'Content-Encoding' => 'gzip'
), $private);
if(!$s3res) {
loggit(2, "Could not create S3 file: [$rssfilename] for user: [$uid].");
loggit(3, "Could not create S3 file: [$rssfilename] for user: [$uid].");
//Log it
$jsondata['status'] = "false";
$jsondata['description'] = "Error writing rss to S3.";
echo json_encode($jsondata);
exit(1);
} else {
$s3rss = get_s3_url($uid, "/rss/", $rssfilename);
loggit(3, "Wrote rss to S3 at url: [$s3rss].");
//set_s3_bucket_cors($s3info['key'], $s3info['secret'], $s3info['bucket']);
}
}
//Get the current file details
if( $s3oldurl != $s3url && !empty($s3oldurl) && !empty($s3url) ) {
$cfile = get_recent_file_by_url($uid, $s3oldurl, TRUE);
} else {
$cfile = get_recent_file_by_url($uid, $s3url, TRUE);
}
//Update the recent file version table
$temp_opml = preg_replace('/\<dateModified\>.*\<\/dateModified\>/', '', $opml);
$temp_prevopml = preg_replace('/\<dateModified\>.*\<\/dateModified\>/', '', $cfile['content']);
if( $temp_opml != $temp_prevopml && !empty($cfile['content']) && !empty($opml) && !empty($temp_opml) && !empty($temp_prevopml) ) {
loggit(3, "DEBUG: Editor file content changed. Saving old version in version table.");
add_recent_file_version($uid, $s3url, $cfile['title'], $cfile['content'], $cfile['type'], $cfile['disqus'], $cfile['wysiwyg'], $cfile['watched'], $cfile['articleid'], $cfile['locked'], $cfile['ipfshash'], $private, "", $templatename);
} else {
loggit(3, "DEBUG: Editor file content not changed.");
}
//Update timestamp on source template when generating a new file from it so that it doesn't get pushed way down
if( !empty($templateid) ) {
touch_recent_file_by_id($templateid);
}
//Update recent file table
$rid = update_recent_file($uid, $s3url, $title, $opml, $type, $s3oldurl, $disqus, $wysiwyg, $watched, $aid, $locked, $opmlhash, $private, $privtoken, $templatename);
loggit(3, "DEBUG: Recent file id is [$rid].");
//Was this an edited article content request
if( $articleoverwrite && !empty($aid) ) {
add_edited_content_to_article($aid, $uid, convert_opml_to_html($opml));
set_article_title($aid, $title);
}
//Go ahead and put in the urls we saved
$jsondata['url'] = $s3url;
$jsondata['html'] = $s3html;
$jsondata['htmlauth'] = $s3htmlauthurl;
if(!empty($opmlhash)) {
$jsondata['ipfs']['opml'] = $opmlhash;
}
if(!empty($htmlhash)) {
$jsondata['ipfs']['html'] = $htmlhash;
}
//Extract and add watched urls if this is a watched outline
remove_watched_urls_by_file_id($rid);
if($watched) {
$includes = get_includes_from_outline($opml);
foreach( $includes as $include ) {
$u = get_watched_url_by_url($include);
if( empty($u) ) {
$u['lastmodified'] = "";
$u['content'] = "";
}
add_watched_url($rid, $include, $u['lastmodified'], $u['content']);
}
}
//Update the redirector table
if( !empty($rhost) ) {
//Let's not clobber existing redirects
$erurl = get_redirection_url_by_host_name($rhost);
if( !empty($erurl) && $erurl != $s3htmlurl ) {
$erurl = str_replace('.html', '.opml', $erurl);
$erurl = str_replace('/html/', '/opml/', $erurl);
//Log it
loggit(2,"Attempted redirection hostname already exists: [$rhost].");
$jsondata['status'] = "false";
$jsondata['duration'] = 20;
$jsondata['description'] = "Attempted redirection hostname already in use by <a target='_blank' href=\"/editor?url=$erurl\">this</a> outline.";
echo json_encode($jsondata);
exit(1);
}
//Update the redirection table
update_redirection_host_name_by_url($s3html, $rhost, $uid);
//Parse out the url to find the bucket and key names
if( stripos($rhost, 'http') !== 0) {
$rhost = 'http://'.$rhost;
}
$purl = parse_url($rhost);
//See if the host of this url is a bucket
$buckets = get_s3_buckets($s3info['key'], $s3info['secret']);
$search_array = array_map('strtolower', $buckets);
if( in_array(strtolower($purl['host']), $search_array) ) {
//Create the index stub that will redirect via a meta-refresh
$rfile = create_short_url_file($s3html, $uid);
loggit(3, "DEBUG: [".print_r($purl, TRUE));
loggit(3, "DEBUG: ".print_r($search_array, TRUE));
//Pull out the last part of the path to use as a file stub name
if( !empty($purl['path']) ) {
$pathFragments = explode('/', rtrim($purl['path'], '/'));
loggit(3, "DEBUG: ".print_r($pathFragments, TRUE));
$pend = trim(end($pathFragments), '/');
array_shift($pathFragments);
array_pop($pathFragments);
$path = implode('/', $pathFragments);
loggit(3, "DEBUG: path is [$path].");
} else {
$path = "";
$pend = "index.html";
}
loggit(3, "DEBUG: ".print_r($pathFragments, TRUE));
$s3path = rtrim($purl['host']."/".trim($path, '/'), '/');
loggit(3, "DEBUG: s3path is [$s3path].");
//Now put the index stub into s3
$s3res = putInS3(gzencode($rfile), $pend, $s3path, $s3info['key'], $s3info['secret'], array(
'Content-Type' => 'text/html',
'Content-Encoding' => 'gzip'
));
if(!$s3res) {
loggit(2, "Could not create S3 file: [index.html] for user: [$uid].");
loggit(3, "Could not create S3 file: [index.html] for user: [$uid].");
//Log it
$jsondata['status'] = "false";
$jsondata['description'] = "Error writing redirection stub to S3.";
echo json_encode($jsondata);
exit(1);
} else {
$redhtml = 'http://'.$s3path."/".$pend;
loggit(3, "DEBUG: Wrote html to S3 at url: [$redhtml].");
}
}
} else {
remove_redirection_by_url($s3html, $uid);
}
//If variables were present, save them
if( $type == 6 && empty($templateid) && !empty($rid) ) {
$templateid = $rid;
}
if( !empty($variables) && !empty($templateid) ) {
foreach( $variables as $variable) {
update_recent_file_variable($uid, $templateid, $variable['name'], $variable['value']);
}
}
//Log it
loggit(3,"Saved: [$filename] to S3 for user: [$uid]. ");
//Give feedback that all went well
$jsondata['status'] = "true";
$jsondata['templateid'] = $templateid;
$jsondata['privtoken'] = $privtoken;
$jsondata['description'] = "File saved to S3.";
echo json_encode($jsondata);
return(0); | {
"pile_set_name": "Github"
} |
<?xml version='1.0' encoding='utf-8'?>
<section xmlns="https://code.dccouncil.us/schemas/dc-library" xmlns:codified="https://code.dccouncil.us/schemas/codified" xmlns:codify="https://code.dccouncil.us/schemas/codify" xmlns:xi="http://www.w3.org/2001/XInclude" containing-doc="D.C. Code">
<num>21-521</num>
<heading>Detention of persons believed to be mentally ill; transportation and application to hospital.</heading>
<text>An accredited officer or agent of the Department of Mental Health of the District of Columbia, or an officer authorized to make arrests in the District of Columbia, or a physician or qualified psychologist of the person in question, who has reason to believe that a person is mentally ill and, because of the illness, is likely to injure himself or others if he is not immediately detained may, without a warrant, take the person into custody, transport him to a public or private hospital, or to the Department, and make application for his admission thereto for purposes of emergency observation and diagnosis. The application shall reveal the circumstances under which the person was taken into custody and the reasons therefor.</text>
<annotations>
<annotation doc="Pub. L. 89-183" type="History">Sept. 14, 1965, 79 Stat. 753, Pub. L. 89-183, § 1</annotation>
<annotation doc="Pub. L. 91-358" type="History">July 29, 1970, 84 Stat. 567, Pub. L. 91-358, title I, § 150(c)(2)</annotation>
<annotation doc="D.C. Law 5-48" type="History">Feb. 24, 1984, D.C. Law 5-48,§ 11(a)(8), 30 DCR 5778</annotation>
<annotation doc="D.C. Law 7-104" type="History">Apr. 30, 1988, D.C. Law 7-104, § 6(f), 35 DCR 147</annotation>
<annotation doc="D.C. Law 14-56" type="History">Dec. 18, 2001, D.C. Law 14-56, § 116(g)(1), 48 DCR 7674</annotation>
<annotation doc="D.C. Law 14-283" type="History">Apr. 4, 2003, D.C. Law 14-283, § 2(h), 50 DCR 917</annotation>
<annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 2(h) of Mental Health Civil Commitment Congressional Review Emergency Act of 2003 (D.C. Act 15-41, March 24, 2003, 50 DCR 2784).</annotation>
<annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 2(h) of Mental Health Civil Commitment Emergency Act of 2002 (D.C. Act 14-546, December 12, 2002, 50 DCR 199).</annotation>
<annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 2(g) of Mental Health Commitment Congressional Review Emergency Act of 2002 (D.C. Act 14-350, April 24, 2002, 49 DCR 4417).</annotation>
<annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 2(g) of Mental Health Commitment Emergency Amendment Act of 2002 (D.C. Act 14-265, January 30, 2002, 49 DCR 1450).</annotation>
<annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 116(g)(1) of Mental Health Service Delivery Reform Congressional Review Emergency Act of 2001 (D.C. Act 14-144, October 23, 2001, 48 DCR 9947).</annotation>
<annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 16(g)(1) of Department of Mental Health Establishment Congressional Review Emergency Amendment Act of 2001 (D.C. Act 14-101, July 23, 2001, 48 DCR 7123).</annotation>
<annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 16(g)(1) of Department of Mental Health Establishment Emergency Amendment Act of 2001 (D.C. Act 14-55, May 2, 2001, 48 DCR 4390).</annotation>
<annotation type="Temporary Legislation">Section 5(b) of <cite doc="D.C. Law 14-131">D.C. Law 14-131</cite> provided that the act shall expire after 225 days of its having taken effect.</annotation>
<annotation type="Temporary Legislation">Section 2(g) of <cite doc="D.C. Law 14-131">D.C. Law 14-131</cite>, in the first sentence, added “, or to the Department,” following “or private hospital”.</annotation>
<annotation type="Temporary Legislation">Section 19(b) of <cite doc="D.C. Law 14-51">D.C. Law 14-51</cite> provided that the act shall expire after 225 days of its having taken effect.</annotation>
<annotation type="Temporary Legislation">Section 16(g)(1) of <cite doc="D.C. Law 14-51">D.C. Law 14-51</cite> substituted “Department of Mental Health” for “Department of Human Services” in the first sentence.</annotation>
<annotation type="Effect of Amendments"><cite doc="D.C. Law 14-283">D.C. Law 14-283</cite> added “, or to the Department” after “or private hospital”.</annotation>
<annotation type="Effect of Amendments"><cite doc="D.C. Law 14-56">D.C. Law 14-56</cite> substituted “Department of Mental Health” for “Department of Human Services”.</annotation>
<annotation type="Prior Codifications">1973 Ed., § 21-521.</annotation>
<annotation type="Prior Codifications">1981 Ed., § 21-521.</annotation>
<annotation type="Section References">This section is referenced in <cite path="§7-1203.03">§ 7-1203.03</cite>, <cite path="§16-2315">§ 16-2315</cite>, <cite path="§21-522">§ 21-522</cite>, and <cite path="§21-582">§ 21-582</cite>.</annotation>
<annotation type="Cross References">St. Elizabeths Hospital, commitment of mentally ill persons, see §§ <cite path="§21-901">21-901</cite> et seq., <cite path="§44-901">44-901</cite> et seq.</annotation>
<annotation type="Cross References">Release of dower, see <cite path="§19-107">§ 19-107</cite>a.</annotation>
<annotation type="Cross References">Redemption from tax sale, removal of disability, see <cite path="§47-1304">§ 47-1304</cite>.</annotation>
<annotation type="Cross References">Real estate leases, mentally ill person’s rights, see <cite path="§42-3222">§ 42-3222</cite> et seq.</annotation>
<annotation type="Cross References">Property of mentally ill persons, see <cite path="§21-2001">§ 21-2001</cite> et seq.</annotation>
<annotation type="Cross References">Physician or qualified psychologist related by blood or marriage to alleged mentally ill person, power to apply or certify mental status, see <cite path="§21-582">§ 21-582</cite>.</annotation>
<annotation type="Cross References">Personal property schedule filing, persons under disability, see <cite path="§47-1601">§ 47-1601</cite>.</annotation>
<annotation type="Cross References">Militia service exemption, see <cite path="§49-401">§ 49-401</cite>.</annotation>
<annotation type="Cross References">Emergency disclosure of mental health information, see <cite path="§7-1203.03">§ 7-1203.03</cite>.</annotation>
<annotation type="Cross References">Conservator, guardian in proceedings for appointment, see <cite path="§21-2041">§ 21-2041</cite> et seq.</annotation>
<annotation type="Cross References">Condemnation of insanitary buildings, appointment of guardian ad litem, see <cite path="§6-909">§ 6-909</cite>.</annotation>
<annotation type="Cross References">Annulment of marriage, <cite path="§46-404">§ 46-404</cite>.</annotation>
</annotations>
</section>
| {
"pile_set_name": "Github"
} |
/**
* Evaluation.java
 * Evaluates the elapsed time of each sameAs approach.
*/
package com.samsung.scrc.wsg.k.eval;
import com.samsung.scrc.wsg.k.sa.matcher.BeliefBasedMatcher;
import com.samsung.scrc.wsg.k.sa.matcher.FullMatcher;
import com.samsung.scrc.wsg.k.sa.matcher.MaxConfMatcher;
import com.samsung.scrc.wsg.k.sa.matcher.One2OneMatcher;
import com.samsung.scrc.wsg.k.sa.matcher.ThresholdMatcher;
import com.samsung.scrc.wsg.k.sa.stat.Stat;
/**
* @author yuxie
*
* @date Apr 6, 2015
*
*/
public class Evaluation {
/**
* Evaluate pre-processing (<Entity, Language Count>)
*/
public void evalPreprocess() {
long startTime = System.currentTimeMillis();
System.out.println("Preprocess Evaluation starts at: "+startTime);
Stat.statLang();
long endTime = System.currentTimeMillis();
System.out.println("Preprocess Evaluation finishes at: "+endTime);
System.out.println("Preprocess Elapsed Time: "+(endTime - startTime) / 1000.0+"s.");
}
/**
* Evaluate full matcher approach
*/
public void evalFullMatcher() {
long startTime = System.currentTimeMillis();
System.out.println("Full Matcher Evaluation starts at: "+startTime);
FullMatcher matcher = new FullMatcher();
matcher.init();
matcher.match();
matcher.close();
long endTime = System.currentTimeMillis();
System.out.println("Full Matcher Evaluation finishes at: "+endTime);
System.out.println("Full Matcher Elapsed Time: " + (endTime - startTime)
/ 1000.0 + "s.");
}
/**
* Evaluate max confidence approach
*/
public void evalMaxConfMatcher() {
long startTime = System.currentTimeMillis();
System.out.println("Max Confidence Matcher Evaluation starts at: " + startTime);
Stat.statSA();
long interTime = System.currentTimeMillis();
System.out.println("Max Confidence Matcher Evaluation intermediates at: "
+ interTime);
MaxConfMatcher matcher = new MaxConfMatcher();
matcher.init();
matcher.match();
matcher.close();
long endTime = System.currentTimeMillis();
System.out.println("Max Confidence Evaluation finishes at: " + endTime);
System.out.println("Max Confidence Elapsed Time: " + (endTime - startTime)
/ 1000.0 + "s.");
}
/**
* Evaluate threshold filtering approach
*/
public void evalThresholdMatcher() {
long startTime = System.currentTimeMillis();
System.out.println("Threshold Matcher Evaluation starts at:\t" + startTime);
ThresholdMatcher matcher = new ThresholdMatcher();
matcher.init();
matcher.match();
matcher.close();
long endTime = System.currentTimeMillis();
System.out.println("Threshold Evaluation finishes at:\t" + endTime);
System.out.println("Threshold Elapsed Time:\t" + (endTime - startTime) / 1000.0
+ "s.");
}
/**
* Evaluate one-to-one mapping approach
*/
public void evalOne2OneMatcher() {
long startTime = System.currentTimeMillis();
System.out.println("1-1 Only Matcher Evaluation starts at:\t" + startTime);
One2OneMatcher matcher = new One2OneMatcher();
matcher.init();
matcher.match();
matcher.close();
long endTime = System.currentTimeMillis();
System.out.println("1-1 Only Evaluation finishes at:\t" + endTime);
System.out.println("1-1 Only Elapsed Time:\t" + (endTime - startTime) / 1000.0
+ "s.");
}
/**
* Evaluate belief-based approach
*/
public void evalBeliefBasedMatcher() {
long startTime = System.currentTimeMillis();
System.out.println("Belief-based Evaluation starts at:\t" + startTime);
BeliefBasedMatcher matcher = new BeliefBasedMatcher();
matcher.init();
matcher.match();
matcher.close();
long endTime = System.currentTimeMillis();
System.out.println("Belief-based Evaluation finishes at:\t" + endTime);
System.out.println("Belief-based Elapsed Time:\t" + (endTime - startTime)
/ 1000.0 + "s.");
}
/**
* Controls the whole evaluation process
*/
public void eval() {
evalPreprocess();
evalFullMatcher();
evalMaxConfMatcher();
evalThresholdMatcher();
evalOne2OneMatcher();
evalBeliefBasedMatcher();
}
}
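/*
 * Hypothetical usage sketch (not part of the original file): shows how the
 * Evaluation driver above might be invoked from a main() entry point. The
 * EvaluationRunner class name and the "preprocess" command-line switch are
 * assumptions for illustration only; the original project may wire this up
 * differently.
 */
class EvaluationRunner {
	public static void main(String[] args) {
		Evaluation evaluation = new Evaluation();
		if (args.length > 0 && "preprocess".equalsIgnoreCase(args[0])) {
			// Run only the <Entity, Language Count> pre-processing step.
			evaluation.evalPreprocess();
		} else {
			// Run the full pipeline: preprocess plus all five matcher strategies.
			evaluation.eval();
		}
	}
}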
| {
"pile_set_name": "Github"
} |
Seuss
| {
"pile_set_name": "Github"
} |
{"threshold": 5, "split": ["jUEumBCZY6GRlXbB/uobM53gis/RldnMAfBAnkg=", "e3WhwYy6YUQGedMXkGnxJO6v0ov4cgteapL17wI="], "shares": 9}
{"threshold": 5, "split": ["mrygQzFoasgDY23te4MGqTFXjpS/pMalQiN9Sks=", "XjpXSuBzyfRAbKj7hODzcf0cv0NZsXQDEQiIdtA="], "shares": 9}
{"threshold": 5, "split": ["U64tceO4Ddtr4V6FMXSTJre4f6t4nPczWgtIkfo=", "t3hrQmsqonoBCl7T4f3bLqMShchtOtF3WesMGEs="], "shares": 9}
{"threshold": 5, "split": ["TcOzpm1jNtwsj0cdiLfd6oVHLGMMKRt52t9kU4E=", "RqJ1WKlufadMFwFLbIjvBs82BH/yP8CAT6kyv3M="], "shares": 9} | {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>$(CURRENT_PROJECT_VERSION)</string>
<key>NSHumanReadableCopyright</key>
<string>Copyright © 2016 Aditya Vaidyam. All rights reserved.</string>
<key>NSPrincipalClass</key>
<string></string>
</dict>
</plist>
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2014 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include "include/core/SkTypes.h"
#include "include/core/SkData.h"
#include "include/core/SkFontMgr.h"
#include "include/core/SkFontStyle.h"
#include "include/core/SkPaint.h"
#include "include/core/SkRefCnt.h"
#include "include/core/SkStream.h"
#include "include/core/SkString.h"
#include "include/ports/SkFontMgr_android.h"
#include "include/private/SkFixed.h"
#include "include/private/SkTArray.h"
#include "include/private/SkTDArray.h"
#include "include/private/SkTemplates.h"
#include "src/core/SkFontDescriptor.h"
#include "src/core/SkOSFile.h"
#include "src/core/SkTSearch.h"
#include "src/core/SkTypefaceCache.h"
#include "src/ports/SkFontHost_FreeType_common.h"
#include "src/ports/SkFontMgr_android_parser.h"
#include <algorithm>
#include <limits>
class SkData;
class SkTypeface_Android : public SkTypeface_FreeType {
public:
SkTypeface_Android(const SkFontStyle& style,
bool isFixedPitch,
const SkString& familyName)
: INHERITED(style, isFixedPitch)
, fFamilyName(familyName)
{ }
protected:
void onGetFamilyName(SkString* familyName) const override {
*familyName = fFamilyName;
}
SkString fFamilyName;
private:
using INHERITED = SkTypeface_FreeType;
};
class SkTypeface_AndroidSystem : public SkTypeface_Android {
public:
SkTypeface_AndroidSystem(const SkString& pathName,
const bool cacheFontFiles,
int index,
const SkFixed* axes, int axesCount,
const SkFontStyle& style,
bool isFixedPitch,
const SkString& familyName,
const SkTArray<SkLanguage, true>& lang,
FontVariant variantStyle)
: INHERITED(style, isFixedPitch, familyName)
, fPathName(pathName)
, fIndex(index)
, fAxes(axes, axesCount)
, fLang(lang)
, fVariantStyle(variantStyle)
, fFile(cacheFontFiles ? sk_fopen(fPathName.c_str(), kRead_SkFILE_Flag) : nullptr) {
if (cacheFontFiles) {
SkASSERT(fFile);
}
}
std::unique_ptr<SkStreamAsset> makeStream() const {
if (fFile) {
sk_sp<SkData> data(SkData::MakeFromFILE(fFile));
return data ? std::make_unique<SkMemoryStream>(std::move(data)) : nullptr;
}
return SkStream::MakeFromFile(fPathName.c_str());
}
void onGetFontDescriptor(SkFontDescriptor* desc, bool* serialize) const override {
SkASSERT(desc);
SkASSERT(serialize);
desc->setFamilyName(fFamilyName.c_str());
desc->setStyle(this->fontStyle());
*serialize = false;
}
std::unique_ptr<SkStreamAsset> onOpenStream(int* ttcIndex) const override {
*ttcIndex = fIndex;
return this->makeStream();
}
std::unique_ptr<SkFontData> onMakeFontData() const override {
return std::make_unique<SkFontData>(this->makeStream(), fIndex,
fAxes.begin(), fAxes.count());
}
sk_sp<SkTypeface> onMakeClone(const SkFontArguments& args) const override {
std::unique_ptr<SkFontData> data = this->cloneFontData(args);
if (!data) {
return nullptr;
}
return sk_make_sp<SkTypeface_AndroidSystem>(fPathName,
fFile,
fIndex,
data->getAxis(),
data->getAxisCount(),
this->fontStyle(),
this->isFixedPitch(),
fFamilyName,
fLang,
fVariantStyle);
}
const SkString fPathName;
int fIndex;
const SkSTArray<4, SkFixed, true> fAxes;
const SkSTArray<4, SkLanguage, true> fLang;
const FontVariant fVariantStyle;
SkAutoTCallVProc<FILE, sk_fclose> fFile;
using INHERITED = SkTypeface_Android;
};
class SkTypeface_AndroidStream : public SkTypeface_Android {
public:
SkTypeface_AndroidStream(std::unique_ptr<SkFontData> data,
const SkFontStyle& style,
bool isFixedPitch,
const SkString& familyName)
: INHERITED(style, isFixedPitch, familyName)
, fData(std::move(data))
{ }
void onGetFontDescriptor(SkFontDescriptor* desc, bool* serialize) const override {
SkASSERT(desc);
SkASSERT(serialize);
desc->setFamilyName(fFamilyName.c_str());
*serialize = true;
}
std::unique_ptr<SkStreamAsset> onOpenStream(int* ttcIndex) const override {
*ttcIndex = fData->getIndex();
return fData->getStream()->duplicate();
}
std::unique_ptr<SkFontData> onMakeFontData() const override {
return std::make_unique<SkFontData>(*fData);
}
sk_sp<SkTypeface> onMakeClone(const SkFontArguments& args) const override {
std::unique_ptr<SkFontData> data = this->cloneFontData(args);
if (!data) {
return nullptr;
}
return sk_make_sp<SkTypeface_AndroidStream>(std::move(data),
this->fontStyle(),
this->isFixedPitch(),
fFamilyName);
}
private:
const std::unique_ptr<const SkFontData> fData;
using INHERITED = SkTypeface_Android;
};
class SkFontStyleSet_Android : public SkFontStyleSet {
typedef SkTypeface_FreeType::Scanner Scanner;
public:
explicit SkFontStyleSet_Android(const FontFamily& family, const Scanner& scanner,
const bool cacheFontFiles) {
const SkString* cannonicalFamilyName = nullptr;
if (family.fNames.count() > 0) {
cannonicalFamilyName = &family.fNames[0];
}
fFallbackFor = family.fFallbackFor;
// TODO? make this lazy
for (int i = 0; i < family.fFonts.count(); ++i) {
const FontFileInfo& fontFile = family.fFonts[i];
SkString pathName(family.fBasePath);
pathName.append(fontFile.fFileName);
std::unique_ptr<SkStreamAsset> stream = SkStream::MakeFromFile(pathName.c_str());
if (!stream) {
SkDEBUGF("Requested font file %s does not exist or cannot be opened.\n",
pathName.c_str());
continue;
}
const int ttcIndex = fontFile.fIndex;
SkString familyName;
SkFontStyle style;
bool isFixedWidth;
Scanner::AxisDefinitions axisDefinitions;
if (!scanner.scanFont(stream.get(), ttcIndex,
&familyName, &style, &isFixedWidth, &axisDefinitions))
{
SkDEBUGF("Requested font file %s exists, but is not a valid font.\n",
pathName.c_str());
continue;
}
int weight = fontFile.fWeight != 0 ? fontFile.fWeight : style.weight();
SkFontStyle::Slant slant = style.slant();
switch (fontFile.fStyle) {
case FontFileInfo::Style::kAuto: slant = style.slant(); break;
case FontFileInfo::Style::kNormal: slant = SkFontStyle::kUpright_Slant; break;
case FontFileInfo::Style::kItalic: slant = SkFontStyle::kItalic_Slant; break;
default: SkASSERT(false); break;
}
style = SkFontStyle(weight, style.width(), slant);
uint32_t variant = family.fVariant;
if (kDefault_FontVariant == variant) {
variant = kCompact_FontVariant | kElegant_FontVariant;
}
// The first specified family name overrides the family name found in the font.
// TODO: SkTypeface_AndroidSystem::onCreateFamilyNameIterator should return
// all of the specified family names in addition to the names found in the font.
if (cannonicalFamilyName != nullptr) {
familyName = *cannonicalFamilyName;
}
SkAutoSTMalloc<4, SkFixed> axisValues(axisDefinitions.count());
SkFontArguments::VariationPosition position = {
fontFile.fVariationDesignPosition.begin(),
fontFile.fVariationDesignPosition.count()
};
Scanner::computeAxisValues(axisDefinitions, position,
axisValues, familyName);
fStyles.push_back().reset(new SkTypeface_AndroidSystem(
pathName, cacheFontFiles, ttcIndex, axisValues.get(), axisDefinitions.count(),
style, isFixedWidth, familyName, family.fLanguages, variant));
}
}
int count() override {
return fStyles.count();
}
void getStyle(int index, SkFontStyle* style, SkString* name) override {
if (index < 0 || fStyles.count() <= index) {
return;
}
if (style) {
*style = fStyles[index]->fontStyle();
}
if (name) {
name->reset();
}
}
SkTypeface_AndroidSystem* createTypeface(int index) override {
if (index < 0 || fStyles.count() <= index) {
return nullptr;
}
return SkRef(fStyles[index].get());
}
SkTypeface_AndroidSystem* matchStyle(const SkFontStyle& pattern) override {
return static_cast<SkTypeface_AndroidSystem*>(this->matchStyleCSS3(pattern));
}
private:
SkTArray<sk_sp<SkTypeface_AndroidSystem>> fStyles;
SkString fFallbackFor;
friend struct NameToFamily;
friend class SkFontMgr_Android;
using INHERITED = SkFontStyleSet;
};
/** On Android a single family can have many names, but our API assumes unique names.
* Map names to the back end so that all names for a given family refer to the same
* (non-replicated) set of typefaces.
* SkTDict<> doesn't let us do index-based lookup, so we write our own mapping.
*/
struct NameToFamily {
SkString name;
SkFontStyleSet_Android* styleSet;
};
class SkFontMgr_Android : public SkFontMgr {
public:
SkFontMgr_Android(const SkFontMgr_Android_CustomFonts* custom) {
SkTDArray<FontFamily*> families;
if (custom && SkFontMgr_Android_CustomFonts::kPreferSystem != custom->fSystemFontUse) {
SkString base(custom->fBasePath);
SkFontMgr_Android_Parser::GetCustomFontFamilies(
families, base, custom->fFontsXml, custom->fFallbackFontsXml);
}
if (!custom ||
(custom && SkFontMgr_Android_CustomFonts::kOnlyCustom != custom->fSystemFontUse))
{
SkFontMgr_Android_Parser::GetSystemFontFamilies(families);
}
if (custom && SkFontMgr_Android_CustomFonts::kPreferSystem == custom->fSystemFontUse) {
SkString base(custom->fBasePath);
SkFontMgr_Android_Parser::GetCustomFontFamilies(
families, base, custom->fFontsXml, custom->fFallbackFontsXml);
}
this->buildNameToFamilyMap(families, custom ? custom->fIsolated : false);
this->findDefaultStyleSet();
families.deleteAll();
}
protected:
/** Returns not how many families we have, but how many unique names
* exist among the families.
*/
int onCountFamilies() const override {
return fNameToFamilyMap.count();
}
void onGetFamilyName(int index, SkString* familyName) const override {
if (index < 0 || fNameToFamilyMap.count() <= index) {
familyName->reset();
return;
}
familyName->set(fNameToFamilyMap[index].name);
}
SkFontStyleSet* onCreateStyleSet(int index) const override {
if (index < 0 || fNameToFamilyMap.count() <= index) {
return nullptr;
}
return SkRef(fNameToFamilyMap[index].styleSet);
}
SkFontStyleSet* onMatchFamily(const char familyName[]) const override {
if (!familyName) {
return nullptr;
}
SkAutoAsciiToLC tolc(familyName);
for (int i = 0; i < fNameToFamilyMap.count(); ++i) {
if (fNameToFamilyMap[i].name.equals(tolc.lc())) {
return SkRef(fNameToFamilyMap[i].styleSet);
}
}
// TODO: eventually we should not need to name fallback families.
for (int i = 0; i < fFallbackNameToFamilyMap.count(); ++i) {
if (fFallbackNameToFamilyMap[i].name.equals(tolc.lc())) {
return SkRef(fFallbackNameToFamilyMap[i].styleSet);
}
}
return nullptr;
}
SkTypeface* onMatchFamilyStyle(const char familyName[],
const SkFontStyle& style) const override {
sk_sp<SkFontStyleSet> sset(this->matchFamily(familyName));
return sset->matchStyle(style);
}
SkTypeface* onMatchFaceStyle(const SkTypeface* typeface,
const SkFontStyle& style) const override {
for (int i = 0; i < fStyleSets.count(); ++i) {
for (int j = 0; j < fStyleSets[i]->fStyles.count(); ++j) {
if (fStyleSets[i]->fStyles[j].get() == typeface) {
return fStyleSets[i]->matchStyle(style);
}
}
}
return nullptr;
}
static sk_sp<SkTypeface_AndroidSystem> find_family_style_character(
const SkString& familyName,
const SkTArray<NameToFamily, true>& fallbackNameToFamilyMap,
const SkFontStyle& style, bool elegant,
const SkString& langTag, SkUnichar character)
{
for (int i = 0; i < fallbackNameToFamilyMap.count(); ++i) {
SkFontStyleSet_Android* family = fallbackNameToFamilyMap[i].styleSet;
if (familyName != family->fFallbackFor) {
continue;
}
sk_sp<SkTypeface_AndroidSystem> face(family->matchStyle(style));
if (!langTag.isEmpty() &&
std::none_of(face->fLang.begin(), face->fLang.end(), [&](SkLanguage lang){
return lang.getTag().startsWith(langTag.c_str());
}))
{
continue;
}
if (SkToBool(face->fVariantStyle & kElegant_FontVariant) != elegant) {
continue;
}
if (face->unicharToGlyph(character) != 0) {
return face;
}
}
return nullptr;
}
SkTypeface* onMatchFamilyStyleCharacter(const char familyName[],
const SkFontStyle& style,
const char* bcp47[],
int bcp47Count,
SkUnichar character) const override {
// The variant 'elegant' is 'not squashed', 'compact' is 'stays in ascent/descent'.
// The variant 'default' means 'compact and elegant'.
// As a result, it is not possible to know the variant context from the font alone.
// TODO: add 'is_elegant' and 'is_compact' bits to 'style' request.
SkString familyNameString(familyName);
for (const SkString& currentFamilyName : { familyNameString, SkString() }) {
// The first time match anything elegant, second time anything not elegant.
for (int elegant = 2; elegant --> 0;) {
for (int bcp47Index = bcp47Count; bcp47Index --> 0;) {
SkLanguage lang(bcp47[bcp47Index]);
while (!lang.getTag().isEmpty()) {
sk_sp<SkTypeface_AndroidSystem> matchingTypeface =
find_family_style_character(currentFamilyName, fFallbackNameToFamilyMap,
style, SkToBool(elegant),
lang.getTag(), character);
if (matchingTypeface) {
return matchingTypeface.release();
}
lang = lang.getParent();
}
}
sk_sp<SkTypeface_AndroidSystem> matchingTypeface =
find_family_style_character(currentFamilyName, fFallbackNameToFamilyMap,
style, SkToBool(elegant),
SkString(), character);
if (matchingTypeface) {
return matchingTypeface.release();
}
}
}
return nullptr;
}
sk_sp<SkTypeface> onMakeFromData(sk_sp<SkData> data, int ttcIndex) const override {
return this->makeFromStream(std::unique_ptr<SkStreamAsset>(new SkMemoryStream(std::move(data))),
ttcIndex);
}
sk_sp<SkTypeface> onMakeFromFile(const char path[], int ttcIndex) const override {
std::unique_ptr<SkStreamAsset> stream = SkStream::MakeFromFile(path);
return stream ? this->makeFromStream(std::move(stream), ttcIndex) : nullptr;
}
sk_sp<SkTypeface> onMakeFromStreamIndex(std::unique_ptr<SkStreamAsset> stream,
int ttcIndex) const override {
bool isFixedPitch;
SkFontStyle style;
SkString name;
if (!fScanner.scanFont(stream.get(), ttcIndex, &name, &style, &isFixedPitch, nullptr)) {
return nullptr;
}
auto data = std::make_unique<SkFontData>(std::move(stream), ttcIndex, nullptr, 0);
return sk_sp<SkTypeface>(new SkTypeface_AndroidStream(std::move(data),
style, isFixedPitch, name));
}
sk_sp<SkTypeface> onMakeFromStreamArgs(std::unique_ptr<SkStreamAsset> stream,
const SkFontArguments& args) const override {
using Scanner = SkTypeface_FreeType::Scanner;
bool isFixedPitch;
SkFontStyle style;
SkString name;
Scanner::AxisDefinitions axisDefinitions;
if (!fScanner.scanFont(stream.get(), args.getCollectionIndex(),
&name, &style, &isFixedPitch, &axisDefinitions))
{
return nullptr;
}
SkAutoSTMalloc<4, SkFixed> axisValues(axisDefinitions.count());
Scanner::computeAxisValues(axisDefinitions, args.getVariationDesignPosition(),
axisValues, name);
auto data = std::make_unique<SkFontData>(std::move(stream), args.getCollectionIndex(),
axisValues.get(), axisDefinitions.count());
return sk_sp<SkTypeface>(new SkTypeface_AndroidStream(std::move(data),
style, isFixedPitch, name));
}
sk_sp<SkTypeface> onMakeFromFontData(std::unique_ptr<SkFontData> data) const override {
SkStreamAsset* stream(data->getStream());
bool isFixedPitch;
SkFontStyle style;
SkString name;
if (!fScanner.scanFont(stream, data->getIndex(), &name, &style, &isFixedPitch, nullptr)) {
return nullptr;
}
return sk_sp<SkTypeface>(new SkTypeface_AndroidStream(std::move(data),
style, isFixedPitch, name));
}
sk_sp<SkTypeface> onLegacyMakeTypeface(const char familyName[], SkFontStyle style) const override {
if (familyName) {
// On Android, we must return nullptr when we can't find the requested
// named typeface so that the system/app can provide their own recovery
// mechanism. On other platforms we'd provide a typeface from the
// default family instead.
return sk_sp<SkTypeface>(this->onMatchFamilyStyle(familyName, style));
}
return sk_sp<SkTypeface>(fDefaultStyleSet->matchStyle(style));
}
private:
SkTypeface_FreeType::Scanner fScanner;
SkTArray<sk_sp<SkFontStyleSet_Android>> fStyleSets;
sk_sp<SkFontStyleSet> fDefaultStyleSet;
SkTArray<NameToFamily, true> fNameToFamilyMap;
SkTArray<NameToFamily, true> fFallbackNameToFamilyMap;
void addFamily(FontFamily& family, const bool isolated, int familyIndex) {
SkTArray<NameToFamily, true>* nameToFamily = &fNameToFamilyMap;
if (family.fIsFallbackFont) {
nameToFamily = &fFallbackNameToFamilyMap;
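            // Fallback families without a name get a synthetic, index-based one so they can
            // still be registered and looked up.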
if (0 == family.fNames.count()) {
SkString& fallbackName = family.fNames.push_back();
fallbackName.printf("%.2x##fallback", familyIndex);
}
}
sk_sp<SkFontStyleSet_Android> newSet =
sk_make_sp<SkFontStyleSet_Android>(family, fScanner, isolated);
if (0 == newSet->count()) {
return;
}
for (const SkString& name : family.fNames) {
nameToFamily->emplace_back(NameToFamily{name, newSet.get()});
}
fStyleSets.emplace_back(std::move(newSet));
}
void buildNameToFamilyMap(SkTDArray<FontFamily*> families, const bool isolated) {
int familyIndex = 0;
for (FontFamily* family : families) {
addFamily(*family, isolated, familyIndex++);
family->fallbackFamilies.foreach([this, isolated, &familyIndex]
(SkString, std::unique_ptr<FontFamily>* fallbackFamily) {
addFamily(**fallbackFamily, isolated, familyIndex++);
}
);
}
}
void findDefaultStyleSet() {
SkASSERT(!fStyleSets.empty());
static const char* defaultNames[] = { "sans-serif" };
for (const char* defaultName : defaultNames) {
fDefaultStyleSet.reset(this->onMatchFamily(defaultName));
if (fDefaultStyleSet) {
break;
}
}
if (nullptr == fDefaultStyleSet) {
fDefaultStyleSet = fStyleSets[0];
}
SkASSERT(fDefaultStyleSet);
}
using INHERITED = SkFontMgr;
};
#ifdef SK_DEBUG
static char const * const gSystemFontUseStrings[] = {
"OnlyCustom", "PreferCustom", "PreferSystem"
};
#endif
sk_sp<SkFontMgr> SkFontMgr_New_Android(const SkFontMgr_Android_CustomFonts* custom) {
if (custom) {
SkASSERT(0 <= custom->fSystemFontUse);
SkASSERT(custom->fSystemFontUse < SK_ARRAY_COUNT(gSystemFontUseStrings));
SkDEBUGF("SystemFontUse: %s BasePath: %s Fonts: %s FallbackFonts: %s\n",
gSystemFontUseStrings[custom->fSystemFontUse],
custom->fBasePath,
custom->fFontsXml,
custom->fFallbackFontsXml);
}
return sk_make_sp<SkFontMgr_Android>(custom);
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>English</string>
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key>
<string>com.yourcompany.openFrameworks</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1.0</string>
</dict>
</plist>
| {
"pile_set_name": "Github"
} |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class AdmissionregistrationApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def get_api_group(self, **kwargs):
"""
get information of a group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_api_group(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: V1APIGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_api_group_with_http_info(**kwargs)
else:
(data) = self.get_api_group_with_http_info(**kwargs)
return data
def get_api_group_with_http_info(self, **kwargs):
"""
get information of a group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_api_group_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: V1APIGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_group" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/apis/admissionregistration.k8s.io/'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIGroup',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
# Make sure we're using the version of pylib in this repo, not one installed
# elsewhere on the system.
sys.path.insert(0, os.path.join(os.path.dirname(sys.argv[0]), 'pylib'))
import gyp
if __name__ == '__main__':
sys.exit(gyp.script_main())
| {
"pile_set_name": "Github"
} |
//
// ActionSheetColors.swift
// Sheeeeeeeeet
//
// Created by Daniel Saidi on 2019-08-10.
// Copyright © 2019 Daniel Saidi. All rights reserved.
//
import UIKit
/**
This enum specifies standard action sheet colors, which are
just aliases for system colors. Use the `color` property to
get the resolved `UIColor` representation.
The enum cases are aliases for iOS 13 semantic and adaptive
colors, but provide fallbacks to older, non-adaptive colors
on iOS 12 and earlier.
*/
public enum ActionSheetColor: CaseIterable {
case actionText
case background
case danger
case disabledText
case discreteText
case overlay
case separator
case text
case tint
}
// MARK: - Public Extensions
public extension ActionSheetColor {
/**
Get the UI color representation of the semantic color.
*/
var color: UIColor {
if #available(iOS 13.0, *) {
return adaptiveColor
} else {
return legacyColor
}
}
}
// MARK: - Private Extensions
private extension ActionSheetColor {
var overlay: UIColor {
UIColor.black.withAlphaComponent(0.6)
}
@available(iOS 13.0, *)
var adaptiveColor: UIColor {
switch self {
case .actionText: return .systemBlue
case .background: return .tertiarySystemBackground
case .danger: return .systemRed
case .disabledText: return .secondaryLabel
case .discreteText: return .secondaryLabel
case .overlay: return overlay
case .separator: return .separator
case .text, .tint: return .label
}
}
var legacyColor: UIColor {
switch self {
case .actionText: return .blue
case .background: return .white
case .danger: return .red
case .disabledText: return .lightGray
case .discreteText: return .lightGray
case .overlay: return overlay
case .separator: return .lightGray
case .text, .tint: return .darkText
}
}
}
| {
"pile_set_name": "Github"
} |
#!/bin/sh
# SPDX-License-Identifier: GPL-2.0
# Simple script to update the version of DTC carried by the Linux kernel
#
# This script assumes that the dtc and the linux git trees are in the
# same directory. After building dtc in the dtc directory, it copies the
# source files and generated source file(s) into the scripts/dtc directory
# in the kernel and creates a git commit updating them to the new
# version.
#
# Usage: from the top level Linux source tree, run:
# $ ./scripts/dtc/update-dtc-source.sh
#
# The script will change into the dtc tree, build and test dtc, copy the
# relevant files into the kernel tree and create a git commit. The commit
# message will need to be modified to reflect the version of DTC being
# imported
#
# TODO:
# This script is pretty basic, but it is seldom used so a few manual tasks
# aren't a big deal. If anyone is interested in making it more robust, the
# the following would be nice:
# * Actually fail to complete if any testcase fails.
# - The dtc "make check" target needs to return a failure
# * Extract the version number from the dtc repo for the commit message
# * Build dtc in the kernel tree
# * run 'make check" on dtc built from the kernel tree
set -ev
DTC_UPSTREAM_PATH=`pwd`/../dtc
DTC_LINUX_PATH=`pwd`/scripts/dtc
DTC_SOURCE="checks.c data.c dtc.c dtc.h flattree.c fstree.c livetree.c srcpos.c \
srcpos.h treesource.c util.c util.h version_gen.h yamltree.c Makefile.dtc \
dtc-lexer.l dtc-parser.y"
LIBFDT_SOURCE="Makefile.libfdt fdt.c fdt.h fdt_addresses.c fdt_empty_tree.c \
fdt_overlay.c fdt_ro.c fdt_rw.c fdt_strerror.c fdt_sw.c \
fdt_wip.c libfdt.h libfdt_env.h libfdt_internal.h"
get_last_dtc_version() {
git log --oneline scripts/dtc/ | grep 'upstream' | head -1 | sed -e 's/^.* \(.*\)/\1/'
}
last_dtc_ver=$(get_last_dtc_version)
# Build DTC
cd $DTC_UPSTREAM_PATH
make clean
make check
dtc_version=$(git describe HEAD)
dtc_log=$(git log --oneline ${last_dtc_ver}..)
# Copy the files into the Linux tree
cd $DTC_LINUX_PATH
for f in $DTC_SOURCE; do
cp ${DTC_UPSTREAM_PATH}/${f} ${f}
git add ${f}
done
for f in $LIBFDT_SOURCE; do
cp ${DTC_UPSTREAM_PATH}/libfdt/${f} libfdt/${f}
git add libfdt/${f}
done
sed -i -- 's/#include <libfdt_env.h>/#include "libfdt_env.h"/g' ./libfdt/libfdt.h
sed -i -- 's/#include <fdt.h>/#include "fdt.h"/g' ./libfdt/libfdt.h
git add ./libfdt/libfdt.h
commit_msg=$(cat << EOF
scripts/dtc: Update to upstream version ${dtc_version}
This adds the following commits from upstream:
${dtc_log}
EOF
)
git commit -e -v -s -m "${commit_msg}"
| {
"pile_set_name": "Github"
} |
// VulcanizeDB
// Copyright © 2019 Vulcanize
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package shared
// These types serve as very loose wrappers around a generic underlying interface{}
type RawChainData interface{}
// The concrete type underneath StreamedIPLDs should not be a pointer
type ConvertedData interface {
Height() int64
}
type CIDsForIndexing interface{}
type CIDsForFetching interface{}
type IPLDs interface {
Height() int64
}
type Gap struct {
Start uint64
Stop uint64
}
| {
"pile_set_name": "Github"
} |
package utils
import (
"testing"
)
func TestCompareVersionVersion(t *testing.T) {
v1 := "v1.12.1"
v2 := "v1.11.2"
ret, err := CompareVersion(v1, v2)
if err != nil {
t.Fatal(err)
}
if !ret {
t.Fatal("Version comparison failed.")
}
}
| {
"pile_set_name": "Github"
} |
<section>
<section>
<title>Properties</title>
<table>
<thead>
<tr>
<td>Name</td>
</tr>
</thead>
</table>
</section>
<section>
<title>Methods</title>
<table>
<thead>
<tr>
<td>Name</td>
</tr>
</thead>
</table>
</section>
</section>
| {
"pile_set_name": "Github"
} |
StartChar: u1EE61
Encoding: 126561 126561 5990
Width: 436
Flags: HW
LayerCount: 2
Fore
Refer: 6051 126588 N 1 0 0 1 0 0 3
Refer: 194 -1 N 1 0 0 1 244 -239 2
EndChar
| {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head><link rel="apple-touch-icon" sizes="180x180" href="/glide/apple-touch-icon.png"><link rel="icon" type="image/png" sizes="32x32" href="/glide/favicon-32x32.png"><link rel="icon" type="image/png" sizes="16x16" href="/glide/favicon-16x16.png"><link rel="manifest" href="/glide/manifest.json">
<!-- Generated by javadoc (1.8.0_151) on Fri Aug 17 09:17:46 PDT 2018 -->
<title>com.bumptech.glide.integration.volley (glide API)</title>
<meta name="date" content="2018-08-17">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../script.js"></script>
</head>
<body>
<h1 class="bar"><a href="../../../../../com/bumptech/glide/integration/volley/package-summary.html" target="classFrame">com.bumptech.glide.integration.volley</a></h1>
<div class="indexContainer">
<h2 title="Interfaces">Interfaces</h2>
<ul title="Interfaces">
<li><a href="VolleyRequestFactory.html" title="interface in com.bumptech.glide.integration.volley" target="classFrame"><span class="interfaceName">VolleyRequestFactory</span></a></li>
</ul>
<h2 title="Classes">Classes</h2>
<ul title="Classes">
<li><a href="VolleyGlideModule.html" title="class in com.bumptech.glide.integration.volley" target="classFrame">VolleyGlideModule</a></li>
<li><a href="VolleyLibraryGlideModule.html" title="class in com.bumptech.glide.integration.volley" target="classFrame">VolleyLibraryGlideModule</a></li>
<li><a href="VolleyStreamFetcher.html" title="class in com.bumptech.glide.integration.volley" target="classFrame">VolleyStreamFetcher</a></li>
<li><a href="VolleyStreamFetcher.GlideRequest.html" title="class in com.bumptech.glide.integration.volley" target="classFrame">VolleyStreamFetcher.GlideRequest</a></li>
<li><a href="VolleyUrlLoader.html" title="class in com.bumptech.glide.integration.volley" target="classFrame">VolleyUrlLoader</a></li>
<li><a href="VolleyUrlLoader.Factory.html" title="class in com.bumptech.glide.integration.volley" target="classFrame">VolleyUrlLoader.Factory</a></li>
</ul>
</div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
#!/bin/bash
mkdir var/build
#OS X, prevent ._ files
export COPYFILE_DISABLE=true
tar -cvf var/build/Pulsestorm_Modulelist.tar app/code/community/Pulsestorm/Modulelist app/etc/modules/Pulsestorm_Modulelist.xml
| {
"pile_set_name": "Github"
} |
/*
(c) copyright 1988 by the Vrije Universiteit, Amsterdam, The Netherlands.
See the copyright notice in the ACK home directory, in the file "Copyright".
*/
/*
Module: SYSTEM
Author: Ceriel J.H. Jacobs
Version: $Id$
*/
/*
An implementation of the Modula-2 NEWPROCESS and TRANSFER facilities
using the topsize, topsave, and topload facilities.
For each coroutine, a proc structure is built. For the main routine,
a static space is declared to save its stack. For the other coroutines,
the user specifies this space.
*/
#include <unistd.h>
#include "libm2.h"
#include <m2_traps.h>
#define MAXMAIN 2048
static struct proc mainproc[MAXMAIN / sizeof(struct proc) + 1];
static struct proc* curproc = 0; /* current coroutine */
extern char* MainLB; /* stack break of main routine */
void _SYSTEM__NEWPROCESS(
int (*p)(void) /* coroutine procedure */,
struct proc* a /* pointer to area for saved stack-frame */,
unsigned int n /* size of this area */,
struct proc** p1 /* where to leave coroutine descriptor,
in this implementation the address of
the area for saved stack-frame(s) */
)
{
/* This procedure creates a new coroutine, but does not
transfer control to it. The routine "topsize" will compute the
stack break, which will be the local base of this routine.
Notice that we can do this because we do not need the stack
above this point for this coroutine. In Modula-2, coroutines
must be level 0 procedures without parameters.
*/
char* brk = 0;
unsigned sz = topsize(&brk);
if (sz + sizeof(struct proc) > n)
{
/* not enough space */
TRP(M2_TOOLARGE);
}
a->size = n;
a->proc = p;
a->brk = brk;
*p1 = a;
if (topsave(brk, a + 1))
/* stack frame saved; now just return */
;
else
{
/* We get here through the first transfer to the coroutine
created above.
This also means that curproc is now set to this coroutine.
We cannot trust the parameters anymore.
Just call the coroutine procedure.
*/
(*(curproc->proc))();
_cleanup();
_exit(0);
}
}
void _SYSTEM__TRANSFER(struct proc** a, struct proc** b)
{
/* transfer from one coroutine to another, saving the current
descriptor in the space indicated by "a", and transfering to
the coroutine in descriptor "b".
*/
unsigned size;
if (!curproc)
{
/* the current coroutine is the main process;
initialize a coroutine descriptor for it ...
*/
mainproc[0].brk = MainLB;
mainproc[0].size = sizeof(mainproc);
curproc = &mainproc[0];
}
*a = curproc; /* save current descriptor in "a" */
if (*b == curproc)
{
/* transfer to itself is a no-op */
return;
}
size = topsize(&(curproc->brk));
if (size + sizeof(struct proc) > curproc->size)
{
TRP(M2_TOOLARGE);
}
if (topsave(curproc->brk, curproc + 1))
{
/* stack top saved. Now restore context of target
coroutine
*/
curproc = *b;
topload(curproc + 1);
/* we never get here ... */
}
/* but we do get here, when a transfer is done to the coroutine in "a".
*/
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<dependenciesRoot>
<dependency className="testLangWithRT.typesystem.TypesystemDescriptor">
<classNode dependClassName="jetbrains.mps.lang.typesystem.runtime.InferenceRule_Runtime" />
<classNode dependClassName="testLangWithRT.typesystem.typeof_Sout_InferenceRule" />
<classNode extendsClassName="jetbrains.mps.lang.typesystem.runtime.BaseHelginsDescriptor" />
</dependency>
<dependency className="testLangWithRT.typesystem.typeof_Sout_InferenceRule">
<classNode dependClassName="jetbrains.mps.lang.typesystem.runtime.IsApplicableStatus" />
<classNode dependClassName="jetbrains.mps.smodel.adapter.structure.MetaAdapterFactory" />
<classNode dependClassName="jetbrains.mps.smodel.builder.SNodeBuilder" />
<classNode dependClassName="jetbrains.mps.typesystem.inference.EquationInfo" />
<classNode dependClassName="jetbrains.mps.typesystem.inference.TypeCheckingContext" />
<classNode dependClassName="org.jetbrains.mps.openapi.language.SAbstractConcept" />
<classNode dependClassName="org.jetbrains.mps.openapi.language.SConcept" />
<classNode dependClassName="org.jetbrains.mps.openapi.model.SNode" />
<classNode dependClassName="org.jetbrains.mps.openapi.persistence.PersistenceFacade" />
<classNode extendsClassName="jetbrains.mps.lang.typesystem.runtime.AbstractInferenceRule_Runtime" />
<classNode extendsClassName="jetbrains.mps.lang.typesystem.runtime.InferenceRule_Runtime" />
</dependency>
</dependenciesRoot>
| {
"pile_set_name": "Github"
} |
/*
* Copyright Beijing 58 Information Technology Co.,Ltd.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bj58.oceanus.exchange.executors;
import com.bj58.oceanus.core.context.StatementContext;
import com.bj58.oceanus.exchange.executors.jdbc.BatchExecutor;
import com.bj58.oceanus.exchange.executors.jdbc.SimpleExecutor;
/**
 * Executor builder
*
* @author Service Platform Architecture Team ([email protected])
*/
@SuppressWarnings("rawtypes")
public class ExecutorsBuilder {
static final Executor DEFAULT_EXECUTOR = new SimpleExecutor();
static final Executor BATCH_EXECUTOR = new BatchExecutor();
public static Executor<?> build(StatementContext context) {
if (context.isBatch()) {
return BATCH_EXECUTOR;
}
return DEFAULT_EXECUTOR;
}
}
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env perl
# [[[ PREPROCESSOR ]]]
# <<< EXECUTE_SUCCESS: "$VAR1 = {'number_hashref' => {'a' => 'number'}};" >>>
# <<< EXECUTE_SUCCESS: "$VAR1 = {'number_hashref' => {'a' => 'number','b' => 'number'}};" >>>
# <<< EXECUTE_SUCCESS: "$VAR1 = {'hashref' => {'a' => 'number','b' => 'integer'}};" >>>
# <<< EXECUTE_SUCCESS: "$VAR1 = {'number_hashref' => {'a' => 'number','b' => 'number','c' => 'number','d' => 'number','e' => 'number'}};" >>>
# <<< EXECUTE_SUCCESS: "$VAR1 = {'hashref' => {'a' => 'number','b' => 'number','c' => 'number','d' => 'integer','e' => 'number'}};" >>>
# [[[ HEADER ]]]
use RPerl;
use strict;
use warnings;
our $VERSION = 0.001_000;
# [[[ CRITICS ]]]
## no critic qw(ProhibitUselessNoCritic ProhibitMagicNumbers RequireCheckedSyscalls) # USER DEFAULT 1: allow numeric values & print operator
## no critic qw(RequireInterpolationOfMetachars) # USER DEFAULT 2: allow single-quoted control characters & sigils
# [[[ OPERATIONS ]]]
$Data::Dumper::Indent = 0;
my hashref $u = { a => 2.2 };
print Dumper( types($u) ) . "\n";
$u = { a => 2.2, b => 3.2 };
print Dumper( types($u) ) . "\n";
$u = { a => 2.2, b => 3 };
print Dumper( types($u) ) . "\n";
$u = { a => 2.2, b => 3.3, c => 5.5, d => 7.7, e => 9.9 };
print Dumper( types($u) ) . "\n";
$u = { a => 2.2, b => 3.3, c => 5.5, d => 7, e => 9.9 };
print Dumper( types($u) ) . "\n";
| {
"pile_set_name": "Github"
} |
package com.planet_ink.coffee_mud.Abilities.Spells;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2002-2020 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Spell_Shatter extends Spell
{
@Override
public String ID()
{
return "Spell_Shatter";
}
private final static String localizedName = CMLib.lang().L("Shatter");
@Override
public String name()
{
return localizedName;
}
@Override
protected int canTargetCode()
{
return CAN_MOBS|CAN_ITEMS;
}
@Override
public int abstractQuality()
{
return Ability.QUALITY_MALICIOUS;
}
@Override
public int classificationCode()
{
return Ability.ACODE_SPELL|Ability.DOMAIN_ALTERATION;
}
public Item getItem(final MOB mobTarget)
{
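		// Prefer gear that is currently worn or wielded and subject to wear-and-tear;
		// fall back to items merely carried in inventory.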
final List<Item> goodPossibilities=new ArrayList<Item>();
final List<Item> possibilities=new ArrayList<Item>();
for(int i=0;i<mobTarget.numItems();i++)
{
final Item item=mobTarget.getItem(i);
if((item!=null)
&&(item.subjectToWearAndTear()))
{
if(item.amWearingAt(Wearable.IN_INVENTORY))
possibilities.add(item);
else
goodPossibilities.add(item);
}
}
if(goodPossibilities.size()>0)
return goodPossibilities.get(CMLib.dice().roll(1,goodPossibilities.size(),-1));
else
if(possibilities.size()>0)
return possibilities.get(CMLib.dice().roll(1,possibilities.size(),-1));
return null;
}
@Override
public int castingQuality(final MOB mob, final Physical target)
{
if(mob!=null)
{
if((target instanceof MOB)&&(mob!=target))
{
final Item I=getItem((MOB)target);
if(I==null)
return Ability.QUALITY_INDIFFERENT;
}
}
return super.castingQuality(mob,target);
}
@Override
public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel)
{
final MOB mobTarget=getTarget(mob,commands,givenTarget,true,false);
Item target=null;
if(mobTarget!=null)
{
target=getItem(mobTarget);
if(target==null)
return maliciousFizzle(mob,mobTarget,L("<S-NAME> attempt(s) a shattering spell at <T-NAMESELF>, but nothing happens."));
}
if((target==null)&&(mobTarget!=null))
target=getTarget(mobTarget,mobTarget.location(),givenTarget,commands,Wearable.FILTER_ANY);
else
if((target==null)&&(mobTarget==null))
target=getTarget(mob,mob.location(),givenTarget,commands,Wearable.FILTER_UNWORNONLY);
if(target==null)
return false;
Room R=CMLib.map().roomLocation(target);
if(R==null)
R=mob.location();
if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
return false;
final boolean success=proficiencyCheck(mob,0,auto);
if(success)
{
final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?L("<T-NAME> starts vibrating!"):L("^S<S-NAME> utter(s) a shattering spell, causing <T-NAMESELF> to vibrate and resonate.^?"));
final CMMsg msg2=CMClass.getMsg(mob,mobTarget,this,verbalCastCode(mob,target,auto),null);
if((R.okMessage(mob,msg))&&((mobTarget==null)||(R.okMessage(mob,msg2))))
{
R.send(mob,msg);
if(mobTarget!=null)
R.send(mob,msg2);
if((msg.value()<=0)&&(msg2.value()<=0))
{
int damage=100+adjustedLevel(mob,asLevel)-target.phyStats().level();
if(CMLib.flags().isABonusItems(target))
damage=(int)Math.round(CMath.div(damage,2.0));
switch(target.material()&RawMaterial.MATERIAL_MASK)
{
case RawMaterial.MATERIAL_PAPER:
case RawMaterial.MATERIAL_CLOTH:
case RawMaterial.MATERIAL_VEGETATION:
case RawMaterial.MATERIAL_SYNTHETIC:
case RawMaterial.MATERIAL_LEATHER:
case RawMaterial.MATERIAL_FLESH:
damage=(int)Math.round(CMath.div(damage,3.0));
break;
case RawMaterial.MATERIAL_WOODEN:
damage=(int)Math.round(CMath.div(damage,1.5));
break;
case RawMaterial.MATERIAL_GLASS:
case RawMaterial.MATERIAL_ROCK:
damage=(int)Math.round(CMath.mul(damage,2.0));
break;
case RawMaterial.MATERIAL_PRECIOUS:
break;
case RawMaterial.MATERIAL_ENERGY:
case RawMaterial.MATERIAL_GAS:
damage=0;
break;
}
if((damage>0)&&(target.subjectToWearAndTear()))
target.setUsesRemaining(target.usesRemaining()-damage);
else
{
R.show(mob,target,CMMsg.MSG_OK_VISUAL,L("<T-NAME> seems otherwise unaffected."));
return true;
}
if(target.usesRemaining()>0)
target.recoverPhyStats();
else
{
target.setUsesRemaining(100);
if(mobTarget==null)
R.show(mob,target,CMMsg.MSG_OK_VISUAL,L("<T-NAME> is destroyed!"));
else
R.show(mobTarget,target,CMMsg.MSG_OK_VISUAL,L("<T-NAME>, possessed by <S-NAME>, is destroyed!"));
target.unWear();
target.destroy();
R.recoverRoomStats();
}
}
}
}
else
return maliciousFizzle(mob,null,L("<S-NAME> attempt(s) a shattering spell, but nothing happens."));
// return whether it worked
return success;
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
***************************************************************************
Copyright (c) 2010 Qcadoo Limited
Project: Qcadoo MES
Version: 1.4
This file is part of Qcadoo.
Qcadoo is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
***************************************************************************
-->
<ribbonExtension plugin="basic" view="generalParameters"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://schema.qcadoo.org/modules/ribbonExtension"
xsi:schemaLocation="http://schema.qcadoo.org/modules/ribbonExtension http://schema.qcadoo.org/modules/ribbonExtension.xsd">
<group name="parameters">
<bigButton name="dashboardParameters" icon="generateIcon24.png">
<script>
<![CDATA[
this.addOnChangeListener({
onClick: function() {
if(window.canClose()) {
#{form}.performEvent('redirectToDashboardParameters', []);
}
}
});
]]>
</script>
</bigButton>
</group>
</ribbonExtension>
| {
"pile_set_name": "Github"
} |
'label':'car' 'bounding box':(1535,420,1567,442)
'label':'car' 'bounding box':(1572,418,1601,446)
'label':'car' 'bounding box':(1549,423,1586,448)
'label':'car' 'bounding box':(1764,404,1920,463)
'label':'car' 'bounding box':(1933,404,2045,468)
'label':'car' 'bounding box':(1257,420,1294,456)
'label':'car' 'bounding box':(1269,411,1327,453)
'label':'car' 'bounding box':(1296,390,1435,464)
'label':'car' 'bounding box':(591,425,696,501)
'label':'car' 'bounding box':(72,414,266,448)
'label':'person' 'bounding box':(-2,497,122,777)
'label':'person' 'bounding box':(497,261,666,865)
'label':'person' 'bounding box':(333,177,730,928)
'label':'person' 'bounding box':(867,187,1176,1023)
'label':'person' 'bounding box':(661,108,1107,1022)
'label':'person' 'bounding box':(1943,649,2045,834)
'label':'person' 'bounding box':(1368,248,1569,815)
| {
"pile_set_name": "Github"
} |
/**
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.testdriver;
import java.io.File;
import java.net.URISyntaxException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.function.UnaryOperator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.asakusafw.testdriver.core.DataModelSinkFactory;
import com.asakusafw.testdriver.core.DataModelSource;
import com.asakusafw.testdriver.core.DataModelSourceFactory;
import com.asakusafw.testdriver.core.DifferenceSinkFactory;
import com.asakusafw.testdriver.core.ModelTester;
import com.asakusafw.testdriver.core.ModelVerifier;
import com.asakusafw.testdriver.core.TestDataToolProvider;
import com.asakusafw.testdriver.core.TestRule;
import com.asakusafw.testdriver.core.VerifierFactory;
import com.asakusafw.testdriver.core.VerifyRuleFactory;
/**
* An abstract super class of test driver outputs.
* @since 0.2.0
* @version 0.7.0
* @param <T> the data model type
*/
public class DriverOutputBase<T> extends DriverInputBase<T> {
private static final Logger LOG = LoggerFactory.getLogger(DriverOutputBase.class);
private VerifierFactory verifier;
private DataModelSinkFactory resultSink;
private DifferenceSinkFactory differenceSink;
private UnaryOperator<DataModelSource> resultFilter;
/**
* Creates a new instance.
* @param callerClass the current context class
* @param testTools the test data tools
* @param name the original input name
* @param modelType the data model type
* @since 0.6.0
*/
public DriverOutputBase(Class<?> callerClass, TestDataToolProvider testTools, String name, Class<T> modelType) {
super(callerClass, testTools, name, modelType);
}
/**
* Returns the verifier.
* @return the verifier, or {@code null} if not defined
* @since 0.2.3
*/
public VerifierFactory getVerifier() {
if (verifier == null) {
return null;
} else if (resultFilter == null) {
return verifier;
} else {
return toVerifierFactory(verifier, resultFilter);
}
}
/**
* Sets the verify rule for this output.
* @param verifier the verifier to set, or {@code null} to clear verifier
* @since 0.2.3
*/
protected final void setVerifier(VerifierFactory verifier) {
if (LOG.isDebugEnabled()) {
LOG.debug("Verifier: name={}, model={}, verifier={}", new Object[] { //$NON-NLS-1$
getName(),
getModelType().getName(),
verifier,
});
}
this.verifier = verifier;
}
/**
* Returns the result data sink for this output.
* @return the result data sink, or {@code null} if not defined
* @since 0.2.3
*/
public DataModelSinkFactory getResultSink() {
return resultSink;
}
/**
* Sets the result data sink for this output.
* The specified object will save the actual result of this.
* @param resultSink the result data sink to set, or {@code null} to clear the sink
* @since 0.2.3
*/
protected final void setResultSink(DataModelSinkFactory resultSink) {
if (LOG.isDebugEnabled()) {
LOG.debug("ResultSink: name={}, model={}, sink={}", new Object[] { //$NON-NLS-1$
getName(),
getModelType().getName(),
resultSink,
});
}
this.resultSink = resultSink;
}
/**
* Returns the difference information sink for this output.
* @return the difference information sink, or {@code null} if not defined
* @since 0.2.3
*/
public DifferenceSinkFactory getDifferenceSink() {
return differenceSink;
}
/**
* Sets the difference information sink for this output.
* The specified object will save the difference from expected result of this.
* @param differenceSink the difference sink to set, {@code null} to clear the sink
* @since 0.2.3
*/
protected final void setDifferenceSink(DifferenceSinkFactory differenceSink) {
if (LOG.isDebugEnabled()) {
LOG.debug("DifferenceSink: name={}, model={}, sink={}", new Object[] { //$NON-NLS-1$
getName(),
getModelType().getName(),
differenceSink,
});
}
this.differenceSink = differenceSink;
}
/**
* Sets the data model source filter for actual results of this output.
* @param filter the source filter
* @since 0.7.0
*/
protected final void setResultFilter(UnaryOperator<DataModelSource> filter) {
this.resultFilter = filter;
}
/**
* Converts an output path to {@link DataModelSinkFactory} to write to the path.
* @param path the output path
* @return the target sink factory
* @since 0.6.0
*/
protected final DataModelSinkFactory toDataModelSinkFactory(String path) {
return getTestTools().getDataModelSinkFactory(toOutputUri(path));
}
/**
* Converts an output path to {@link DataModelSinkFactory} to write to the path.
* @param path the output path
* @return the target sink factory
* @since 0.6.0
*/
protected final DataModelSinkFactory toDataModelSinkFactory(File path) {
return getTestTools().getDataModelSinkFactory(path.toURI());
}
/**
* Converts an output path to {@link DifferenceSinkFactory} to write to the path.
* @param path the output path
* @return the target sink factory
* @since 0.6.0
*/
protected final DifferenceSinkFactory toDifferenceSinkFactory(String path) {
return getTestTools().getDifferenceSinkFactory(toOutputUri(path));
}
/**
* Converts an output path to {@link DifferenceSinkFactory} to write to the path.
* @param path the output path
* @return the target sink factory
* @since 0.6.0
*/
protected final DifferenceSinkFactory toDifferenceSinkFactory(File path) {
return getTestTools().getDifferenceSinkFactory(path.toURI());
}
/**
* Converts {@link ModelVerifier} into {@link VerifyRuleFactory}.
* @param rulePath the path which represents the verification rule description
* @param extraRules the extra verification rules
* @return the equivalent {@link VerifyRuleFactory}
* @since 0.6.0
*/
protected final VerifyRuleFactory toVerifyRuleFactory(
String rulePath,
List<? extends ModelTester<? super T>> extraRules) {
try {
TestDataToolProvider tools = getTestTools();
List<TestRule> fragments = new ArrayList<>();
for (ModelTester<? super T> tester : extraRules) {
fragments.add(tools.toVerifyRuleFragment(getDataModelDefinition(), tester));
}
return tools.getVerifyRuleFactory(toUri(rulePath), fragments);
} catch (URISyntaxException e) {
throw new IllegalStateException(MessageFormat.format(
"Invalid rule path: {0}", //$NON-NLS-1$
rulePath), e);
}
}
/**
* Converts {@link ModelVerifier} into {@link VerifyRuleFactory}.
* @param modelVerifier the original verifier
* @return the equivalent {@link VerifyRuleFactory}
* @since 0.6.0
*/
protected final VerifyRuleFactory toVerifyRuleFactory(ModelVerifier<? super T> modelVerifier) {
return getTestTools().toVerifyRuleFactory(getDataModelDefinition(), modelVerifier);
}
/**
* Converts a pair of expected data set factory and verify rule factory into {@link VerifyRuleFactory}.
* @param expectedFactory the expected data set factory
* @param ruleFactory the verification rule factory
* @return the {@link VerifierFactory} which provides a verifier using the expected data set and verification rule
* @since 0.6.0
*/
protected final VerifierFactory toVerifierFactory(
DataModelSourceFactory expectedFactory, VerifyRuleFactory ruleFactory) {
return getTestTools().toVerifierFactory(expectedFactory, ruleFactory);
}
}
| {
"pile_set_name": "Github"
} |
{
"word": "Leg",
"definitions": [
"Each of the limbs on which a person or animal walks and stands.",
"A leg of an animal or bird as food.",
"A part of a garment covering a leg or part of a leg.",
"(with reference to a ball, especially in golf) sufficient momentum to reach the desired point.",
"(with reference to a product or idea) sustained popularity or success.",
"Each of the supports of a chair, table, or other structure.",
"A section or stage of a journey or process.",
"A run made on a single tack.",
"(in soccer and other sports) each of two games constituting a round of a competition.",
"A section of a relay or other race done in stages.",
"A single game in a darts match.",
"A branch of a forked object.",
"The half of the field (as divided lengthways through the pitch) away from which the batsman's feet are pointed when standing to receive the ball.",
"A deferential gesture made by drawing back one leg and bending it while keeping the front leg straight."
],
"parts-of-speech": "Noun"
}
| {
"pile_set_name": "Github"
} |
/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package runtime
import "k8s.io/apimachinery/pkg/runtime/schema"
// SetGroupVersionKind satisfies the ObjectKind interface for all objects that embed TypeMeta
func (obj *TypeMeta) SetGroupVersionKind(gvk schema.GroupVersionKind) {
obj.APIVersion, obj.Kind = gvk.ToAPIVersionAndKind()
}
// GroupVersionKind satisfies the ObjectKind interface for all objects that embed TypeMeta
func (obj *TypeMeta) GroupVersionKind() schema.GroupVersionKind {
return schema.FromAPIVersionAndKind(obj.APIVersion, obj.Kind)
}
func (obj *TypeMeta) GetObjectKind() schema.ObjectKind { return obj }
| {
"pile_set_name": "Github"
} |
"""Imported from the recipes section of the itertools documentation.
All functions taken from the recipes section of the itertools library docs
[1]_.
Some backward-compatible usability improvements have been made.
.. [1] http://docs.python.org/library/itertools.html#recipes
"""
from collections import deque
from itertools import (
chain, combinations, count, cycle, groupby, islice, repeat, starmap, tee
)
import operator
from random import randrange, sample, choice
from six import PY2
from six.moves import filter, filterfalse, map, range, zip, zip_longest
__all__ = [
'accumulate',
'all_equal',
'consume',
'dotproduct',
'first_true',
'flatten',
'grouper',
'iter_except',
'ncycles',
'nth',
'nth_combination',
'padnone',
'pairwise',
'partition',
'powerset',
'prepend',
'quantify',
'random_combination_with_replacement',
'random_combination',
'random_permutation',
'random_product',
'repeatfunc',
'roundrobin',
'tabulate',
'tail',
'take',
'unique_everseen',
'unique_justseen',
]
def accumulate(iterable, func=operator.add):
"""
Return an iterator whose items are the accumulated results of a function
(specified by the optional *func* argument) that takes two arguments.
By default, returns accumulated sums with :func:`operator.add`.
>>> list(accumulate([1, 2, 3, 4, 5])) # Running sum
[1, 3, 6, 10, 15]
>>> list(accumulate([1, 2, 3], func=operator.mul)) # Running product
[1, 2, 6]
>>> list(accumulate([0, 1, -1, 2, 3, 2], func=max)) # Running maximum
[0, 1, 1, 2, 3, 3]
This function is available in the ``itertools`` module for Python 3.2 and
greater.
"""
it = iter(iterable)
try:
total = next(it)
except StopIteration:
return
else:
yield total
for element in it:
total = func(total, element)
yield total
def take(n, iterable):
"""Return first *n* items of the iterable as a list.
>>> take(3, range(10))
[0, 1, 2]
>>> take(5, range(3))
[0, 1, 2]
Effectively a short replacement for ``next`` based iterator consumption
when you want more than one item, but less than the whole iterator.
"""
return list(islice(iterable, n))
def tabulate(function, start=0):
"""Return an iterator over the results of ``func(start)``,
``func(start + 1)``, ``func(start + 2)``...
*func* should be a function that accepts one integer argument.
If *start* is not specified it defaults to 0. It will be incremented each
time the iterator is advanced.
>>> square = lambda x: x ** 2
>>> iterator = tabulate(square, -3)
>>> take(4, iterator)
[9, 4, 1, 0]
"""
return map(function, count(start))
def tail(n, iterable):
"""Return an iterator over the last *n* items of *iterable*.
>>> t = tail(3, 'ABCDEFG')
>>> list(t)
['E', 'F', 'G']
"""
return iter(deque(iterable, maxlen=n))
def consume(iterator, n=None):
"""Advance *iterable* by *n* steps. If *n* is ``None``, consume it
entirely.
Efficiently exhausts an iterator without returning values. Defaults to
consuming the whole iterator, but an optional second argument may be
provided to limit consumption.
>>> i = (x for x in range(10))
>>> next(i)
0
>>> consume(i, 3)
>>> next(i)
4
>>> consume(i)
>>> next(i)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
StopIteration
If the iterator has fewer items remaining than the provided limit, the
whole iterator will be consumed.
>>> i = (x for x in range(3))
>>> consume(i, 5)
>>> next(i)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
StopIteration
"""
# Use functions that consume iterators at C speed.
if n is None:
# feed the entire iterator into a zero-length deque
deque(iterator, maxlen=0)
else:
# advance to the empty slice starting at position n
next(islice(iterator, n, n), None)
def nth(iterable, n, default=None):
"""Returns the nth item or a default value.
>>> l = range(10)
>>> nth(l, 3)
3
>>> nth(l, 20, "zebra")
'zebra'
"""
return next(islice(iterable, n, None), default)
def all_equal(iterable):
"""
Returns ``True`` if all the elements are equal to each other.
>>> all_equal('aaaa')
True
>>> all_equal('aaab')
False
"""
g = groupby(iterable)
return next(g, True) and not next(g, False)
def quantify(iterable, pred=bool):
"""Return the how many times the predicate is true.
>>> quantify([True, False, True])
2
"""
return sum(map(pred, iterable))
def padnone(iterable):
"""Returns the sequence of elements and then returns ``None`` indefinitely.
>>> take(5, padnone(range(3)))
[0, 1, 2, None, None]
Useful for emulating the behavior of the built-in :func:`map` function.
See also :func:`padded`.
"""
return chain(iterable, repeat(None))
def ncycles(iterable, n):
"""Returns the sequence elements *n* times
>>> list(ncycles(["a", "b"], 3))
['a', 'b', 'a', 'b', 'a', 'b']
"""
return chain.from_iterable(repeat(tuple(iterable), n))
def dotproduct(vec1, vec2):
"""Returns the dot product of the two iterables.
>>> dotproduct([10, 10], [20, 20])
400
"""
return sum(map(operator.mul, vec1, vec2))
def flatten(listOfLists):
"""Return an iterator flattening one level of nesting in a list of lists.
>>> list(flatten([[0, 1], [2, 3]]))
[0, 1, 2, 3]
See also :func:`collapse`, which can flatten multiple levels of nesting.
"""
return chain.from_iterable(listOfLists)
def repeatfunc(func, times=None, *args):
"""Call *func* with *args* repeatedly, returning an iterable over the
results.
If *times* is specified, the iterable will terminate after that many
repetitions:
>>> from operator import add
>>> times = 4
>>> args = 3, 5
>>> list(repeatfunc(add, times, *args))
[8, 8, 8, 8]
If *times* is ``None`` the iterable will not terminate:
>>> from random import randrange
>>> times = None
>>> args = 1, 11
>>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP
[2, 4, 8, 1, 8, 4]
"""
if times is None:
return starmap(func, repeat(args))
return starmap(func, repeat(args, times))
def pairwise(iterable):
"""Returns an iterator of paired items, overlapping, from the original
>>> take(4, pairwise(count()))
[(0, 1), (1, 2), (2, 3), (3, 4)]
"""
a, b = tee(iterable)
next(b, None)
return zip(a, b)
def grouper(n, iterable, fillvalue=None):
"""Collect data into fixed-length chunks or blocks.
>>> list(grouper(3, 'ABCDEFG', 'x'))
[('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]
"""
args = [iter(iterable)] * n
return zip_longest(fillvalue=fillvalue, *args)
def roundrobin(*iterables):
"""Yields an item from each iterable, alternating between them.
>>> list(roundrobin('ABC', 'D', 'EF'))
['A', 'D', 'E', 'B', 'F', 'C']
This function produces the same output as :func:`interleave_longest`, but
may perform better for some inputs (in particular when the number of
iterables is small).
"""
# Recipe credited to George Sakkis
pending = len(iterables)
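    # Cycle over each iterable's next() method; when one is exhausted, rebuild the
    # cycle with islice() so the exhausted iterator drops out.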
if PY2:
nexts = cycle(iter(it).next for it in iterables)
else:
nexts = cycle(iter(it).__next__ for it in iterables)
while pending:
try:
for next in nexts:
yield next()
except StopIteration:
pending -= 1
nexts = cycle(islice(nexts, pending))
def partition(pred, iterable):
"""
Returns a 2-tuple of iterables derived from the input iterable.
The first yields the items that have ``pred(item) == False``.
The second yields the items that have ``pred(item) == True``.
>>> is_odd = lambda x: x % 2 != 0
>>> iterable = range(10)
>>> even_items, odd_items = partition(is_odd, iterable)
>>> list(even_items), list(odd_items)
([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
"""
# partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9
t1, t2 = tee(iterable)
return filterfalse(pred, t1), filter(pred, t2)
def powerset(iterable):
"""Yields all possible subsets of the iterable.
>>> list(powerset([1, 2, 3]))
[(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]
:func:`powerset` will operate on iterables that aren't :class:`set`
instances, so repeated elements in the input will produce repeated elements
in the output. Use :func:`unique_everseen` on the input to avoid generating
duplicates:
>>> seq = [1, 1, 0]
>>> list(powerset(seq))
[(), (1,), (1,), (0,), (1, 1), (1, 0), (1, 0), (1, 1, 0)]
>>> from more_itertools import unique_everseen
>>> list(powerset(unique_everseen(seq)))
[(), (1,), (0,), (1, 0)]
"""
s = list(iterable)
return chain.from_iterable(combinations(s, r) for r in range(len(s) + 1))
def unique_everseen(iterable, key=None):
"""
Yield unique elements, preserving order.
>>> list(unique_everseen('AAAABBBCCDAABBB'))
['A', 'B', 'C', 'D']
>>> list(unique_everseen('ABBCcAD', str.lower))
['A', 'B', 'C', 'D']
Sequences with a mix of hashable and unhashable items can be used.
The function will be slower (i.e., `O(n^2)`) for unhashable items.
"""
seenset = set()
seenset_add = seenset.add
seenlist = []
seenlist_add = seenlist.append
if key is None:
for element in iterable:
try:
if element not in seenset:
seenset_add(element)
yield element
except TypeError:
if element not in seenlist:
seenlist_add(element)
yield element
else:
for element in iterable:
k = key(element)
try:
if k not in seenset:
seenset_add(k)
yield element
except TypeError:
if k not in seenlist:
seenlist_add(k)
yield element
def unique_justseen(iterable, key=None):
"""Yields elements in order, ignoring serial duplicates
>>> list(unique_justseen('AAAABBBCCDAABBB'))
['A', 'B', 'C', 'D', 'A', 'B']
>>> list(unique_justseen('ABBCcAD', str.lower))
['A', 'B', 'C', 'A', 'D']
"""
return map(next, map(operator.itemgetter(1), groupby(iterable, key)))
def iter_except(func, exception, first=None):
"""Yields results from a function repeatedly until an exception is raised.
Converts a call-until-exception interface to an iterator interface.
Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel
to end the loop.
>>> l = [0, 1, 2]
>>> list(iter_except(l.pop, IndexError))
[2, 1, 0]
"""
try:
if first is not None:
yield first()
while 1:
yield func()
except exception:
pass
def first_true(iterable, default=None, pred=None):
"""
Returns the first true value in the iterable.
If no true value is found, returns *default*
If *pred* is not None, returns the first item for which
``pred(item) == True`` .
>>> first_true(range(10))
1
>>> first_true(range(10), pred=lambda x: x > 5)
6
>>> first_true(range(10), default='missing', pred=lambda x: x > 9)
'missing'
"""
return next(filter(pred, iterable), default)
def random_product(*args, **kwds):
"""Draw an item at random from each of the input iterables.
>>> random_product('abc', range(4), 'XYZ') # doctest:+SKIP
('c', 3, 'Z')
If *repeat* is provided as a keyword argument, that many items will be
drawn from each iterable.
>>> random_product('abcd', range(4), repeat=2) # doctest:+SKIP
('a', 2, 'd', 3)
    This is equivalent to taking a random selection from
    ``itertools.product(*args, **kwds)``.
"""
pools = [tuple(pool) for pool in args] * kwds.get('repeat', 1)
return tuple(choice(pool) for pool in pools)
def random_permutation(iterable, r=None):
"""Return a random *r* length permutation of the elements in *iterable*.
If *r* is not specified or is ``None``, then *r* defaults to the length of
*iterable*.
>>> random_permutation(range(5)) # doctest:+SKIP
(3, 4, 0, 1, 2)
    This is equivalent to taking a random selection from
``itertools.permutations(iterable, r)``.
"""
pool = tuple(iterable)
r = len(pool) if r is None else r
return tuple(sample(pool, r))
def random_combination(iterable, r):
"""Return a random *r* length subsequence of the elements in *iterable*.
>>> random_combination(range(5), 3) # doctest:+SKIP
(2, 3, 4)
    This is equivalent to taking a random selection from
``itertools.combinations(iterable, r)``.
"""
pool = tuple(iterable)
n = len(pool)
indices = sorted(sample(range(n), r))
return tuple(pool[i] for i in indices)
def random_combination_with_replacement(iterable, r):
"""Return a random *r* length subsequence of elements in *iterable*,
allowing individual elements to be repeated.
>>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
(0, 0, 1, 2, 2)
    This is equivalent to taking a random selection from
``itertools.combinations_with_replacement(iterable, r)``.
"""
pool = tuple(iterable)
n = len(pool)
indices = sorted(randrange(n) for i in range(r))
return tuple(pool[i] for i in indices)
def nth_combination(iterable, r, index):
"""Equivalent to ``list(combinations(iterable, r))[index]``.
The subsequences of *iterable* that are of length *r* can be ordered
lexicographically. :func:`nth_combination` computes the subsequence at
sort position *index* directly, without computing the previous
subsequences.
"""
pool = tuple(iterable)
n = len(pool)
if (r < 0) or (r > n):
raise ValueError
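    # c = C(n, r), built up with the multiplicative formula using k = min(r, n - r).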
c = 1
k = min(r, n - r)
for i in range(1, k + 1):
c = c * (n - k + i) // i
if index < 0:
index += c
if (index < 0) or (index >= c):
raise IndexError
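    # Pick each member of the combination directly from its lexicographic index,
    # shrinking the pool and the running count c as elements are chosen or skipped.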
result = []
while r:
c, n, r = c * r // n, n - 1, r - 1
while index >= c:
index -= c
c, n = c * (n - r) // n, n - 1
result.append(pool[-1 - n])
return tuple(result)
def prepend(value, iterator):
"""Yield *value*, followed by the elements in *iterator*.
>>> value = '0'
>>> iterator = ['1', '2', '3']
>>> list(prepend(value, iterator))
['0', '1', '2', '3']
To prepend multiple values, see :func:`itertools.chain`.
"""
return chain([value], iterator)
| {
"pile_set_name": "Github"
} |
#include <stdlib.h>
#include "buffer.h"
#include "chunk.h"
#include "cmark.h"
#include "utf8.h"
#include "render.h"
#include "node.h"
static CMARK_INLINE void S_cr(cmark_renderer *renderer) {
if (renderer->need_cr < 1) {
renderer->need_cr = 1;
}
}
static CMARK_INLINE void S_blankline(cmark_renderer *renderer) {
if (renderer->need_cr < 2) {
renderer->need_cr = 2;
}
}
static void S_out(cmark_renderer *renderer, const char *source, bool wrap,
cmark_escaping escape) {
int length = strlen(source);
unsigned char nextc;
int32_t c;
int i = 0;
int last_nonspace;
int len;
cmark_chunk remainder = cmark_chunk_literal("");
int k = renderer->buffer->size - 1;
wrap = wrap && !renderer->no_linebreaks;
if (renderer->in_tight_list_item && renderer->need_cr > 1) {
renderer->need_cr = 1;
}
while (renderer->need_cr) {
if (k < 0 || renderer->buffer->ptr[k] == '\n') {
k -= 1;
} else {
cmark_strbuf_putc(renderer->buffer, '\n');
if (renderer->need_cr > 1) {
cmark_strbuf_put(renderer->buffer, renderer->prefix->ptr,
renderer->prefix->size);
}
}
renderer->column = 0;
renderer->last_breakable = 0;
renderer->begin_line = true;
renderer->begin_content = true;
renderer->need_cr -= 1;
}
while (i < length) {
if (renderer->begin_line) {
cmark_strbuf_put(renderer->buffer, renderer->prefix->ptr,
renderer->prefix->size);
// note: this assumes prefix is ascii:
renderer->column = renderer->prefix->size;
}
len = cmark_utf8proc_iterate((const uint8_t *)source + i, length - i, &c);
if (len == -1) { // error condition
return; // return without rendering rest of string
}
nextc = source[i + len];
if (c == 32 && wrap) {
if (!renderer->begin_line) {
last_nonspace = renderer->buffer->size;
cmark_strbuf_putc(renderer->buffer, ' ');
renderer->column += 1;
renderer->begin_line = false;
renderer->begin_content = false;
// skip following spaces
while (source[i + 1] == ' ') {
i++;
}
// We don't allow breaks that make a digit the first character
// because this causes problems with commonmark output.
if (!cmark_isdigit(source[i + 1])) {
renderer->last_breakable = last_nonspace;
}
}
} else if (escape == LITERAL) {
if (c == 10) {
cmark_strbuf_putc(renderer->buffer, '\n');
renderer->column = 0;
renderer->begin_line = true;
renderer->begin_content = true;
renderer->last_breakable = 0;
} else {
cmark_render_code_point(renderer, c);
renderer->begin_line = false;
// we don't set 'begin_content' to false til we've
// finished parsing a digit. Reason: in commonmark
// we need to escape a potential list marker after
// a digit:
renderer->begin_content =
renderer->begin_content && cmark_isdigit(c) == 1;
}
} else {
(renderer->outc)(renderer, escape, c, nextc);
renderer->begin_line = false;
renderer->begin_content =
renderer->begin_content && cmark_isdigit(c) == 1;
}
// If adding the character went beyond width, look for an
// earlier place where the line could be broken:
if (renderer->width > 0 && renderer->column > renderer->width &&
!renderer->begin_line && renderer->last_breakable > 0) {
// copy from last_breakable to remainder
cmark_chunk_set_cstr(renderer->mem, &remainder,
(char *)renderer->buffer->ptr +
renderer->last_breakable + 1);
// truncate at last_breakable
cmark_strbuf_truncate(renderer->buffer, renderer->last_breakable);
// add newline, prefix, and remainder
cmark_strbuf_putc(renderer->buffer, '\n');
cmark_strbuf_put(renderer->buffer, renderer->prefix->ptr,
renderer->prefix->size);
cmark_strbuf_put(renderer->buffer, remainder.data, remainder.len);
renderer->column = renderer->prefix->size + remainder.len;
cmark_chunk_free(renderer->mem, &remainder);
renderer->last_breakable = 0;
renderer->begin_line = false;
renderer->begin_content = false;
}
i += len;
}
}
// Assumes no newlines, assumes ascii content:
void cmark_render_ascii(cmark_renderer *renderer, const char *s) {
int origsize = renderer->buffer->size;
cmark_strbuf_puts(renderer->buffer, s);
renderer->column += renderer->buffer->size - origsize;
}
void cmark_render_code_point(cmark_renderer *renderer, uint32_t c) {
cmark_utf8proc_encode_char(c, renderer->buffer);
renderer->column += 1;
}
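// Shared driver for the prefix-based renderers: walk the node tree with a
// cmark_iter, hand each (node, event) pair to 'render_node', make sure the
// output ends with a newline, and return the detached buffer as a C string.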
char *cmark_render(cmark_node *root, int options, int width,
void (*outc)(cmark_renderer *, cmark_escaping, int32_t,
unsigned char),
int (*render_node)(cmark_renderer *renderer,
cmark_node *node,
cmark_event_type ev_type, int options)) {
cmark_mem *mem = cmark_node_mem(root);
cmark_strbuf pref = CMARK_BUF_INIT(mem);
cmark_strbuf buf = CMARK_BUF_INIT(mem);
cmark_node *cur;
cmark_event_type ev_type;
char *result;
cmark_iter *iter = cmark_iter_new(root);
cmark_renderer renderer = {mem, &buf, &pref, 0, width,
0, 0, true, true, false,
false, outc, S_cr, S_blankline, S_out};
while ((ev_type = cmark_iter_next(iter)) != CMARK_EVENT_DONE) {
cur = cmark_iter_get_node(iter);
if (!render_node(&renderer, cur, ev_type, options)) {
// a false value causes us to skip processing
// the node's contents. this is used for
// autolinks.
cmark_iter_reset(iter, cur, CMARK_EVENT_EXIT);
}
}
// ensure final newline
if (renderer.buffer->size == 0 || renderer.buffer->ptr[renderer.buffer->size - 1] != '\n') {
cmark_strbuf_putc(renderer.buffer, '\n');
}
result = (char *)cmark_strbuf_detach(renderer.buffer);
cmark_iter_free(iter);
cmark_strbuf_free(renderer.prefix);
cmark_strbuf_free(renderer.buffer);
return result;
}
| {
"pile_set_name": "Github"
} |
//
// ZZFDRQSuccessCell.m
// ZZFLEXDemo
//
// Created by 李伯坤 on 2018/1/24.
// Copyright © 2018 李伯坤. All rights reserved.
//
#import "ZZFDRQSuccessCell.h"
@implementation ZZFDRQSuccessCell
+ (CGFloat)viewHeightByDataModel:(id)dataModel
{
return 180;
}
- (void)setViewDataModel:(UIColor *)dataModel
{
[self setBackgroundColor:dataModel];
}
- (instancetype)initWithFrame:(CGRect)frame
{
if (self = [super initWithFrame:frame]) {
[self setBackgroundColor:[UIColor whiteColor]];
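        // Label text "请求成功" means "request succeeded".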
self.contentView.addLabel(1001)
.text(@"请求成功").font([UIFont systemFontOfSize:15])
.masonry(^(UIView *senderView, MASConstraintMaker *make) {
make.center.mas_equalTo(0);
});
}
return self;
}
@end
| {
"pile_set_name": "Github"
} |
// Dstl (c) Crown Copyright 2017
package uk.gov.dstl.baleen.annotators.cleaners;
import static org.junit.Assert.assertEquals;
import org.apache.uima.analysis_engine.AnalysisEngine;
import org.apache.uima.fit.factory.AnalysisEngineFactory;
import org.apache.uima.fit.util.JCasUtil;
import org.junit.Test;
import uk.gov.dstl.baleen.annotators.testing.Annotations;
import uk.gov.dstl.baleen.annotators.testing.AnnotatorTestBase;
import uk.gov.dstl.baleen.types.common.Person;
import uk.gov.dstl.baleen.types.semantic.ReferenceTarget;
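/**
 * Checks that entities differing only by capitalisation ("James" vs "JAMES") or by a trailing
 * apostrophe / apostrophe-s ("Naomi" vs "Naomi's") are given a shared ReferenceTarget, and that
 * existing referents are reused, or merged only when the mergeReferents parameter is set.
 */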
public class CorefCapitalisationAndApostropheTest extends AnnotatorTestBase {
private static final String JAMES_UC = "JAMES";
private static final String JAMES = "James";
private static final String TEXT = "James went to London. JAMES has also been to Edinburgh.";
@Test
public void testNoExistingReferents() throws Exception {
AnalysisEngine corefCapAE =
AnalysisEngineFactory.createEngine(CorefCapitalisationAndApostrophe.class);
jCas.setDocumentText(TEXT);
Annotations.createPerson(jCas, 0, 5, JAMES);
Annotations.createPerson(jCas, 22, 27, JAMES_UC);
corefCapAE.process(jCas);
assertEquals(1, JCasUtil.select(jCas, ReferenceTarget.class).size());
ReferenceTarget rt = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 0);
Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0);
Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1);
assertEquals(rt, p1t.getReferent());
assertEquals(rt, p2t.getReferent());
}
@Test
public void testOneExistingReferent() throws Exception {
AnalysisEngine corefCapAE =
AnalysisEngineFactory.createEngine(CorefCapitalisationAndApostrophe.class);
jCas.setDocumentText(TEXT);
ReferenceTarget rt = Annotations.createReferenceTarget(jCas);
Person p1 = Annotations.createPerson(jCas, 0, 5, JAMES);
p1.setReferent(rt);
Annotations.createPerson(jCas, 22, 27, JAMES_UC);
corefCapAE.process(jCas);
assertEquals(1, JCasUtil.select(jCas, ReferenceTarget.class).size());
ReferenceTarget rtt = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 0);
Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0);
Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1);
assertEquals(rtt, p1t.getReferent());
assertEquals(rtt, p2t.getReferent());
}
@Test
public void testTwoExistingReferent() throws Exception {
AnalysisEngine corefCapAE =
AnalysisEngineFactory.createEngine(CorefCapitalisationAndApostrophe.class);
jCas.setDocumentText(TEXT + " James has not been to Guatemala.");
ReferenceTarget rt = Annotations.createReferenceTarget(jCas);
Person p1 = Annotations.createPerson(jCas, 0, 5, JAMES);
p1.setReferent(rt);
Person p2 = Annotations.createPerson(jCas, 22, 27, JAMES_UC);
p2.setReferent(rt);
Annotations.createPerson(jCas, 56, 61, JAMES);
corefCapAE.process(jCas);
assertEquals(1, JCasUtil.select(jCas, ReferenceTarget.class).size());
ReferenceTarget rtt = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 0);
Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0);
Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1);
Person p3t = JCasUtil.selectByIndex(jCas, Person.class, 2);
assertEquals(rtt, p1t.getReferent());
assertEquals(rtt, p2t.getReferent());
assertEquals(rtt, p3t.getReferent());
}
@Test
public void testExistingReferentsMerge() throws Exception {
AnalysisEngine corefCapAE =
AnalysisEngineFactory.createEngine(
CorefCapitalisationAndApostrophe.class, "mergeReferents", true);
jCas.setDocumentText(TEXT);
ReferenceTarget rt1 = Annotations.createReferenceTarget(jCas);
ReferenceTarget rt2 = Annotations.createReferenceTarget(jCas);
Person p1 = Annotations.createPerson(jCas, 0, 5, JAMES);
p1.setReferent(rt1);
Person p2 = Annotations.createPerson(jCas, 22, 27, JAMES_UC);
p2.setReferent(rt2);
corefCapAE.process(jCas);
assertEquals(1, JCasUtil.select(jCas, ReferenceTarget.class).size());
ReferenceTarget rtt = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 0);
Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0);
Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1);
assertEquals(rtt, p1t.getReferent());
assertEquals(rtt, p2t.getReferent());
}
@Test
public void testExistingReferentsNoMerge() throws Exception {
AnalysisEngine corefCapAE =
AnalysisEngineFactory.createEngine(CorefCapitalisationAndApostrophe.class);
jCas.setDocumentText(TEXT);
ReferenceTarget rt1 = Annotations.createReferenceTarget(jCas);
ReferenceTarget rt2 = Annotations.createReferenceTarget(jCas);
Person p1 = Annotations.createPerson(jCas, 0, 5, JAMES);
p1.setReferent(rt1);
Person p2 = Annotations.createPerson(jCas, 22, 27, JAMES_UC);
p2.setReferent(rt2);
corefCapAE.process(jCas);
assertEquals(2, JCasUtil.select(jCas, ReferenceTarget.class).size());
ReferenceTarget rt1t = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 0);
ReferenceTarget rt2t = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 1);
Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0);
Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1);
assertEquals(rt1t, p1t.getReferent());
assertEquals(rt2t, p2t.getReferent());
}
@Test
public void testMissingValue() throws Exception {
AnalysisEngine corefCapAE =
AnalysisEngineFactory.createEngine(CorefCapitalisationAndApostrophe.class);
jCas.setDocumentText(TEXT);
Person p1 = new Person(jCas);
p1.setBegin(0);
p1.setEnd(5);
p1.addToIndexes();
Annotations.createPerson(jCas, 22, 27, JAMES_UC);
corefCapAE.process(jCas);
assertEquals(1, JCasUtil.select(jCas, ReferenceTarget.class).size());
ReferenceTarget rt = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 0);
Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0);
Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1);
assertEquals(rt, p1t.getReferent());
assertEquals(rt, p2t.getReferent());
}
@Test
public void testApostropheS() throws Exception {
AnalysisEngine corefCapAE =
AnalysisEngineFactory.createEngine(CorefCapitalisationAndApostrophe.class);
jCas.setDocumentText("Naomi went to London. Naomi's train was late.");
Annotations.createPerson(jCas, 0, 5, "Naomi");
Annotations.createPerson(jCas, 22, 29, "Naomi's");
corefCapAE.process(jCas);
assertEquals(1, JCasUtil.select(jCas, ReferenceTarget.class).size());
ReferenceTarget rt = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 0);
Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0);
Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1);
assertEquals(rt, p1t.getReferent());
assertEquals(rt, p2t.getReferent());
}
@Test
public void testSApostrophe() throws Exception {
AnalysisEngine corefCapAE =
AnalysisEngineFactory.createEngine(CorefCapitalisationAndApostrophe.class);
jCas.setDocumentText("James went to London. James' train was late.");
Annotations.createPerson(jCas, 0, 5, "James");
Annotations.createPerson(jCas, 22, 28, "James'");
corefCapAE.process(jCas);
assertEquals(1, JCasUtil.select(jCas, ReferenceTarget.class).size());
ReferenceTarget rt = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 0);
Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0);
Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1);
assertEquals(rt, p1t.getReferent());
assertEquals(rt, p2t.getReferent());
}
}
| {
"pile_set_name": "Github"
} |
//
// CSLayoutSwitcherHostingView.m
// CocoaSplit
//
// Created by Zakk on 3/5/17.
// Copyright © 2017 Zakk. All rights reserved.
//
#import "CSLayoutSwitcherHostingView.h"
#import <Quartz/Quartz.h>
#import "CSPreviewGLLayer.h"
#import "CSLayoutSwitcherWithPreviewWindowController.h"
@implementation CSLayoutSwitcherHostingView
@end
| {
"pile_set_name": "Github"
} |
using CSharpGL;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
namespace PointLight
{
/// <summary>
    /// Render a cube with a single color in modern OpenGL; used as a marker for the point light's position.
/// </summary>
public class LightPositionNode : PickableNode, IRenderable
{
private const string inPosition = "inPosition";
private const string projectionMat = "projectionMat";
private const string viewMat = "viewMat";
private const string modelMat = "modelMat";
private const string color = "color";
private const string vertexCode =
@"#version 330 core
in vec3 " + inPosition + @";
uniform mat4 " + projectionMat + @";
uniform mat4 " + viewMat + @";
uniform mat4 " + modelMat + @";
void main(void) {
gl_Position = projectionMat * viewMat * modelMat * vec4(inPosition, 1.0);
}
";
private const string fragmentCode =
@"#version 330 core
uniform vec3 " + color + @" = vec3(1, 1, 1);
layout(location = 0) out vec4 outColor;
//out vec4 outColor;
void main(void) {
outColor = vec4(color, 1);
}
";
private CSharpGL.PointLight light;
/// <summary>
        /// Create the light-position cube node and its shader program in modern OpenGL.
/// </summary>
/// <returns></returns>
public static LightPositionNode Create()
{
var vs = new VertexShader(vertexCode);
var fs = new FragmentShader(fragmentCode);
var provider = new ShaderArray(vs, fs);
var map = new AttributeMap();
map.Add(inPosition, CubeModel.strPosition);
var builder = new RenderMethodBuilder(provider, map, new PolygonModeSwitch(PolygonMode.Line), new LineWidthSwitch(3));
var node = new LightPositionNode(new CubeModel(), CubeModel.strPosition, builder);
node.Initialize();
return node;
}
/// <summary>
        /// Private constructor wiring the cube model to the render method builders.
/// </summary>
private LightPositionNode(IBufferSource model, string positionNameInIBufferable, params RenderMethodBuilder[] builders)
: base(model, positionNameInIBufferable, builders)
{
this.ModelSize = new vec3(1, 1, 1) * 0.3f;
this.AutoRotate = true;
}
/// <summary>
///
/// </summary>
public bool AutoRotate { get; set; }
private ThreeFlags enableRendering = ThreeFlags.BeforeChildren | ThreeFlags.Children | ThreeFlags.AfterChildren;
/// <summary>
/// Render before/after children? Render children?
/// RenderAction cares about this property. Other actions, maybe, maybe not, your choice.
/// </summary>
public ThreeFlags EnableRendering
{
get { return this.enableRendering; }
set { this.enableRendering = value; }
}
/// <summary>
///
/// </summary>
/// <param name="arg"></param>
public void RenderBeforeChildren(RenderEventArgs arg)
{
if (!this.IsInitialized) { this.Initialize(); }
if (this.AutoRotate)
{
float delta = 1;
this.RotationAngle += delta * 31;
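                // Orbit the light: X and Z trace a circle of radius 9 while Y oscillates
                // ten times more slowly; the node's world position is kept in sync below
                // so this cube marks where the point light currently is.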
var position = new vec3(
(float)Math.Cos(this.RotationAngle / 5 * Math.PI / 180.0),
(float)Math.Cos(this.RotationAngle / 50 * Math.PI / 180.0),
(float)Math.Sin(this.RotationAngle / 5 * Math.PI / 180.0)) * 9;
this.light.Position = position;
this.WorldPosition = position;
}
ICamera camera = arg.Camera;
mat4 projection = camera.GetProjectionMatrix();
mat4 view = camera.GetViewMatrix();
mat4 model = this.GetModelMatrix();
var method = this.RenderUnit.Methods[0]; // the only render unit in this node.
ShaderProgram program = method.Program;
program.SetUniform(projectionMat, projection);
program.SetUniform(viewMat, view);
program.SetUniform(modelMat, model);
method.Render();
}
public void RenderAfterChildren(RenderEventArgs arg)
{
}
public void SetLight(CSharpGL.PointLight light)
{
this.light = light;
}
class CubeModel : IBufferSource
{
public vec3 ModelSize { get; private set; }
public CubeModel()
{
this.ModelSize = new vec3(xLength * 2, yLength * 2, zLength * 2);
}
public const string strPosition = "position";
private VertexBuffer positionBuffer;
private IDrawCommand drawCmd;
            #region IBufferable members
public IEnumerable<VertexBuffer> GetVertexAttribute(string bufferName)
{
if (bufferName == strPosition)
{
if (this.positionBuffer == null)
{
this.positionBuffer = positions.GenVertexBuffer(VBOConfig.Vec3, BufferUsage.StaticDraw);
}
yield return this.positionBuffer;
}
else
{
throw new ArgumentException();
}
}
public IEnumerable<IDrawCommand> GetDrawCommand()
{
if (this.drawCmd == null)
{
this.drawCmd = new DrawArraysCmd(DrawMode.TriangleStrip, positions.Length);
}
yield return this.drawCmd;
}
#endregion
private const float xLength = 0.5f;
private const float yLength = 0.5f;
private const float zLength = 0.5f;
/// <summary>
            /// 14 vertices laid out as a triangle strip covering the cube.
/// </summary>
private static readonly vec3[] positions = new vec3[]
{
new vec3(+xLength, +yLength, +zLength),// 0
new vec3(+xLength, -yLength, +zLength),// 1
new vec3(+xLength, +yLength, -zLength),// 2
new vec3(+xLength, -yLength, -zLength),// 3
new vec3(-xLength, -yLength, -zLength),// 4
new vec3(+xLength, -yLength, +zLength),// 5
new vec3(-xLength, -yLength, +zLength),// 6
new vec3(+xLength, +yLength, +zLength),// 7
new vec3(-xLength, +yLength, +zLength),// 8
new vec3(+xLength, +yLength, -zLength),// 9
new vec3(-xLength, +yLength, -zLength),// 10
new vec3(-xLength, -yLength, -zLength),// 11
new vec3(-xLength, +yLength, +zLength),// 12
new vec3(-xLength, -yLength, +zLength),// 13
};
}
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.android;
import static com.facebook.buck.android.aapt.RDotTxtEntry.IdType.INT;
import static com.facebook.buck.android.aapt.RDotTxtEntry.IdType.INT_ARRAY;
import static com.facebook.buck.android.aapt.RDotTxtEntry.RType.ATTR;
import static com.facebook.buck.android.aapt.RDotTxtEntry.RType.ID;
import static com.facebook.buck.android.aapt.RDotTxtEntry.RType.STYLEABLE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertThat;
import com.facebook.buck.android.MergeAndroidResourcesStep.DuplicateResourceException;
import com.facebook.buck.android.aapt.RDotTxtEntry;
import com.facebook.buck.android.aapt.RDotTxtEntry.RType;
import com.facebook.buck.android.aapt.RDotTxtEntryUtil;
import com.facebook.buck.android.aapt.RDotTxtEntryUtil.FakeEntry;
import com.facebook.buck.core.build.execution.context.ExecutionContext;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.model.impl.BuildTargetPaths;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.core.rules.BuildRuleResolver;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.rules.resolver.impl.TestActionGraphBuilder;
import com.facebook.buck.core.sourcepath.FakeSourcePath;
import com.facebook.buck.core.sourcepath.resolver.SourcePathResolverAdapter;
import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem;
import com.facebook.buck.step.StepExecutionResult;
import com.facebook.buck.step.TestExecutionContext;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.SortedSetMultimap;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.SortedSet;
import java.util.stream.Collectors;
import org.hamcrest.CoreMatchers;
import org.hamcrest.Matchers;
import org.hamcrest.core.StringContains;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
public class MergeAndroidResourcesStepTest {
@Rule public ExpectedException thrown = ExpectedException.none();
private List<RDotTxtEntry> createTestingFakesWithIds(List<RDotTxtEntry> ls) {
return ls.stream().map(RDotTxtEntryUtil::matchId).collect(Collectors.toList());
}
@Test
public void testGenerateRDotJavaForMultipleSymbolsFiles() throws DuplicateResourceException {
RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder();
// Merge everything into the same package space.
String sharedPackageName = "com.facebook.abc";
entriesBuilder.add(
new RDotTxtFile(
sharedPackageName,
"a-R.txt",
ImmutableList.of(
"int id a1 0x7f010001", "int id a2 0x7f010002", "int string a1 0x7f020001")));
entriesBuilder.add(
new RDotTxtFile(
sharedPackageName,
"b-R.txt",
ImmutableList.of(
"int id b1 0x7f010001", "int id b2 0x7f010002", "int string a1 0x7f020001")));
entriesBuilder.add(
new RDotTxtFile(
sharedPackageName,
"c-R.txt",
ImmutableList.of("int attr c1 0x7f010001", "int[] styleable c1 { 0x7f010001 }")));
SortedSetMultimap<String, RDotTxtEntry> packageNameToResources =
MergeAndroidResourcesStep.sortSymbols(
entriesBuilder.buildFilePathToPackageNameSet(),
Optional.empty(),
ImmutableMap.of(),
Optional.empty(),
/* bannedDuplicateResourceTypes */ EnumSet.noneOf(RType.class),
ImmutableSet.of(),
entriesBuilder.getProjectFilesystem(),
false);
assertEquals(1, packageNameToResources.keySet().size());
SortedSet<RDotTxtEntry> resources = packageNameToResources.get(sharedPackageName);
assertEquals(7, resources.size());
Set<String> uniqueEntries = new HashSet<>();
for (RDotTxtEntry resource : resources) {
if (!resource.type.equals(STYLEABLE)) {
assertFalse(
"Duplicate ids should be fixed by renumerate=true; duplicate was: " + resource.idValue,
uniqueEntries.contains(resource.idValue));
uniqueEntries.add(resource.idValue);
}
}
assertEquals(6, uniqueEntries.size());
// All good, no need to further test whether we can write the Java file correctly...
}
@Test
public void testGenerateRDotJavaForWithStyleables() throws DuplicateResourceException {
RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder();
// Merge everything into the same package space.
String sharedPackageName = "com.facebook.abc";
entriesBuilder.add(
new RDotTxtFile(
sharedPackageName,
"a-R.txt",
ImmutableList.of(
"int attr android_layout 0x010100f2",
"int attr buttonPanelSideLayout 0x7f01003a",
"int attr listLayout 0x7f01003b",
"int[] styleable AlertDialog { 0x7f01003a, 0x7f01003b, 0x010100f2 }",
"int styleable AlertDialog_android_layout 2",
"int styleable AlertDialog_buttonPanelSideLayout 0",
"int styleable AlertDialog_multiChoiceItemLayout 1")));
entriesBuilder.add(
new RDotTxtFile(
sharedPackageName,
"b-R.txt",
ImmutableList.of(
"int id a1 0x7f010001",
"int id a2 0x7f010002",
"int attr android_layout_gravity 0x7f078008",
"int attr background 0x7f078009",
"int attr backgroundSplit 0x7f078008",
"int attr backgroundStacked 0x7f078010",
"int attr layout_heightPercent 0x7f078012",
"int[] styleable ActionBar { }",
"int styleable ActionBar_background 10",
"int styleable ActionBar_backgroundSplit 12",
"int styleable ActionBar_backgroundStacked 11",
"int[] styleable ActionBarLayout { 0x7f060008 }",
"int styleable ActionBarLayout_android_layout 0",
"int styleable ActionBarLayout_android_layout_gravity 1",
"int[] styleable PercentLayout_Layout { }",
"int styleable PercentLayout_Layout_layout_aspectRatio 9",
"int styleable PercentLayout_Layout_layout_heightPercent 1")));
SortedSetMultimap<String, RDotTxtEntry> packageNameToResources =
MergeAndroidResourcesStep.sortSymbols(
entriesBuilder.buildFilePathToPackageNameSet(),
Optional.empty(),
ImmutableMap.of(),
Optional.empty(),
/* bannedDuplicateResourceTypes */ EnumSet.noneOf(RType.class),
ImmutableSet.of(),
entriesBuilder.getProjectFilesystem(),
false);
assertEquals(23, packageNameToResources.size());
ArrayList<RDotTxtEntry> resources =
new ArrayList<>(packageNameToResources.get(sharedPackageName));
assertEquals(23, resources.size());
System.out.println(resources);
ImmutableList<RDotTxtEntry> fakeRDotTxtEntryWithIDS =
ImmutableList.of(
FakeEntry.createWithId(INT, ATTR, "android_layout_gravity", "0x07f01005"),
FakeEntry.createWithId(INT, ATTR, "background", "0x07f01006"),
FakeEntry.createWithId(INT, ATTR, "backgroundSplit", "0x07f01007"),
FakeEntry.createWithId(INT, ATTR, "backgroundStacked", "0x07f01008"),
FakeEntry.createWithId(INT, ATTR, "buttonPanelSideLayout", "0x07f01001"),
FakeEntry.createWithId(INT, ATTR, "layout_heightPercent", "0x07f01009"),
FakeEntry.createWithId(INT, ATTR, "listLayout", "0x07f01002"),
FakeEntry.createWithId(INT, ID, "a1", "0x07f01003"),
FakeEntry.createWithId(INT, ID, "a2", "0x07f01004"),
FakeEntry.createWithId(
INT_ARRAY, STYLEABLE, "ActionBar", "{ 0x07f01006,0x07f01007,0x07f01008 }"),
FakeEntry.createWithId(INT, STYLEABLE, "ActionBar_background", "0"),
FakeEntry.createWithId(INT, STYLEABLE, "ActionBar_backgroundSplit", "1"),
FakeEntry.createWithId(INT, STYLEABLE, "ActionBar_backgroundStacked", "2"),
FakeEntry.createWithId(
INT_ARRAY, STYLEABLE, "ActionBarLayout", "{ 0x010100f2,0x07f01005 }"),
FakeEntry.createWithId(INT, STYLEABLE, "ActionBarLayout_android_layout", "0"),
FakeEntry.createWithId(INT, STYLEABLE, "ActionBarLayout_android_layout_gravity", "1"),
FakeEntry.createWithId(
INT_ARRAY, STYLEABLE, "AlertDialog", "{ 0x010100f2,0x07f01001,0x7f01003b }"),
FakeEntry.createWithId(INT, STYLEABLE, "AlertDialog_android_layout", "0"),
FakeEntry.createWithId(INT, STYLEABLE, "AlertDialog_buttonPanelSideLayout", "1"),
FakeEntry.createWithId(INT, STYLEABLE, "AlertDialog_multiChoiceItemLayout", "2"),
FakeEntry.createWithId(
INT_ARRAY, STYLEABLE, "PercentLayout_Layout", "{ 0x00000000,0x07f01009 }"),
FakeEntry.createWithId(INT, STYLEABLE, "PercentLayout_Layout_layout_aspectRatio", "0"),
FakeEntry.createWithId(
INT, STYLEABLE, "PercentLayout_Layout_layout_heightPercent", "1"));
assertEquals(createTestingFakesWithIds(resources), fakeRDotTxtEntryWithIDS);
}
@Test
public void testGenerateRDotJavaForMultipleSymbolsFilesWithDuplicates()
throws DuplicateResourceException {
RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder();
// Merge everything into the same package space.
String sharedPackageName = "com.facebook.abc";
entriesBuilder.add(
new RDotTxtFile(
sharedPackageName,
"a-R.txt",
ImmutableList.of("int id a1 0x7f010001", "int string a1 0x7f020001")));
entriesBuilder.add(
new RDotTxtFile(
sharedPackageName,
"b-R.txt",
ImmutableList.of(
"int id a1 0x7f010001", "int string a1 0x7f010002", "int string c1 0x7f010003")));
entriesBuilder.add(
new RDotTxtFile(
sharedPackageName,
"c-R.txt",
ImmutableList.of(
"int id a1 0x7f010001",
"int string a1 0x7f010002",
"int string b1 0x7f010003",
"int string c1 0x7f010004")));
thrown.expect(DuplicateResourceException.class);
thrown.expectMessage("Resource 'a1' (string) is duplicated across: ");
thrown.expectMessage("Resource 'c1' (string) is duplicated across: ");
BuildTarget resTarget = BuildTargetFactory.newInstance("//:res1");
SourcePathRuleFinder ruleFinder = new TestActionGraphBuilder();
MergeAndroidResourcesStep.sortSymbols(
entriesBuilder.buildFilePathToPackageNameSet(),
Optional.empty(),
ImmutableMap.of(
entriesBuilder.getProjectFilesystem().getPath("a-R.txt"),
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(ruleFinder)
.setBuildTarget(resTarget)
.setRes(FakeSourcePath.of("a/res"))
.setRDotJavaPackage("com.res.a")
.build(),
entriesBuilder.getProjectFilesystem().getPath("b-R.txt"),
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(ruleFinder)
.setBuildTarget(resTarget)
.setRes(FakeSourcePath.of("b/res"))
.setRDotJavaPackage("com.res.b")
.build(),
entriesBuilder.getProjectFilesystem().getPath("c-R.txt"),
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(ruleFinder)
.setBuildTarget(resTarget)
.setRes(FakeSourcePath.of("c/res"))
.setRDotJavaPackage("com.res.c")
.build()),
Optional.empty(),
/* bannedDuplicateResourceTypes */ EnumSet.of(RType.STRING),
ImmutableSet.of(),
entriesBuilder.getProjectFilesystem(),
false);
}
@Test
public void testGenerateRDotJavaForLibrary() throws Exception {
BuildTarget resTarget = BuildTargetFactory.newInstance("//:res1");
RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder();
entriesBuilder.add(
new RDotTxtFile(
"com.res1",
BuildTargetPaths.getGenPath(
entriesBuilder.getProjectFilesystem(), resTarget, "__%s_text_symbols__/R.txt")
.toString(),
ImmutableList.of("int id id1 0x7f020000")));
FakeProjectFilesystem filesystem = entriesBuilder.getProjectFilesystem();
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
AndroidResource res =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(graphBuilder)
.setBuildTarget(resTarget)
.setRes(FakeSourcePath.of("res"))
.setRDotJavaPackage("com.res1")
.build();
graphBuilder.addToIndex(res);
MergeAndroidResourcesStep mergeStep =
MergeAndroidResourcesStep.createStepForDummyRDotJava(
filesystem,
graphBuilder.getSourcePathResolver(),
ImmutableList.of(res),
Paths.get("output"),
/* forceFinalResourceIds */ false,
/* unionPackage */ Optional.empty(),
/* rName */ Optional.empty(),
/* useOldStyleableFormat */ false,
/* skipNonUnionRDotJava */ false);
ExecutionContext executionContext = TestExecutionContext.newInstance();
assertEquals(0, mergeStep.execute(executionContext).getExitCode());
// Verify that the correct Java code is generated.
assertThat(
filesystem.readFileIfItExists(Paths.get("output/com/res1/R.java")).get(),
CoreMatchers.containsString("{\n public static int id1=0x07f01001;"));
}
@Test
public void testGenerateRDotJavaForOneSymbolsFile() throws Exception {
BuildTarget target = BuildTargetFactory.newInstance("//android_res/com/facebook/http:res");
RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder();
String symbolsFile =
BuildTargetPaths.getGenPath(
entriesBuilder.getProjectFilesystem(), target, "__%s_text_symbols__/R.txt")
.toString();
String rDotJavaPackage = "com.facebook";
ImmutableList<String> outputTextSymbols =
ImmutableList.<String>builder()
.add("int id placeholder 0x7f020000")
.add("int string debug_http_proxy_dialog_title 0x7f030004")
.add("int string debug_http_proxy_hint 0x7f030005")
.add("int string debug_http_proxy_summary 0x7f030003")
.add("int string debug_http_proxy_title 0x7f030002")
.add("int string debug_ssl_cert_check_summary 0x7f030001")
.add("int string debug_ssl_cert_check_title 0x7f030000")
.add("int styleable SherlockMenuItem_android_visible 4")
.add(
"int[] styleable SherlockMenuView { 0x7f010026, 0x7f010027, 0x7f010028, 0x7f010029, "
+ "0x7f01002a, 0x7f01002b, 0x7f01002c, 0x7f01002d }")
.build();
entriesBuilder.add(new RDotTxtFile(rDotJavaPackage, symbolsFile, outputTextSymbols));
FakeProjectFilesystem filesystem = entriesBuilder.getProjectFilesystem();
Path uberRDotTxt = filesystem.resolve("R.txt").toAbsolutePath();
filesystem.writeLinesToPath(outputTextSymbols, uberRDotTxt);
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
AndroidResource resource =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(graphBuilder)
.setBuildTarget(target)
.setRes(FakeSourcePath.of("res"))
.setRDotJavaPackage("com.facebook")
.build();
graphBuilder.addToIndex(resource);
MergeAndroidResourcesStep mergeStep =
new MergeAndroidResourcesStep(
filesystem,
graphBuilder.getSourcePathResolver(),
ImmutableList.of(resource),
ImmutableList.of(uberRDotTxt),
Paths.get("output"),
/* forceFinalResourceIds */ true,
/* bannedDuplicateResourceTypes */ EnumSet.noneOf(RType.class),
/* filteredResourcesProvider */ Optional.empty(),
/* overrideSymbolsPath */ ImmutableList.of(),
/* unionPackage */ Optional.empty(),
/* rName */ Optional.empty(),
/* useOldStyleableFormat */ false,
/* skipNonUnionRDotJava */ false);
ExecutionContext executionContext = TestExecutionContext.newInstance();
assertEquals(0, mergeStep.execute(executionContext).getExitCode());
// Verify that the correct Java code is generated.
assertEquals(
"package com.facebook;\n"
+ "\n"
+ "public class R {\n"
+ " public static class id {\n"
+ " public static final int placeholder=0x7f020000;\n"
+ " }\n"
+ "\n"
+ " public static class string {\n"
+ " public static final int debug_http_proxy_dialog_title=0x7f030004;\n"
+ " public static final int debug_http_proxy_hint=0x7f030005;\n"
+ " public static final int debug_http_proxy_summary=0x7f030003;\n"
+ " public static final int debug_http_proxy_title=0x7f030002;\n"
+ " public static final int debug_ssl_cert_check_summary=0x7f030001;\n"
+ " public static final int debug_ssl_cert_check_title=0x7f030000;\n"
+ " }\n"
+ "\n"
+ " public static class styleable {\n"
+ " public static final int SherlockMenuItem_android_visible=4;\n"
+ " public static final int[] SherlockMenuView={ 0x7f010026, 0x7f010027, 0x7f010028, "
+ "0x7f010029, 0x7f01002a, 0x7f01002b, 0x7f01002c, 0x7f01002d };\n"
+ " }\n"
+ "\n"
+ "}\n",
filesystem
.readFileIfItExists(Paths.get("output/com/facebook/R.java"))
.get()
.replace("\r", ""));
}
@Test
public void testGenerateRDotJavaForCustomDrawables() throws Exception {
BuildTarget target = BuildTargetFactory.newInstance("//android_res/com/facebook/http:res");
RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder();
String symbolsFile =
BuildTargetPaths.getGenPath(
entriesBuilder.getProjectFilesystem(), target, "__%s_text_symbols__/R.txt")
.toString();
String rDotJavaPackage = "com.facebook";
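    // The trailing '#' marks fb_drawable as a custom drawable, so the generated R.java is
    // expected to collect its id in the custom_drawables array asserted below.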
ImmutableList<String> outputTextSymbols =
ImmutableList.<String>builder()
.add("int drawable android_drawable 0x7f010000")
.add("int drawable fb_drawable 0x7f010001 #")
.build();
entriesBuilder.add(new RDotTxtFile(rDotJavaPackage, symbolsFile, outputTextSymbols));
FakeProjectFilesystem filesystem = entriesBuilder.getProjectFilesystem();
Path uberRDotTxt = filesystem.resolve("R.txt").toAbsolutePath();
filesystem.writeLinesToPath(outputTextSymbols, uberRDotTxt);
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
AndroidResource resource =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(graphBuilder)
.setBuildTarget(target)
.setRes(FakeSourcePath.of("res"))
.setRDotJavaPackage("com.facebook")
.build();
graphBuilder.addToIndex(resource);
MergeAndroidResourcesStep mergeStep =
new MergeAndroidResourcesStep(
filesystem,
graphBuilder.getSourcePathResolver(),
ImmutableList.of(resource),
ImmutableList.of(uberRDotTxt),
Paths.get("output"),
/* forceFinalResourceIds */ true,
/* bannedDuplicateResourceTypes */ EnumSet.noneOf(RType.class),
/* filteredResourcesProvider */ Optional.empty(),
/* overrideSymbolsPath */ ImmutableList.of(),
/* unionPackage */ Optional.empty(),
/* rName */ Optional.empty(),
/* useOldStyleableFormat */ false,
/* skipNonUnionRDotJava */ false);
ExecutionContext executionContext = TestExecutionContext.newInstance();
assertEquals(0, mergeStep.execute(executionContext).getExitCode());
// Verify that the correct Java code is generated.
assertEquals(
"package com.facebook;\n"
+ "\n"
+ "public class R {\n"
+ " public static class drawable {\n"
+ " public static final int android_drawable=0x7f010000;\n"
+ " public static final int fb_drawable=0x7f010001;\n"
+ " }\n"
+ "\n"
+ " public static final int[] custom_drawables = { 0x7f010001 };\n"
+ "\n"
+ "}\n",
filesystem
.readFileIfItExists(Paths.get("output/com/facebook/R.java"))
.get()
.replace("\r", ""));
}
@Test
public void testGetRDotJavaFilesWithSkipPrebuiltRDotJava() {
BuildTarget res1Target = BuildTargetFactory.newInstance("//:res1");
BuildTarget res2Target = BuildTargetFactory.newInstance("//:res2");
RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder();
FakeProjectFilesystem filesystem = entriesBuilder.getProjectFilesystem();
BuildRuleResolver buildRuleResolver = new TestActionGraphBuilder();
AndroidResource res1 =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(buildRuleResolver)
.setBuildTarget(res1Target)
.setRes(FakeSourcePath.of("res1"))
.setRDotJavaPackage("com.package1")
.build();
AndroidResource res2 =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(buildRuleResolver)
.setBuildTarget(res2Target)
.setRes(FakeSourcePath.of("res2"))
.setRDotJavaPackage("com.package2")
.build();
ImmutableList<HasAndroidResourceDeps> resourceDeps = ImmutableList.of(res1, res2);
MergeAndroidResourcesStep mergeStep =
MergeAndroidResourcesStep.createStepForDummyRDotJava(
filesystem,
buildRuleResolver.getSourcePathResolver(),
resourceDeps,
Paths.get("output"),
/* forceFinalResourceIds */ false,
Optional.of("com.package"),
/* rName */ Optional.empty(),
/* useOldStyleableFormat */ false,
/* skipNonUnionRDotJava */ true);
ImmutableSortedSet<Path> rDotJavaFiles = mergeStep.getRDotJavaFiles();
assertEquals(rDotJavaFiles.size(), 1);
ImmutableSortedSet<Path> expected =
ImmutableSortedSet.<Path>naturalOrder()
.add(mergeStep.getPathToRDotJava("com.package"))
.build();
assertEquals(expected, rDotJavaFiles);
}
@Test
public void testGetRDotJavaFilesWithoutSkipPrebuiltRDotJava() {
BuildTarget res1Target = BuildTargetFactory.newInstance("//:res1");
BuildTarget res2Target = BuildTargetFactory.newInstance("//:res2");
RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder();
FakeProjectFilesystem filesystem = entriesBuilder.getProjectFilesystem();
BuildRuleResolver buildRuleResolver = new TestActionGraphBuilder();
AndroidResource res1 =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(buildRuleResolver)
.setBuildTarget(res1Target)
.setRes(FakeSourcePath.of("res1"))
.setRDotJavaPackage("com.package1")
.build();
AndroidResource res2 =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(buildRuleResolver)
.setBuildTarget(res2Target)
.setRes(FakeSourcePath.of("res2"))
.setRDotJavaPackage("com.package2")
.build();
ImmutableList<HasAndroidResourceDeps> resourceDeps = ImmutableList.of(res1, res2);
MergeAndroidResourcesStep mergeStep =
MergeAndroidResourcesStep.createStepForDummyRDotJava(
filesystem,
buildRuleResolver.getSourcePathResolver(),
resourceDeps,
Paths.get("output"),
/* forceFinalResourceIds */ false,
Optional.of("com.package"),
/* rName */ Optional.empty(),
/* useOldStyleableFormat */ false,
/* skipNonUnionRDotJava */ false);
ImmutableSortedSet<Path> rDotJavaFiles = mergeStep.getRDotJavaFiles();
assertEquals(rDotJavaFiles.size(), 3);
ImmutableSortedSet<Path> expected =
ImmutableSortedSet.<Path>naturalOrder()
.add(mergeStep.getPathToRDotJava("com.package"))
.add(mergeStep.getPathToRDotJava("com.package1"))
.add(mergeStep.getPathToRDotJava("com.package2"))
.build();
assertEquals(expected, rDotJavaFiles);
}
@Test
public void testGenerateRDotJavaWithResourceUnionPackageAndSkipPrebuiltRDotJava()
throws Exception {
BuildTarget res1Target = BuildTargetFactory.newInstance("//:res1");
BuildTarget res2Target = BuildTargetFactory.newInstance("//:res2");
RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder();
entriesBuilder.add(
new RDotTxtFile(
"com.res1",
BuildTargetPaths.getGenPath(
entriesBuilder.getProjectFilesystem(), res1Target, "__%s_text_symbols__/R.txt")
.toString(),
ImmutableList.of("int id id1 0x7f020000")));
entriesBuilder.add(
new RDotTxtFile(
"com.res2",
BuildTargetPaths.getGenPath(
entriesBuilder.getProjectFilesystem(), res2Target, "__%s_text_symbols__/R.txt")
.toString(),
ImmutableList.of("int id id2 0x7f020000")));
FakeProjectFilesystem filesystem = entriesBuilder.getProjectFilesystem();
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
AndroidResource res1 =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(graphBuilder)
.setBuildTarget(res1Target)
.setRes(FakeSourcePath.of("res1"))
.setRDotJavaPackage("res1")
.build();
graphBuilder.addToIndex(res1);
AndroidResource res2 =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(graphBuilder)
.setBuildTarget(res2Target)
.setRes(FakeSourcePath.of("res2"))
.setRDotJavaPackage("res2")
.build();
graphBuilder.addToIndex(res2);
MergeAndroidResourcesStep mergeStep =
MergeAndroidResourcesStep.createStepForDummyRDotJava(
filesystem,
graphBuilder.getSourcePathResolver(),
ImmutableList.of(res1, res2),
Paths.get("output"),
/* forceFinalResourceIds */ false,
Optional.of("res"),
/* rName */ Optional.empty(),
/* useOldStyleableFormat */ false,
/* skipNonUnionRDotJava */ true);
ExecutionContext executionContext = TestExecutionContext.newInstance();
assertEquals(0, mergeStep.execute(executionContext).getExitCode());
String resJava = filesystem.readFileIfItExists(Paths.get("output/res/R.java")).get();
assertThat(resJava, StringContains.containsString("id1"));
assertThat(resJava, StringContains.containsString("id2"));
Optional<String> res1Java = filesystem.readFileIfItExists(Paths.get("output/res1/R.java"));
Optional<String> res2Java = filesystem.readFileIfItExists(Paths.get("output/res2/R.java"));
assertFalse(res1Java.isPresent());
assertFalse(res2Java.isPresent());
}
@Test
public void testGenerateRDotJavaWithResourceUnionPackage() throws Exception {
BuildTarget res1Target = BuildTargetFactory.newInstance("//:res1");
BuildTarget res2Target = BuildTargetFactory.newInstance("//:res2");
RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder();
entriesBuilder.add(
new RDotTxtFile(
"com.res1",
BuildTargetPaths.getGenPath(
entriesBuilder.getProjectFilesystem(), res1Target, "__%s_text_symbols__/R.txt")
.toString(),
ImmutableList.of("int id id1 0x7f020000")));
entriesBuilder.add(
new RDotTxtFile(
"com.res2",
BuildTargetPaths.getGenPath(
entriesBuilder.getProjectFilesystem(), res2Target, "__%s_text_symbols__/R.txt")
.toString(),
ImmutableList.of("int id id2 0x7f020000")));
FakeProjectFilesystem filesystem = entriesBuilder.getProjectFilesystem();
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
AndroidResource res1 =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(graphBuilder)
.setBuildTarget(res1Target)
.setRes(FakeSourcePath.of("res1"))
.setRDotJavaPackage("res1")
.build();
graphBuilder.addToIndex(res1);
AndroidResource res2 =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(graphBuilder)
.setBuildTarget(res2Target)
.setRes(FakeSourcePath.of("res2"))
.setRDotJavaPackage("res2")
.build();
graphBuilder.addToIndex(res2);
MergeAndroidResourcesStep mergeStep =
MergeAndroidResourcesStep.createStepForDummyRDotJava(
filesystem,
graphBuilder.getSourcePathResolver(),
ImmutableList.of(res1, res2),
Paths.get("output"),
/* forceFinalResourceIds */ false,
Optional.of("res1"),
/* rName */ Optional.empty(),
/* useOldStyleableFormat */ false,
/* skipNonUnionRDotJava */ false);
ExecutionContext executionContext = TestExecutionContext.newInstance();
assertEquals(0, mergeStep.execute(executionContext).getExitCode());
String res1java = filesystem.readFileIfItExists(Paths.get("output/res1/R.java")).get();
String res2java = filesystem.readFileIfItExists(Paths.get("output/res2/R.java")).get();
assertThat(res1java, StringContains.containsString("id1"));
assertThat(res1java, StringContains.containsString("id2"));
assertThat(res2java, CoreMatchers.not(StringContains.containsString("id1")));
assertThat(res2java, StringContains.containsString("id2"));
}
@Test
public void testGenerateRDotJavaWithPreviouslyEmptyResourceUnionPackage() throws Exception {
BuildTarget res1Target = BuildTargetFactory.newInstance("//:res1");
RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder();
entriesBuilder.add(
new RDotTxtFile(
"com.res1",
BuildTargetPaths.getGenPath(
entriesBuilder.getProjectFilesystem(), res1Target, "__%s_text_symbols__/R.txt")
.toString(),
ImmutableList.of("int id id1 0x7f020000")));
FakeProjectFilesystem filesystem = entriesBuilder.getProjectFilesystem();
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
AndroidResource res1 =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(graphBuilder)
.setBuildTarget(res1Target)
.setRes(FakeSourcePath.of("res1"))
.setRDotJavaPackage("res1")
.build();
graphBuilder.addToIndex(res1);
MergeAndroidResourcesStep mergeStep =
MergeAndroidResourcesStep.createStepForDummyRDotJava(
filesystem,
graphBuilder.getSourcePathResolver(),
ImmutableList.of(res1),
Paths.get("output"),
/* forceFinalResourceIds */ false,
Optional.of("resM"),
/* rName */ Optional.empty(),
/* useOldStyleableFormat */ false,
/* skipNonUnionRDotJava */ false);
ExecutionContext executionContext = TestExecutionContext.newInstance();
assertEquals(0, mergeStep.execute(executionContext).getExitCode());
String res1java = filesystem.readFileIfItExists(Paths.get("output/res1/R.java")).get();
String resMjava = filesystem.readFileIfItExists(Paths.get("output/resM/R.java")).get();
assertThat(res1java, StringContains.containsString("id1"));
assertThat(resMjava, StringContains.containsString("id1"));
}
@Test
public void testGenerateRDotJavaWithRName() throws Exception {
BuildTarget res1Target = BuildTargetFactory.newInstance("//:res1");
RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder();
entriesBuilder.add(
new RDotTxtFile(
"com.res1",
BuildTargetPaths.getGenPath(
entriesBuilder.getProjectFilesystem(), res1Target, "__%s_text_symbols__/R.txt")
.toString(),
ImmutableList.of("int id id1 0x7f020000", "int id id2 0x7f020002")));
FakeProjectFilesystem filesystem = entriesBuilder.getProjectFilesystem();
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
AndroidResource res1 =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(graphBuilder)
.setBuildTarget(res1Target)
.setRes(FakeSourcePath.of("res1"))
.setRDotJavaPackage("res1")
.build();
graphBuilder.addToIndex(res1);
MergeAndroidResourcesStep mergeStep =
MergeAndroidResourcesStep.createStepForDummyRDotJava(
filesystem,
graphBuilder.getSourcePathResolver(),
ImmutableList.of(res1),
Paths.get("output"),
/* forceFinalResourceIds */ true,
Optional.of("res1"),
Optional.of("R2"),
/* useOldStyleableFormat */ false,
/* skipNonUnionRDotJava */ false);
ExecutionContext executionContext = TestExecutionContext.newInstance();
assertEquals(0, mergeStep.execute(executionContext).getExitCode());
String resR2Java = filesystem.readFileIfItExists(Paths.get("output/res1/R2.java")).get();
assertThat(resR2Java, StringContains.containsString("static final int id1=0x07f01001;"));
assertThat(resR2Java, StringContains.containsString("static final int id2=0x07f01002;"));
}
@Test
public void testDuplicateBanning() throws Exception {
BuildTarget res1Target = BuildTargetFactory.newInstance("//:res1");
BuildTarget res2Target = BuildTargetFactory.newInstance("//:res2");
ActionGraphBuilder graphBuilder = new TestActionGraphBuilder();
FakeProjectFilesystem filesystem = new FakeProjectFilesystem();
RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder(filesystem);
entriesBuilder.add(
new RDotTxtFile(
"package",
BuildTargetPaths.getGenPath(filesystem, res1Target, "__%s_text_symbols__/R.txt")
.toString(),
ImmutableList.of(
"int string app_name 0x7f020000", "int drawable android_drawable 0x7f010000")));
entriesBuilder.add(
new RDotTxtFile(
"package",
BuildTargetPaths.getGenPath(filesystem, res2Target, "__%s_text_symbols__/R.txt")
.toString(),
ImmutableList.of(
"int string app_name 0x7f020000", "int drawable android_drawable 0x7f010000")));
AndroidResource res1 =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(graphBuilder)
.setBuildTarget(res1Target)
.setRes(FakeSourcePath.of("res1"))
.setRDotJavaPackage("package")
.build();
graphBuilder.addToIndex(res1);
AndroidResource res2 =
AndroidResourceRuleBuilder.newBuilder()
.setRuleFinder(graphBuilder)
.setBuildTarget(res2Target)
.setRes(FakeSourcePath.of("res2"))
.setRDotJavaPackage("package")
.build();
graphBuilder.addToIndex(res2);
ImmutableList<HasAndroidResourceDeps> resourceDeps = ImmutableList.of(res1, res2);
checkDuplicatesDetected(
graphBuilder.getSourcePathResolver(),
filesystem,
resourceDeps,
EnumSet.noneOf(RType.class),
ImmutableList.of(),
ImmutableList.of("app_name", "android_drawable"),
Optional.empty());
checkDuplicatesDetected(
graphBuilder.getSourcePathResolver(),
filesystem,
resourceDeps,
EnumSet.of(RType.STRING),
ImmutableList.of("app_name"),
ImmutableList.of("android_drawable"),
Optional.empty());
checkDuplicatesDetected(
graphBuilder.getSourcePathResolver(),
filesystem,
resourceDeps,
EnumSet.allOf(RType.class),
ImmutableList.of("app_name", "android_drawable"),
ImmutableList.of(),
Optional.empty());
checkDuplicatesDetected(
graphBuilder.getSourcePathResolver(),
filesystem,
resourceDeps,
EnumSet.allOf(RType.class),
ImmutableList.of("android_drawable"),
ImmutableList.of("app_name"),
Optional.of(ImmutableList.of("string app_name", "color android_drawable")));
}
private void checkDuplicatesDetected(
SourcePathResolverAdapter resolver,
FakeProjectFilesystem filesystem,
ImmutableList<HasAndroidResourceDeps> resourceDeps,
EnumSet<RType> rtypes,
ImmutableList<String> duplicateResources,
ImmutableList<String> ignoredDuplicates,
Optional<List<String>> duplicateWhitelist)
throws IOException {
Optional<Path> duplicateWhitelistPath =
duplicateWhitelist.map(
whitelist -> {
Path whitelistPath = filesystem.resolve("duplicate-whitelist.txt");
filesystem.writeLinesToPath(whitelist, whitelistPath);
return whitelistPath;
});
MergeAndroidResourcesStep mergeStep =
new MergeAndroidResourcesStep(
filesystem,
resolver,
resourceDeps,
/* uberRDotTxt */ ImmutableList.of(),
Paths.get("output"),
true,
rtypes,
duplicateWhitelistPath,
/* overrideSymbolsPath */ ImmutableList.of(),
Optional.empty(),
Optional.empty(),
/* useOldStyleableFormat */ false,
false);
StepExecutionResult result = mergeStep.execute(TestExecutionContext.newInstance());
String message = result.getStderr().orElse("");
if (duplicateResources.isEmpty()) {
assertEquals(0, result.getExitCode());
} else {
assertNotEquals(0, result.getExitCode());
assertThat(message, Matchers.containsString("duplicated"));
}
for (String duplicateResource : duplicateResources) {
assertThat(message, Matchers.containsString(duplicateResource));
}
for (String ignoredDuplicate : ignoredDuplicates) {
assertThat(message, Matchers.not(Matchers.containsString(ignoredDuplicate)));
}
}
// sortSymbols has a goofy API. This will help.
private static class RDotTxtEntryBuilder {
private final FakeProjectFilesystem filesystem;
private final ImmutableMap.Builder<Path, String> filePathToPackageName = ImmutableMap.builder();
RDotTxtEntryBuilder() {
this(new FakeProjectFilesystem());
}
RDotTxtEntryBuilder(FakeProjectFilesystem filesystem) {
this.filesystem = filesystem;
}
public void add(RDotTxtFile entry) {
filesystem.writeLinesToPath(entry.contents, entry.filePath);
filePathToPackageName.put(entry.filePath, entry.packageName);
}
Map<Path, String> buildFilePathToPackageNameSet() {
return filePathToPackageName.build();
}
public FakeProjectFilesystem getProjectFilesystem() {
return filesystem;
}
}
static class RDotTxtFile {
public ImmutableList<String> contents;
String packageName;
Path filePath;
RDotTxtFile(String packageName, String filePath, ImmutableList<String> contents) {
this.packageName = packageName;
this.filePath = Paths.get(filePath);
this.contents = contents;
}
}
}
| {
"pile_set_name": "Github"
} |
/*
* SPDX-License-Identifier: BSD-2-Clause
*
* Copyright 2018, embedded brains GmbH <[email protected]>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <rtems/posix/semaphore.h>
const uint32_t _Configuration_POSIX_Maximum_named_semaphores;
| {
"pile_set_name": "Github"
} |
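// Experimental query: list every server-side template injection (SSTI) sink
// modelled for the Chevron template engine.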
import python
import experimental.semmle.python.templates.Chevron
from SSTISink s
select s
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python3
import os
import sys
import requests
import argparse
from os import path
from datetime import datetime, timedelta
from subprocess import check_call
time_str = '%Y-%m-%dT%H:%M:%SZ'
short_time_str = '%Y-%m-%d'
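# time_str yields full UTC timestamps (e.g. 2020-03-01T14:00:00Z) for the --from/--to
# arguments; short_time_str yields date-only strings used to name the uploaded figure.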
args = None
def report_ssim_rebuffer(curr_ts, days):
start_ts = curr_ts - timedelta(days=days)
curr_dir = path.dirname(path.abspath(__file__))
plot_src = path.join(curr_dir, 'plot_ssim_rebuffer.py')
time_range = '{}_{}'.format(start_ts.strftime(short_time_str),
curr_ts.strftime(short_time_str))
output_fig_name = time_range + '.png'
output_fig = path.join(curr_dir, output_fig_name)
# run plot_ssim_rebuffer.py
cmd = [plot_src, args.yaml_settings, '-o', output_fig,
'--from', start_ts.strftime(time_str),
'--to', curr_ts.strftime(time_str)]
sys.stderr.write(' '.join(cmd) + '\n')
check_call(cmd)
# upload output_fig to Google cloud storage
bucket_folder = 'puffer-stanford-public/ssim-rebuffer-figs'
cmd = 'gsutil cp {} gs://{}'.format(output_fig, bucket_folder)
sys.stderr.write(cmd + '\n')
check_call(cmd, shell=True)
gs_url = ('https://storage.googleapis.com/{}/{}'
.format(bucket_folder, output_fig_name))
# remove local output_fig
os.remove(output_fig)
# post output_fig to Zulip
template = 'Performance of ongoing experiments over the past {}:\n' + gs_url
if days == 1:
content = template.format('day')
elif days == 7:
content = template.format('week')
elif days == 14:
content = template.format('two weeks')
else:
content = template.format('{} days'.format(days))
payload = [
('type', 'stream'),
('to', 'puffer-notification'),
('subject', 'Daily Report'),
('content', content),
]
response = requests.post(
os.environ['ZULIP_URL'], data=payload,
auth=(os.environ['ZULIP_BOT_EMAIL'], os.environ['ZULIP_BOT_TOKEN']))
if response.status_code == requests.codes.ok:
print('Posted to Zulip successfully')
else:
print('Failed to post to Zulip')
def main():
parser = argparse.ArgumentParser()
parser.add_argument('yaml_settings')
global args
args = parser.parse_args()
td = datetime.utcnow()
curr_ts = datetime(td.year, td.month, td.day, td.hour, 0)
# report the performance over the past day
report_ssim_rebuffer(curr_ts, 1)
# report the performance over the past week
report_ssim_rebuffer(curr_ts, 7)
# report the performance over the past two weeks
report_ssim_rebuffer(curr_ts, 14)
if __name__ == '__main__':
main()
| {
"pile_set_name": "Github"
} |
package com.glumes.openglbasicshape.draw.texture
import android.content.Context
import android.opengl.GLES20
import android.opengl.GLES30
import com.glumes.openglbasicshape.R
import com.glumes.openglbasicshape.base.LogUtil
import com.glumes.openglbasicshape.draw.BaseShape
import com.glumes.openglbasicshape.utils.MatrixStateOnly
import com.glumes.openglbasicshape.utils.ShaderHelper
import com.glumes.openglbasicshape.utils.TextureHelper
import io.reactivex.Observable
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.util.concurrent.TimeUnit
import javax.microedition.khronos.egl.EGLConfig
import javax.microedition.khronos.opengles.GL10
/**
* Created by glumes on 09/05/2018
*/
class CubeTexture(context: Context) : BaseShape(context) {
private val U_VIEW_MATRIX = "u_ViewMatrix"
private val U_MODEL_MATRIX = "u_ModelMatrix"
private val U_PROJECTION_MATRIX = "u_ProjectionMatrix"
private val A_POSITION = "a_Position"
private val A_TEXTURE_COORDINATE = "a_TextureCoordinates"
private val U_TEXTURE_UNIT = "u_TextureUnit"
private var uModelMatrixAttr: Int = 0
private var uViewMatrixAttr: Int = 0
private var uProjectionMatrixAttr: Int = 0
private var aPositionAttr: Int = 0
private var aTextureCoordinateAttr: Int = 0
private var uTextureUnitAttr: Int = 0
private var mTextureId: IntArray? = null
var vertexFloatBuffer = ByteBuffer
.allocateDirect(8 * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
var textureFloagBuffer = ByteBuffer
.allocateDirect(8 * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
val CubeSize = 1.0f
val HalfCubeSize = CubeSize / 2
var eyeX = 0.0f
val eyeY = 0.0f
var eyeZ = 2.0f
val eyeDistance = 3.0f
val lookX = 0.0f
val lookY = 0.0f
val lookZ = 0.0f
val upX = 0.0f
val upY = 1.0f
val upZ = 0.0f
private val mMatrixStateOnly: MatrixStateOnly
init {
LogUtil.d("cube texture")
mProgram = ShaderHelper.buildProgram(mContext, R.raw.texture_vertex_shader, R.raw.texture_fragment_shader)
GLES20.glUseProgram(mProgram)
initVertexData()
initTextureData()
POSITION_COMPONENT_COUNT = 2
mMatrixStateOnly = MatrixStateOnly()
}
// The vertices of the six faces all share the same coordinates; a transformation matrix moves them into position for drawing.
private fun initVertexData() {
val faceLeft = -CubeSize / 2
val faceRight = -faceLeft
val faceTop = CubeSize / 2
val faceBottom = -faceTop
val vertices = floatArrayOf(
faceLeft, faceBottom,
faceRight, faceBottom,
faceLeft, faceTop,
faceRight, faceTop
)
vertexFloatBuffer.put(vertices)
vertexFloatBuffer.position(0)
}
// The texture coordinates of the six faces are all the same; a transformation matrix moves them into position for drawing.
private fun initTextureData() {
val texCoords = floatArrayOf(
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f
)
textureFloagBuffer.put(texCoords)
textureFloagBuffer.position(0)
}
override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
super.onSurfaceCreated(gl, config)
GLES20.glClearColor(0f, 0f, 0f, 1.0f)
// Enable depth testing
GLES30.glEnable(GLES30.GL_DEPTH_TEST)
// Enable back-face culling to speed up rendering
GLES30.glEnable(GLES30.GL_CULL_FACE)
aPositionAttr = GLES20.glGetAttribLocation(mProgram, A_POSITION)
uModelMatrixAttr = GLES20.glGetUniformLocation(mProgram, U_MODEL_MATRIX)
uViewMatrixAttr = GLES20.glGetUniformLocation(mProgram, U_VIEW_MATRIX)
uProjectionMatrixAttr = GLES20.glGetUniformLocation(mProgram, U_PROJECTION_MATRIX)
aTextureCoordinateAttr = GLES20.glGetAttribLocation(mProgram, A_TEXTURE_COORDINATE)
uTextureUnitAttr = GLES20.glGetUniformLocation(mProgram, U_TEXTURE_UNIT)
mTextureId = TextureHelper.loadCubeTexture(mContext, TextureHelper.CUBE)
GLES20.glUniform1i(uTextureUnitAttr, 0)
}
override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
super.onSurfaceChanged(gl, width, height)
GLES20.glViewport(0, 0, width, height)
val ratio = width.toFloat() / height
val left = -ratio
val bottom = -1.0f
val top = 1.0f
val near = 1.0f
val far = 12.0f
Observable.interval(30, TimeUnit.MILLISECONDS)
.subscribe {
eyeX = eyeDistance * Math.sin((radian * num).toDouble()).toFloat()
eyeZ = eyeDistance * Math.cos((radian * num).toDouble()).toFloat()
num++
if (num > 360) {
num = 0
}
}
mMatrixStateOnly.setCamera(eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ)
mMatrixStateOnly.setProjectFrustum(left, ratio, bottom, top, near, far)
mMatrixStateOnly.setInitStack()
mMatrixStateOnly.rotate(-30f, 0f, 0f, 1f)
mMatrixStateOnly.scale(0.398f, 0.555f, 0f)
mMatrixStateOnly.translate(0.5f, 0.8f, 0f)
}
var num = 0
var RotateNum = 360
val radian = (2 * Math.PI / RotateNum).toFloat()
override fun onDrawFrame(gl: GL10?) {
super.onDrawFrame(gl)
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f)
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT or GLES20.GL_DEPTH_BUFFER_BIT)
// GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT)
vertexFloatBuffer.position(0)
GLES20.glVertexAttribPointer(aPositionAttr, POSITION_COMPONENT_COUNT, GLES20.GL_FLOAT, false, 0, vertexFloatBuffer)
GLES20.glEnableVertexAttribArray(aPositionAttr)
textureFloagBuffer.position(0)
GLES20.glVertexAttribPointer(aTextureCoordinateAttr, POSITION_COMPONENT_COUNT, GLES20.GL_FLOAT, false, 0, textureFloagBuffer)
GLES20.glEnableVertexAttribArray(aTextureCoordinateAttr)
GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
// Adjust the camera to view different faces
mMatrixStateOnly.setCamera(eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ)
GLES20.glUniformMatrix4fv(uProjectionMatrixAttr, 1, false, mMatrixStateOnly.getProMatrix(), 0)
GLES20.glUniformMatrix4fv(uViewMatrixAttr, 1, false, mMatrixStateOnly.getVMatrix(), 0)
mMatrixStateOnly.pushMatrix()
// val time = SystemClock.uptimeMillis() % 10000L
// val angleInDegrees = 360.0f / 10000.0f * time.toInt()
// View different faces by changing the rotation matrix
// MatrixState.rotate(angleInDegrees, 0f, 1.0f, 0f)
// Start drawing each face of the cube
// Front face
mMatrixStateOnly.pushMatrix()
mMatrixStateOnly.translate(0f, 0f, HalfCubeSize)
GLES20.glUniformMatrix4fv(uModelMatrixAttr, 1, false, mMatrixStateOnly.getMMatrix(), 0)
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId!![0])
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
mMatrixStateOnly.popMatrix()
// Back face
mMatrixStateOnly.pushMatrix()
mMatrixStateOnly.translate(0f, 0f, -HalfCubeSize)
mMatrixStateOnly.rotate(180f, 0f, 1f, 0f)
GLES20.glUniformMatrix4fv(uModelMatrixAttr, 1, false, mMatrixStateOnly.getMMatrix(), 0)
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId!![1])
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
mMatrixStateOnly.popMatrix()
// Top face
mMatrixStateOnly.pushMatrix()
mMatrixStateOnly.translate(0f, HalfCubeSize, 0f)
mMatrixStateOnly.rotate(-90f, 1f, 0f, 0f)
GLES20.glUniformMatrix4fv(uModelMatrixAttr, 1, false, mMatrixStateOnly.getMMatrix(), 0)
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId!![2])
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
mMatrixStateOnly.popMatrix()
// Bottom face
mMatrixStateOnly.pushMatrix()
mMatrixStateOnly.translate(0f, -HalfCubeSize, 0f)
mMatrixStateOnly.rotate(90f, 1f, 0f, 0f)
GLES20.glUniformMatrix4fv(uModelMatrixAttr, 1, false, mMatrixStateOnly.getMMatrix(), 0)
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId!![3])
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
mMatrixStateOnly.popMatrix()
// Left face
mMatrixStateOnly.pushMatrix()
mMatrixStateOnly.translate(HalfCubeSize, 0f, 0f)
mMatrixStateOnly.rotate(-90f, 1f, 0f, 0f)
mMatrixStateOnly.rotate(90f, 0f, 1f, 0f)
GLES20.glUniformMatrix4fv(uModelMatrixAttr, 1, false, mMatrixStateOnly.getMMatrix(), 0)
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId!![4])
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
mMatrixStateOnly.popMatrix()
// Right face
mMatrixStateOnly.pushMatrix()
mMatrixStateOnly.translate(-HalfCubeSize, 0f, 0f)
mMatrixStateOnly.rotate(90f, 1f, 0f, 0f)
mMatrixStateOnly.rotate(-90f, 0f, 1f, 0f)
GLES20.glUniformMatrix4fv(uModelMatrixAttr, 1, false, mMatrixStateOnly.getMMatrix(), 0)
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureId!![5])
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
mMatrixStateOnly.popMatrix()
mMatrixStateOnly.popMatrix()
GLES20.glDisableVertexAttribArray(aPositionAttr)
GLES20.glDisableVertexAttribArray(aTextureCoordinateAttr)
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0)
}
override fun onSurfaceDestroyed() {
super.onSurfaceDestroyed()
GLES20.glDeleteProgram(mProgram)
}
fun getMatrixStateOnly(): MatrixStateOnly {
return mMatrixStateOnly
}
} | {
"pile_set_name": "Github"
} |
/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package internalversion
import (
rest "k8s.io/client-go/rest"
"k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset/scheme"
)
type AppsInterface interface {
RESTClient() rest.Interface
ControllerRevisionsGetter
StatefulSetsGetter
}
// AppsClient is used to interact with features provided by the apps group.
type AppsClient struct {
restClient rest.Interface
}
func (c *AppsClient) ControllerRevisions(namespace string) ControllerRevisionInterface {
return newControllerRevisions(c, namespace)
}
func (c *AppsClient) StatefulSets(namespace string) StatefulSetInterface {
return newStatefulSets(c, namespace)
}
// NewForConfig creates a new AppsClient for the given config.
func NewForConfig(c *rest.Config) (*AppsClient, error) {
config := *c
if err := setConfigDefaults(&config); err != nil {
return nil, err
}
client, err := rest.RESTClientFor(&config)
if err != nil {
return nil, err
}
return &AppsClient{client}, nil
}
// NewForConfigOrDie creates a new AppsClient for the given config and
// panics if there is an error in the config.
func NewForConfigOrDie(c *rest.Config) *AppsClient {
client, err := NewForConfig(c)
if err != nil {
panic(err)
}
return client
}
// New creates a new AppsClient for the given RESTClient.
func New(c rest.Interface) *AppsClient {
return &AppsClient{c}
}
func setConfigDefaults(config *rest.Config) error {
config.APIPath = "/apis"
if config.UserAgent == "" {
config.UserAgent = rest.DefaultKubernetesUserAgent()
}
if config.GroupVersion == nil || config.GroupVersion.Group != scheme.Scheme.PrioritizedVersionsForGroup("apps")[0].Group {
gv := scheme.Scheme.PrioritizedVersionsForGroup("apps")[0]
config.GroupVersion = &gv
}
config.NegotiatedSerializer = scheme.Codecs
if config.QPS == 0 {
config.QPS = 5
}
if config.Burst == 0 {
config.Burst = 10
}
return nil
}
// RESTClient returns a RESTClient that is used to communicate
// with API server by this client implementation.
func (c *AppsClient) RESTClient() rest.Interface {
if c == nil {
return nil
}
return c.restClient
}
| {
"pile_set_name": "Github"
} |
title: TCC-Transaction Source Code Analysis - Setting Up the Debugging Environment
date: 2018-02-01
tags:
categories: TCC-Transaction
permalink: TCC-Transaction/build-debugging-environment
---
Abstract: originally published at http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/ by 「芋道源码」. You are welcome to repost it; please keep this abstract. Thanks!
**This article is mainly based on the official TCC-Transaction 1.2.3.3 release**
- [1. Required Tools](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/)
- [2. Pulling the Source Code](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/)
- [3. Initializing the Database](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/)
- [4. Starting the capital Project](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/)
- [5. Starting the redpacket Project](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/)
- [6. Starting the order Project](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/)
- [666. Easter Egg](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/)
-------
![](http://www.iocoder.cn/images/common/wechat_mp_2018_05_18.jpg)
> 🙂🙂🙂 Follow the **WeChat official account: 【芋道源码】** for these benefits:
> 1. The list of **all** RocketMQ / MyCAT / Sharding-JDBC source code analysis articles
> 2. The **GitHub addresses of the Chinese-annotated source code** for RocketMQ / MyCAT / Sharding-JDBC
> 3. **Every** comment you leave with a question about the source code gets a **careful** reply. **You can even ask for help if you do not yet know how to read source code at all.**
> 4. **Real-time** notification of **new** source code analysis articles. **Roughly one article per week.**
> 5. A **serious** WeChat group for discussing source code.
-------
# 1. Required Tools
* Maven
* Git
* JDK
* MySQL
* IntelliJ IDEA
# 2. Pulling the Source Code
`Fork` the official repository [https://github.com/changmingxie/tcc-transaction.git](https://github.com/changmingxie/tcc-transaction.git) into a repository of your own. Why `Fork`? Since we are about to read and debug the source code, we will probably write some comments, and with a repository of our own we can commit freely. 😈
Use `IntelliJ IDEA` to pull the code from the forked repository. Once the pull finishes, `Maven` will download the dependency packages, which may take some time, so please be patient.
This article is based on the `master-1.2.x` branch.
# 3. Initializing the Database
The project officially ships two demo sample projects:
* tcc-transaction-dubbo-sample
* tcc-transaction-http-sample
Considering that not everyone has used the Dubbo service framework, we take the tcc-transaction-http-sample project as the example.
> Narrator: pay close attention, it is the tcc-transaction-http-sample project. Absolutely, absolutely, absolutely do not use the wrong one!!!
Open the tcc-transaction-http-sample/src/main/dbscripts directory; it contains four SQL script files:
* `create_db_cap.sql`: database initialization script for the tcc-transaction-http-capital project.
* `create_db_ord.sql`: database initialization script for the tcc-transaction-http-order project.
* `create_db_red.sql`: database initialization script for the tcc-transaction-http-redpacket project.
* `create_db_tcc.sql`: initialization script for the **underlying** tcc-transaction database.
The author uses Navicat to run the database scripts: in the Navicat menu choose Connection -> Execute SQL File, select the script file, and execute the scripts one by one.
At the moment the database scripts do not use a `USE` statement to select the corresponding database, so one needs to be added to each script. Taking `create_db_cap.sql` as an example:
```SQL
CREATE DATABASE `tcc_cap` /*!40100 DEFAULT CHARACTER SET utf8 */;
-- Add the USE statement
USE `tcc_cap`;
```
# 4. Starting the capital Project
1. Modify the `jdbc.properties` file in the project and **fill in your own database address**.
2. Configure Tomcat in IDEA and start the project. Note the following:
```XML
// appcontext-service-provider.xml
<bean id="httpServer"
class="org.springframework.remoting.support.SimpleHttpServerFactoryBean">
<property name="contexts">
<util:map>
<entry key="/remoting/CapitalTradeOrderService" value-ref="capitalTradeOrderServiceExporter"/>
<entry key="/remoting/CapitalAccountService" value-ref="capitalAccountServiceExporter"/>
</util:map>
</property>
<property name="port" value="8081"/>
</bean>
```
* By default it opens port 8081 to provide the interface services, so the port configured for Tomcat must not be 8081 again, to avoid a conflict. For example, the author uses 18081.
3. Visit `http://127.0.0.1:18081/`. Seeing "hello tcc transacton http sample capital" means the project has started successfully. **`18081` is the Tomcat port you configured**.
# 5. Starting the redpacket Project
Same as the tcc-transaction-http-capital project.
# 6. Starting the order Project
1. Modify the `jdbc.properties` file in the project and **fill in your own database address**.
2. Configure Tomcat in IDEA and start the project.
3. Visit `http://127.0.0.1:8080/`. Seeing "sample 说明..." (the sample description page) means the project has started successfully. **`8080` is the Tomcat port you configured**.
4. Click [商品列表链接] (product list) -> [购买] (buy) -> [支付] (pay). If you see "支付成功" (payment succeeded) or "支付失败" (payment failed), congratulations 🎉, you have successfully set up your debugging environment. Enjoy playing with it.
# 666. Easter Egg
![知识星球](http://www.iocoder.cn/images/Architecture/2017_12_29/01.png)
Setting up the debugging environment is the first step in reading the source code. If you cannot get it working, please leave a message on the author's official account (**芋道源码**). The author will give you 1:1 advanced support.
Also, this is a series: this series covers TCC-Transaction, and the next series will cover ByteTCC. Happy?!
Friend, hop on board quickly and share this to your WeChat Moments!
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:theme="@style/Base.Theme.OpenWith"
android:paddingTop="8dp">
<EditText
android:id="@+id/add_to_home_screen_title"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_marginEnd="20dp"
android:layout_marginStart="20dp"
android:hint="@string/add_to_home_screen_dialog_hint"
android:singleLine="true"
android:imeOptions="actionGo"
android:imeActionLabel="@string/add"
android:inputType="textCapSentences"
tools:ignore="Autofill" />
<com.tasomaniac.android.widget.DelayedProgressBar
android:id="@+id/add_to_home_screen_progress"
style="?android:progressBarStyleSmall"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_vertical|end"
android:layout_marginEnd="24dp"
android:visibility="gone" />
</FrameLayout>
| {
"pile_set_name": "Github"
} |
require 'rack'
require 'uri'
class Refraction
class Request < Rack::Request
attr_reader :action, :status, :message
def method; request_method; end
def query; query_string; end
### actions
def set(options)
if options.is_a?(String)
@re_location = options
else
@re_scheme = options[:protocol] if options[:protocol] # :protocol is alias for :scheme
@re_scheme = options[:scheme] if options[:scheme]
@re_host = options[:host] if options[:host]
@re_port = options[:port] if options[:port]
@re_path = options[:path] if options[:path]
@re_query = options[:query] if options[:query]
end
end
def rewrite!(options)
@action = :rewrite
set(options)
end
def permanent!(options)
@action = :permanent
@status = 301
set(options)
@message = "moved to #{@uri}"
end
def found!(options)
@action = :found
@status = 302
set(options)
@message = "moved to #{@uri}"
end
def respond!(status, headers, content)
@action = :respond
@status = status
@headers = headers
@message = content
end
### response
def response
headers = @headers || { 'Location' => location, 'Content-Type' => 'text/plain' }
headers['Content-Length'] = message.length.to_s
[status, headers, [message]]
end
def location
@re_location || url
end
def scheme; @re_scheme || super; end
def host; @re_host || super; end
def path; @re_path || super; end
def query_string; @re_query || super; end
def port
@re_port || ((@re_scheme || @re_host) && default_port) || super
end
def default_port
case scheme
when "http" ; 80
when "https" ; 443
end
end
def http_host
self.port ? "#{self.host}:#{self.port}" : self.host
end
end ### class Request
def self.configure(&block)
@rules = block
end
def self.rules
@rules
end
def initialize(app)
@app = app
end
def rules
self.class.rules
end
def call(env)
if self.rules
request = Request.new(env)
self.rules.call(request)
case request.action
when :permanent, :found, :respond
request.response
when :rewrite
env["rack.url_scheme"] = request.scheme
env["HTTP_HOST"] = request.http_host
env["SERVER_NAME"] = request.host
env["HTTP_PORT"] = request.port if request.port
env["PATH_INFO"] = request.path
env["QUERY_STRING"] = request.query
env["REQUEST_URI"] = request.fullpath
@app.call(env)
else
@app.call(env)
end
else
@app.call(env)
end
end
end
# Rack version compatibility shim
if Rack.release == "1.0"
class Rack::Request
def path
script_name + path_info
end
end
end
| {
"pile_set_name": "Github"
} |
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!74 &7400000
AnimationClip:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: growToHeight180
serializedVersion: 6
m_Legacy: 0
m_Compressed: 0
m_UseHighQualityCurve: 1
m_RotationCurves: []
m_CompressedRotationCurves: []
m_PositionCurves: []
m_ScaleCurves: []
m_FloatCurves:
- curve:
serializedVersion: 2
m_Curve:
- time: 0
value: 0
inSlope: 0
outSlope: 0
tangentMode: 1
- time: .333333343
value: 180
inSlope: -894.097351
outSlope: 800
tangentMode: 1
m_PreInfinity: 2
m_PostInfinity: 2
attribute: m_PreferredHeight
path:
classID: 114
script: {fileID: 1679637790, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3}
m_PPtrCurves: []
m_SampleRate: 60
m_WrapMode: 0
m_Bounds:
m_Center: {x: 0, y: 0, z: 0}
m_Extent: {x: 0, y: 0, z: 0}
m_ClipBindingConstant:
genericBindings:
- path: 0
attribute: 379034303
script: {fileID: 1679637790, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3}
classID: 114
customType: 0
isPPtrCurve: 0
pptrCurveMapping: []
m_AnimationClipSettings:
serializedVersion: 2
m_StartTime: 0
m_StopTime: .333333343
m_OrientationOffsetY: 0
m_Level: 0
m_CycleOffset: 0
m_LoopTime: 0
m_LoopBlend: 0
m_LoopBlendOrientation: 0
m_LoopBlendPositionY: 0
m_LoopBlendPositionXZ: 0
m_KeepOriginalOrientation: 0
m_KeepOriginalPositionY: 1
m_KeepOriginalPositionXZ: 0
m_HeightFromFeet: 0
m_Mirror: 0
m_EditorCurves:
- curve:
serializedVersion: 2
m_Curve:
- time: 0
value: 0
inSlope: 0
outSlope: 0
tangentMode: 1
- time: .333333343
value: 180
inSlope: -894.097351
outSlope: 800
tangentMode: 1
m_PreInfinity: 2
m_PostInfinity: 2
attribute: m_PreferredHeight
path:
classID: 114
script: {fileID: 1679637790, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3}
m_EulerEditorCurves: []
m_HasGenericRootTransform: 0
m_HasMotionFloatCurves: 0
m_GenerateMotionCurves: 0
m_Events: []
| {
"pile_set_name": "Github"
} |
import NTask from "../ntask.js";
import Template from "../templates/user.js";
class User extends NTask {
constructor(body) {
super();
this.body = body;
}
render() {
this.renderUserData();
}
addEventListener() {
this.userCancelClick();
}
renderUserData() {
const opts = {
method: "GET",
url: `${this.URL}/user`,
json: true,
headers: {
authorization: localStorage.getItem("token")
}
};
this.request(opts, (err, resp, data) => {
if (err || resp.status === 412) {
this.emit("error", err);
} else {
this.body.innerHTML = Template.render(data);
this.addEventListener();
}
});
}
userCancelClick() {
const button = this.body.querySelector("[data-remove-account]");
button.addEventListener("click", (e) => {
e.preventDefault();
if (confirm("This will cancel your account, are you sure?")) {
const opts = {
method: "DELETE",
url: `${this.URL}/user`,
headers: {
authorization: localStorage.getItem("token")
}
};
this.request(opts, (err, resp, data) => {
if (err || resp.status === 412) {
this.emit("remove-error", err);
} else {
this.emit("remove-account");
}
});
}
});
}
}
module.exports = User;
| {
"pile_set_name": "Github"
} |
---
title: UnionCaseInfo.GetFields Method (F#)
description: UnionCaseInfo.GetFields Method (F#)
keywords: visual f#, f#, functional programming
author: dend
manager: danielfe
ms.date: 05/16/2016
ms.topic: language-reference
ms.prod: visual-studio-dev14
ms.technology: devlang-fsharp
ms.assetid: 1d3acc9a-0087-43ef-b07e-32da355b67bd
---
# UnionCaseInfo.GetFields Method (F#)
The fields associated with the case, represented by a `System.Reflection.PropertyInfo`.
**Namespace/Module Path:** Microsoft.FSharp.Reflection
**Assembly:** FSharp.Core (in FSharp.Core.dll)
## Syntax
```fsharp
// Signature:
member this.GetFields : unit -> PropertyInfo []
// Usage:
unionCaseInfo.GetFields ()
```
## Return Value
The fields associated with the case as an array of `System.Reflection.PropertyInfo`.
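## Example
The original page does not include a usage sample, so here is a minimal illustrative sketch. The `Shape` union type below is hypothetical and introduced only for this example; the reflection calls (`FSharpType.GetUnionCases` and `GetFields`) are the standard FSharp.Core APIs.
```fsharp
open Microsoft.FSharp.Reflection
// Hypothetical union type used only for this illustration.
type Shape =
    | Circle of float
    | Rectangle of float * float
// Enumerate the union cases and inspect the fields carried by each case.
for unionCase in FSharpType.GetUnionCases(typeof<Shape>) do
    let fields = unionCase.GetFields()
    printfn "%s has %d field(s)" unionCase.Name fields.Length
    for field in fields do
        printfn "  %s : %s" field.Name field.PropertyType.Name
```
Running this should report a single compiler-generated `Item` field for `Circle` and `Item1`/`Item2` fields for `Rectangle`, each typed as `Double`.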
## Platforms
Windows 8, Windows 7, Windows Server 2012, Windows Server 2008 R2
## Version Information
**F# Core Library Versions**
Supported in: 2.0, 4.0, Portable
## See Also
[Reflection.UnionCaseInfo Class (F#)](Reflection.UnionCaseInfo-Class-%5BFSharp%5D.md)
[Microsoft.FSharp.Reflection Namespace (F#)](Microsoft.FSharp.Reflection-Namespace-%5BFSharp%5D.md) | {
"pile_set_name": "Github"
} |
using System.Security.Claims;
using GraphQL.Authorization;
namespace GraphQl.AspNetCore
{
public class GraphQLUserContext : IProvideClaimsPrincipal
{
public ClaimsPrincipal User { get; set; }
}
}
| {
"pile_set_name": "Github"
} |
//======= Copyright (c) Valve Corporation, All rights reserved. ===============
//
// Purpose: Allows Enums to be shown in the inspector as flags
//
//=============================================================================
using UnityEngine;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace Valve.VR.InteractionSystem
{
//-------------------------------------------------------------------------
public class EnumFlags : PropertyAttribute
{
public EnumFlags() { }
}
#if UNITY_EDITOR
//-------------------------------------------------------------------------
[CustomPropertyDrawer( typeof( EnumFlags ) )]
public class EnumFlagsPropertyDrawer : PropertyDrawer
{
public override void OnGUI( Rect position, SerializedProperty property, GUIContent label )
{
property.intValue = EditorGUI.MaskField( position, label, property.intValue, property.enumNames );
}
}
#endif
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2016, Canon Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Canon Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include "CachedRawResourceClient.h"
#include "CachedResourceHandle.h"
#include "ResourceRequest.h"
namespace WebCore {
class CachedRawResource;
class Document;
class DocumentThreadableLoader;
class ResourceError;
class CrossOriginPreflightChecker final : private CachedRawResourceClient {
public:
static void doPreflight(DocumentThreadableLoader&, ResourceRequest&&);
CrossOriginPreflightChecker(DocumentThreadableLoader&, ResourceRequest&&);
~CrossOriginPreflightChecker();
void startPreflight();
void setDefersLoading(bool);
private:
void notifyFinished(CachedResource&) final;
void redirectReceived(CachedResource&, ResourceRequest&&, const ResourceResponse&, CompletionHandler<void(ResourceRequest&&)>&&) final;
static void handleLoadingFailure(DocumentThreadableLoader&, unsigned long, const ResourceError&);
static void validatePreflightResponse(DocumentThreadableLoader&, ResourceRequest&&, unsigned long, const ResourceResponse&);
DocumentThreadableLoader& m_loader;
CachedResourceHandle<CachedRawResource> m_resource;
ResourceRequest m_request;
};
} // namespace WebCore
| {
"pile_set_name": "Github"
} |
import tensorflow as tf
from models.stacked_bidirectional import StackedBidirectional
class LstmStackedBidirectional(StackedBidirectional):
def __init__(self, review_summary_file, checkpointer, num_layers, attention=False):
"""
:param review_summary_file:
:param checkpointer:
:param num_layers:
:param attention:
"""
super(LstmStackedBidirectional, self).__init__(review_summary_file, checkpointer, num_layers, attention)
def get_cell(self):
"""
Return the atomic RNN cell type used for this model
:return: The atomic RNN Cell
"""
return tf.nn.rnn_cell.LSTMCell(self.memory_dim)
| {
"pile_set_name": "Github"
} |
<?xml version='1.0' encoding='utf-8'?>
<testproblem>
<name>Standing free surface wave in channel using P1DG-P2.</name>
<owner userid="skramer"/>
<tags>flml</tags>
<problem_definition length="short" nprocs="1">
<command_line>fluidity -v3 -l standing_wave.flml</command_line>
</problem_definition>
<variables>
<variable name="solvers_converged" language="python">import os
files = os.listdir("./")
solvers_converged = not "matrixdump" in files and not "matrixdump.info" in files</variable>
<variable name="fs_left" language="python">
import h5py
import numpy as np
f = h5py.File('standing_wave.detectors.h5part', 'r')
lt_id = f.attrs['Left%ids'][0] - 1
fsl = np.array([f['/Step#{}/Water%FreeSurface'.format(i)][lt_id] for i in range(len(f))])
t = np.array([f['/Step#{}'.format(i)].attrs['time'][0] for i in range(len(f))])
fs_left={}
fs_left['min']=fsl.min()
fs_left['min_time']=t[fsl.argmin()]
fs_left['end_value']=fsl[-1]</variable>
<variable name="fs_right" language="python">
import h5py
import numpy as np
f = h5py.File('standing_wave.detectors.h5part', 'r')
rt_id = f.attrs['Right%ids'][0] - 1
fsr = np.array([f['/Step#{}/Water%FreeSurface'.format(i)][rt_id] for i in range(len(f))])
t = np.array([f['/Step#{}'.format(i)].attrs['time'][0] for i in range(len(f))])
fs_right={}
fs_right['max']=fsr.max()
fs_right['max_time']=t[fsr.argmax()]
fs_right['end_value']=fsr[-1]</variable>
<variable name="fs_integral_max" language="python">import fluidity_tools
stat=fluidity_tools.stat_parser('standing_wave.stat')
fsi=stat['Water']['FreeSurface']['integral']
fs_integral_max=abs(fsi).max()<comment>Horizontal integral of free surface should be zero</comment></variable>
<variable name="fs_integral_range" language="python">import fluidity_tools
stat=fluidity_tools.stat_parser('standing_wave.stat')
fsi=stat['Water']['FreeSurface']['integral']
fs_integral_range=fsi.max()-fsi.min()<comment>Horizontal integral of free surface should be zero</comment></variable>
</variables>
<pass_tests>
<test name="Solvers converged" language="python">assert(solvers_converged)</test>
<test name="fs_left_min" language="python">assert(fs_left['min']<-0.95)</test>
<test name="fs_left_min_time" language="python">assert abs(fs_left['min_time']-1e4)<400</test>
<test name="fs_end_value" language="python">assert abs(fs_left['end_value']-1.0)<0.2</test>
<test name="fs_right_max" language="python">assert(fs_right['max']>-0.95)</test>
<test name="fs_right_max_time" language="python">assert abs(fs_right['max_time']-1e4)<200</test>
<test name="fs_end_value" language="python">assert abs(fs_right['end_value']+1.0)<0.2</test>
<test name="fs_integral_max_zero" language="python">area=1e6*1e5
assert fs_integral_max<area*1e-5</test>
<test name="fs_integral_range_zero" language="python">assert fs_integral_range<1.0</test>
</pass_tests>
<warn_tests/>
</testproblem>
| {
"pile_set_name": "Github"
} |
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package util
import (
"bytes"
"errors"
"fmt"
"io"
"net/url"
"os"
"strconv"
"strings"
"time"
jsonpatch "github.com/evanphx/json-patch"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/api/meta"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
utilerrors "k8s.io/apimachinery/pkg/util/errors"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/apimachinery/pkg/util/yaml"
"k8s.io/cli-runtime/pkg/genericclioptions"
"k8s.io/cli-runtime/pkg/resource"
"k8s.io/client-go/dynamic"
"k8s.io/client-go/rest"
"k8s.io/client-go/scale"
"k8s.io/client-go/tools/clientcmd"
"k8s.io/klog"
utilexec "k8s.io/utils/exec"
)
const (
ApplyAnnotationsFlag = "save-config"
DefaultErrorExitCode = 1
)
type debugError interface {
DebugError() (msg string, args []interface{})
}
// AddSourceToErr adds handleResourcePrefix and source string to error message.
// verb is the string like "creating", "deleting" etc.
// source is the filename or URL to the template file(*.json or *.yaml), or stdin to use to handle the resource.
func AddSourceToErr(verb string, source string, err error) error {
if source != "" {
if statusError, ok := err.(apierrors.APIStatus); ok {
status := statusError.Status()
status.Message = fmt.Sprintf("error when %s %q: %v", verb, source, status.Message)
return &apierrors.StatusError{ErrStatus: status}
}
return fmt.Errorf("error when %s %q: %v", verb, source, err)
}
return err
}
var fatalErrHandler = fatal
// BehaviorOnFatal allows you to override the default behavior when a fatal
// error occurs, which is to call os.Exit(code). You can pass 'panic' as a function
// here if you prefer the panic() over os.Exit(1).
func BehaviorOnFatal(f func(string, int)) {
fatalErrHandler = f
}
// DefaultBehaviorOnFatal allows you to undo any previous override. Useful in
// tests.
func DefaultBehaviorOnFatal() {
fatalErrHandler = fatal
}
// fatal prints the message (if provided) and then exits. If V(2) or greater,
// klog.Fatal is invoked for extended information.
func fatal(msg string, code int) {
if klog.V(2) {
klog.FatalDepth(2, msg)
}
if len(msg) > 0 {
// add newline if needed
if !strings.HasSuffix(msg, "\n") {
msg += "\n"
}
fmt.Fprint(os.Stderr, msg)
}
os.Exit(code)
}
// ErrExit may be passed to CheckError to instruct it to output nothing but exit with
// status code 1.
var ErrExit = fmt.Errorf("exit")
// CheckErr prints a user friendly error to STDERR and exits with a non-zero
// exit code. Unrecognized errors will be printed with an "error: " prefix.
//
// This method is generic to the command in use and may be used by non-Kubectl
// commands.
func CheckErr(err error) {
checkErr(err, fatalErrHandler)
}
// CheckDiffErr prints a user friendly error to STDERR and exits with a
// non-zero and non-one exit code. Unrecognized errors will be printed
// with an "error: " prefix.
//
// This method is meant specifically for `kubectl diff` and may be used
// by other commands.
func CheckDiffErr(err error) {
checkErr(err, func(msg string, code int) {
fatalErrHandler(msg, code+1)
})
}
// checkErr formats a given error as a string and calls the passed handleErr
// func with that string and an kubectl exit code.
func checkErr(err error, handleErr func(string, int)) {
// unwrap aggregates of 1
if agg, ok := err.(utilerrors.Aggregate); ok && len(agg.Errors()) == 1 {
err = agg.Errors()[0]
}
if err == nil {
return
}
switch {
case err == ErrExit:
handleErr("", DefaultErrorExitCode)
case apierrors.IsInvalid(err):
details := err.(*apierrors.StatusError).Status().Details
s := "The request is invalid"
if details == nil {
handleErr(s, DefaultErrorExitCode)
return
}
if len(details.Kind) != 0 || len(details.Name) != 0 {
s = fmt.Sprintf("The %s %q is invalid", details.Kind, details.Name)
}
if len(details.Causes) > 0 {
errs := statusCausesToAggrError(details.Causes)
handleErr(MultilineError(s+": ", errs), DefaultErrorExitCode)
} else {
handleErr(s, DefaultErrorExitCode)
}
case clientcmd.IsConfigurationInvalid(err):
handleErr(MultilineError("Error in configuration: ", err), DefaultErrorExitCode)
default:
switch err := err.(type) {
case *meta.NoResourceMatchError:
switch {
case len(err.PartialResource.Group) > 0 && len(err.PartialResource.Version) > 0:
handleErr(fmt.Sprintf("the server doesn't have a resource type %q in group %q and version %q", err.PartialResource.Resource, err.PartialResource.Group, err.PartialResource.Version), DefaultErrorExitCode)
case len(err.PartialResource.Group) > 0:
handleErr(fmt.Sprintf("the server doesn't have a resource type %q in group %q", err.PartialResource.Resource, err.PartialResource.Group), DefaultErrorExitCode)
case len(err.PartialResource.Version) > 0:
handleErr(fmt.Sprintf("the server doesn't have a resource type %q in version %q", err.PartialResource.Resource, err.PartialResource.Version), DefaultErrorExitCode)
default:
handleErr(fmt.Sprintf("the server doesn't have a resource type %q", err.PartialResource.Resource), DefaultErrorExitCode)
}
case utilerrors.Aggregate:
handleErr(MultipleErrors(``, err.Errors()), DefaultErrorExitCode)
case utilexec.ExitError:
handleErr(err.Error(), err.ExitStatus())
default: // for any other error type
msg, ok := StandardErrorMessage(err)
if !ok {
msg = err.Error()
if !strings.HasPrefix(msg, "error: ") {
msg = fmt.Sprintf("error: %s", msg)
}
}
handleErr(msg, DefaultErrorExitCode)
}
}
}
func statusCausesToAggrError(scs []metav1.StatusCause) utilerrors.Aggregate {
errs := make([]error, 0, len(scs))
errorMsgs := sets.NewString()
for _, sc := range scs {
// check for duplicate error messages and skip them
msg := fmt.Sprintf("%s: %s", sc.Field, sc.Message)
if errorMsgs.Has(msg) {
continue
}
errorMsgs.Insert(msg)
errs = append(errs, errors.New(msg))
}
return utilerrors.NewAggregate(errs)
}
// StandardErrorMessage translates common errors into a human readable message, or returns
// false if the error is not one of the recognized types. It may also log extended
// information to klog.
//
// This method is generic to the command in use and may be used by non-Kubectl
// commands.
func StandardErrorMessage(err error) (string, bool) {
if debugErr, ok := err.(debugError); ok {
klog.V(4).Infof(debugErr.DebugError())
}
status, isStatus := err.(apierrors.APIStatus)
switch {
case isStatus:
switch s := status.Status(); {
case s.Reason == metav1.StatusReasonUnauthorized:
return fmt.Sprintf("error: You must be logged in to the server (%s)", s.Message), true
case len(s.Reason) > 0:
return fmt.Sprintf("Error from server (%s): %s", s.Reason, err.Error()), true
default:
return fmt.Sprintf("Error from server: %s", err.Error()), true
}
case apierrors.IsUnexpectedObjectError(err):
return fmt.Sprintf("Server returned an unexpected response: %s", err.Error()), true
}
switch t := err.(type) {
case *url.Error:
klog.V(4).Infof("Connection error: %s %s: %v", t.Op, t.URL, t.Err)
switch {
case strings.Contains(t.Err.Error(), "connection refused"):
host := t.URL
if server, err := url.Parse(t.URL); err == nil {
host = server.Host
}
return fmt.Sprintf("The connection to the server %s was refused - did you specify the right host or port?", host), true
}
return fmt.Sprintf("Unable to connect to the server: %v", t.Err), true
}
return "", false
}
// MultilineError returns a string representing an error that splits sub errors into their own
// lines. The returned string will end with a newline.
func MultilineError(prefix string, err error) string {
if agg, ok := err.(utilerrors.Aggregate); ok {
errs := utilerrors.Flatten(agg).Errors()
buf := &bytes.Buffer{}
switch len(errs) {
case 0:
return fmt.Sprintf("%s%v\n", prefix, err)
case 1:
return fmt.Sprintf("%s%v\n", prefix, messageForError(errs[0]))
default:
fmt.Fprintln(buf, prefix)
for _, err := range errs {
fmt.Fprintf(buf, "* %v\n", messageForError(err))
}
return buf.String()
}
}
return fmt.Sprintf("%s%s\n", prefix, err)
}
// PrintErrorWithCauses prints an error's kind, name, and each of the error's causes in a new line.
// The returned string will end with a newline.
// Returns true if a case exists to handle the error type, or false otherwise.
func PrintErrorWithCauses(err error, errOut io.Writer) bool {
switch t := err.(type) {
case *apierrors.StatusError:
errorDetails := t.Status().Details
if errorDetails != nil {
fmt.Fprintf(errOut, "error: %s %q is invalid\n\n", errorDetails.Kind, errorDetails.Name)
for _, cause := range errorDetails.Causes {
fmt.Fprintf(errOut, "* %s: %s\n", cause.Field, cause.Message)
}
return true
}
}
fmt.Fprintf(errOut, "error: %v\n", err)
return false
}
// MultipleErrors returns a newline delimited string containing
// the prefix and referenced errors in standard form.
func MultipleErrors(prefix string, errs []error) string {
buf := &bytes.Buffer{}
for _, err := range errs {
fmt.Fprintf(buf, "%s%v\n", prefix, messageForError(err))
}
return buf.String()
}
// messageForError returns the string representing the error.
func messageForError(err error) string {
msg, ok := StandardErrorMessage(err)
if !ok {
msg = err.Error()
}
return msg
}
func UsageErrorf(cmd *cobra.Command, format string, args ...interface{}) error {
msg := fmt.Sprintf(format, args...)
return fmt.Errorf("%s\nSee '%s -h' for help and examples", msg, cmd.CommandPath())
}
func IsFilenameSliceEmpty(filenames []string, directory string) bool {
return len(filenames) == 0 && directory == ""
}
func GetFlagString(cmd *cobra.Command, flag string) string {
s, err := cmd.Flags().GetString(flag)
if err != nil {
klog.Fatalf("error accessing flag %s for command %s: %v", flag, cmd.Name(), err)
}
return s
}
// GetFlagStringSlice can be used to accept multiple argument with flag repetition (e.g. -f arg1,arg2 -f arg3 ...)
func GetFlagStringSlice(cmd *cobra.Command, flag string) []string {
s, err := cmd.Flags().GetStringSlice(flag)
if err != nil {
klog.Fatalf("error accessing flag %s for command %s: %v", flag, cmd.Name(), err)
}
return s
}
// GetFlagStringArray can be used to accept multiple argument with flag repetition (e.g. -f arg1 -f arg2 ...)
func GetFlagStringArray(cmd *cobra.Command, flag string) []string {
s, err := cmd.Flags().GetStringArray(flag)
if err != nil {
klog.Fatalf("error accessing flag %s for command %s: %v", flag, cmd.Name(), err)
}
return s
}
func GetFlagBool(cmd *cobra.Command, flag string) bool {
b, err := cmd.Flags().GetBool(flag)
if err != nil {
klog.Fatalf("error accessing flag %s for command %s: %v", flag, cmd.Name(), err)
}
return b
}
// Assumes the flag has a default value.
func GetFlagInt(cmd *cobra.Command, flag string) int {
i, err := cmd.Flags().GetInt(flag)
if err != nil {
klog.Fatalf("error accessing flag %s for command %s: %v", flag, cmd.Name(), err)
}
return i
}
// Assumes the flag has a default value.
func GetFlagInt32(cmd *cobra.Command, flag string) int32 {
i, err := cmd.Flags().GetInt32(flag)
if err != nil {
klog.Fatalf("error accessing flag %s for command %s: %v", flag, cmd.Name(), err)
}
return i
}
// Assumes the flag has a default value.
func GetFlagInt64(cmd *cobra.Command, flag string) int64 {
i, err := cmd.Flags().GetInt64(flag)
if err != nil {
klog.Fatalf("error accessing flag %s for command %s: %v", flag, cmd.Name(), err)
}
return i
}
func GetFlagDuration(cmd *cobra.Command, flag string) time.Duration {
d, err := cmd.Flags().GetDuration(flag)
if err != nil {
klog.Fatalf("error accessing flag %s for command %s: %v", flag, cmd.Name(), err)
}
return d
}
func GetPodRunningTimeoutFlag(cmd *cobra.Command) (time.Duration, error) {
timeout := GetFlagDuration(cmd, "pod-running-timeout")
if timeout <= 0 {
return timeout, fmt.Errorf("--pod-running-timeout must be higher than zero")
}
return timeout, nil
}
func AddValidateFlags(cmd *cobra.Command) {
cmd.Flags().Bool("validate", true, "If true, use a schema to validate the input before sending it")
}
func AddValidateOptionFlags(cmd *cobra.Command, options *ValidateOptions) {
cmd.Flags().BoolVar(&options.EnableValidation, "validate", options.EnableValidation, "If true, use a schema to validate the input before sending it")
}
func AddFilenameOptionFlags(cmd *cobra.Command, options *resource.FilenameOptions, usage string) {
AddJsonFilenameFlag(cmd.Flags(), &options.Filenames, "Filename, directory, or URL to files "+usage)
AddKustomizeFlag(cmd.Flags(), &options.Kustomize)
cmd.Flags().BoolVarP(&options.Recursive, "recursive", "R", options.Recursive, "Process the directory used in -f, --filename recursively. Useful when you want to manage related manifests organized within the same directory.")
}
func AddJsonFilenameFlag(flags *pflag.FlagSet, value *[]string, usage string) {
flags.StringSliceVarP(value, "filename", "f", *value, usage)
annotations := make([]string, 0, len(resource.FileExtensions))
for _, ext := range resource.FileExtensions {
annotations = append(annotations, strings.TrimLeft(ext, "."))
}
flags.SetAnnotation("filename", cobra.BashCompFilenameExt, annotations)
}
// AddKustomizeFlag adds kustomize flag to a command
func AddKustomizeFlag(flags *pflag.FlagSet, value *string) {
flags.StringVarP(value, "kustomize", "k", *value, "Process the kustomization directory. This flag can't be used together with -f or -R.")
}
// AddDryRunFlag adds dry-run flag to a command. Usually used by mutations.
func AddDryRunFlag(cmd *cobra.Command) {
cmd.Flags().String(
"dry-run",
"none",
`Must be "none", "server", or "client". If client strategy, only print the object that would be sent, without sending it. If server strategy, submit server-side request without persisting the resource.`,
)
cmd.Flags().Lookup("dry-run").NoOptDefVal = "unchanged"
}
func AddServerSideApplyFlags(cmd *cobra.Command) {
cmd.Flags().Bool("server-side", false, "If true, apply runs in the server instead of the client.")
cmd.Flags().Bool("force-conflicts", false, "If true, server-side apply will force the changes against conflicts.")
cmd.Flags().String("field-manager", "kubectl", "Name of the manager used to track field ownership.")
}
func AddPodRunningTimeoutFlag(cmd *cobra.Command, defaultTimeout time.Duration) {
cmd.Flags().Duration("pod-running-timeout", defaultTimeout, "The length of time (like 5s, 2m, or 3h, higher than zero) to wait until at least one pod is running")
}
func AddApplyAnnotationFlags(cmd *cobra.Command) {
cmd.Flags().Bool(ApplyAnnotationsFlag, false, "If true, the configuration of current object will be saved in its annotation. Otherwise, the annotation will be unchanged. This flag is useful when you want to perform kubectl apply on this object in the future.")
}
func AddApplyAnnotationVarFlags(cmd *cobra.Command, applyAnnotation *bool) {
cmd.Flags().BoolVar(applyAnnotation, ApplyAnnotationsFlag, *applyAnnotation, "If true, the configuration of current object will be saved in its annotation. Otherwise, the annotation will be unchanged. This flag is useful when you want to perform kubectl apply on this object in the future.")
}
// AddGeneratorFlags adds flags common to resource generation commands
// TODO: need to take a pass at other generator commands to use this set of flags
func AddGeneratorFlags(cmd *cobra.Command, defaultGenerator string) {
cmd.Flags().String("generator", defaultGenerator, "The name of the API generator to use.")
cmd.Flags().MarkDeprecated("generator", "has no effect and will be removed in the future.")
AddDryRunFlag(cmd)
}
type ValidateOptions struct {
EnableValidation bool
}
// Merge requires JSON serialization
// TODO: merge assumes JSON serialization, and does not properly abstract API retrieval
func Merge(codec runtime.Codec, dst runtime.Object, fragment string) (runtime.Object, error) {
// encode dst into versioned json and apply fragment directly too it
target, err := runtime.Encode(codec, dst)
if err != nil {
return nil, err
}
patched, err := jsonpatch.MergePatch(target, []byte(fragment))
if err != nil {
return nil, err
}
out, err := runtime.Decode(codec, patched)
if err != nil {
return nil, err
}
return out, nil
}
// DumpReaderToFile writes all data from the given io.Reader to the specified file
// (usually for temporary use).
func DumpReaderToFile(reader io.Reader, filename string) error {
f, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
if err != nil {
return err
}
defer f.Close()
buffer := make([]byte, 1024)
for {
count, err := reader.Read(buffer)
if err == io.EOF {
break
}
if err != nil {
return err
}
_, err = f.Write(buffer[:count])
if err != nil {
return err
}
}
return nil
}
func GetServerSideApplyFlag(cmd *cobra.Command) bool {
return GetFlagBool(cmd, "server-side")
}
func GetForceConflictsFlag(cmd *cobra.Command) bool {
return GetFlagBool(cmd, "force-conflicts")
}
func GetFieldManagerFlag(cmd *cobra.Command) string {
return GetFlagString(cmd, "field-manager")
}
type DryRunStrategy int
const (
DryRunNone DryRunStrategy = iota
DryRunClient
DryRunServer
)
func GetDryRunStrategy(cmd *cobra.Command) (DryRunStrategy, error) {
var dryRunFlag = GetFlagString(cmd, "dry-run")
b, err := strconv.ParseBool(dryRunFlag)
// The flag is not a boolean
if err != nil {
switch dryRunFlag {
case cmd.Flag("dry-run").NoOptDefVal:
klog.Warning(`--dry-run is deprecated and can be replaced with --dry-run=client.`)
return DryRunClient, nil
case "client":
return DryRunClient, nil
case "server":
return DryRunServer, nil
case "none":
return DryRunNone, nil
default:
return DryRunNone, fmt.Errorf(`Invalid dry-run value (%v). Must be "none", "server", or "client".`, dryRunFlag)
}
}
// The flag was a boolean
if b {
klog.Warningf(`--dry-run=%v is deprecated (boolean value) and can be replaced with --dry-run=%s.`, dryRunFlag, "client")
return DryRunClient, nil
}
klog.Warningf(`--dry-run=%v is deprecated (boolean value) and can be replaced with --dry-run=%s.`, dryRunFlag, "none")
return DryRunNone, nil
}
// PrintFlagsWithDryRunStrategy sets a success message at print time for the dry run strategy
//
// TODO(juanvallejo): This can be cleaned up even further by creating
// a PrintFlags struct that binds the --dry-run flag, and whose
// ToPrinter method returns a printer that understands how to print
// this success message.
func PrintFlagsWithDryRunStrategy(printFlags *genericclioptions.PrintFlags, dryRunStrategy DryRunStrategy) *genericclioptions.PrintFlags {
switch dryRunStrategy {
case DryRunClient:
printFlags.Complete("%s (dry run)")
case DryRunServer:
printFlags.Complete("%s (server dry run)")
}
return printFlags
}
// GetResourcesAndPairs retrieves resources and "KEY=VALUE or KEY-" pair args from given args
func GetResourcesAndPairs(args []string, pairType string) (resources []string, pairArgs []string, err error) {
foundPair := false
for _, s := range args {
nonResource := (strings.Contains(s, "=") && s[0] != '=') || (strings.HasSuffix(s, "-") && s != "-")
switch {
case !foundPair && nonResource:
foundPair = true
fallthrough
case foundPair && nonResource:
pairArgs = append(pairArgs, s)
case !foundPair && !nonResource:
resources = append(resources, s)
case foundPair && !nonResource:
err = fmt.Errorf("all resources must be specified before %s changes: %s", pairType, s)
return
}
}
return
}
// ParsePairs retrieves new and remove pairs (if supportRemove is true) from "KEY=VALUE or KEY-" pair args
func ParsePairs(pairArgs []string, pairType string, supportRemove bool) (newPairs map[string]string, removePairs []string, err error) {
newPairs = map[string]string{}
if supportRemove {
removePairs = []string{}
}
var invalidBuf bytes.Buffer
var invalidBufNonEmpty bool
for _, pairArg := range pairArgs {
if strings.Contains(pairArg, "=") && pairArg[0] != '=' {
parts := strings.SplitN(pairArg, "=", 2)
if len(parts) != 2 {
if invalidBufNonEmpty {
invalidBuf.WriteString(", ")
}
invalidBuf.WriteString(pairArg)
invalidBufNonEmpty = true
} else {
newPairs[parts[0]] = parts[1]
}
} else if supportRemove && strings.HasSuffix(pairArg, "-") && pairArg != "-" {
removePairs = append(removePairs, pairArg[:len(pairArg)-1])
} else {
if invalidBufNonEmpty {
invalidBuf.WriteString(", ")
}
invalidBuf.WriteString(pairArg)
invalidBufNonEmpty = true
}
}
if invalidBufNonEmpty {
err = fmt.Errorf("invalid %s format: %s", pairType, invalidBuf.String())
return
}
return
}
// IsSiblingCommandExists receives a pointer to a cobra command and a target string.
// Returns true if the target string is found in the list of sibling commands.
func IsSiblingCommandExists(cmd *cobra.Command, targetCmdName string) bool {
for _, c := range cmd.Parent().Commands() {
if c.Name() == targetCmdName {
return true
}
}
return false
}
// DefaultSubCommandRun prints a command's help string to the specified output if no
// arguments (sub-commands) are provided, or a usage error otherwise.
func DefaultSubCommandRun(out io.Writer) func(c *cobra.Command, args []string) {
return func(c *cobra.Command, args []string) {
c.SetOutput(out)
RequireNoArguments(c, args)
c.Help()
CheckErr(ErrExit)
}
}
// RequireNoArguments exits with a usage error if extra arguments are provided.
func RequireNoArguments(c *cobra.Command, args []string) {
if len(args) > 0 {
CheckErr(UsageErrorf(c, "unknown command %q", strings.Join(args, " ")))
}
}
// StripComments will transform a YAML file into JSON, thus dropping any comments
// in it. Note that if the given file has a syntax error, the transformation will
// fail and we will manually drop all comments from the file.
func StripComments(file []byte) []byte {
stripped := file
stripped, err := yaml.ToJSON(stripped)
if err != nil {
stripped = ManualStrip(file)
}
return stripped
}
// ManualStrip is used for dropping comments from a YAML file
func ManualStrip(file []byte) []byte {
stripped := []byte{}
lines := bytes.Split(file, []byte("\n"))
for i, line := range lines {
if bytes.HasPrefix(bytes.TrimSpace(line), []byte("#")) {
continue
}
stripped = append(stripped, line...)
if i < len(lines)-1 {
stripped = append(stripped, '\n')
}
}
return stripped
}
// ScaleClientFunc provides a ScalesGetter
type ScaleClientFunc func(genericclioptions.RESTClientGetter) (scale.ScalesGetter, error)
// ScaleClientFn gives a way to easily override the function for unit testing if needed.
var ScaleClientFn ScaleClientFunc = scaleClient
// scaleClient gives you back scale getter
func scaleClient(restClientGetter genericclioptions.RESTClientGetter) (scale.ScalesGetter, error) {
discoveryClient, err := restClientGetter.ToDiscoveryClient()
if err != nil {
return nil, err
}
clientConfig, err := restClientGetter.ToRESTConfig()
if err != nil {
return nil, err
}
setKubernetesDefaults(clientConfig)
restClient, err := rest.RESTClientFor(clientConfig)
if err != nil {
return nil, err
}
resolver := scale.NewDiscoveryScaleKindResolver(discoveryClient)
mapper, err := restClientGetter.ToRESTMapper()
if err != nil {
return nil, err
}
return scale.New(restClient, mapper, dynamic.LegacyAPIPathResolverFunc, resolver), nil
}
func Warning(cmdErr io.Writer, newGeneratorName, oldGeneratorName string) {
fmt.Fprintf(cmdErr, "WARNING: New generator %q specified, "+
"but it isn't available. "+
"Falling back to %q.\n",
newGeneratorName,
oldGeneratorName,
)
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0"?>
<Document style="helpdocument">
<p style="htitle">Labour</p>
<p style="hp">People living in residences supply labour. Most zones use labour in order to operate. Some zones, such as monuments, only need labour while they are being built.</p>
<p style="hp">Markets can supply labour once you have built a market close to a zone that needs it. The market still has to get that labour from somewhere, and there are two ways: on the one hand, if there is a residence close to the market, it supplies labour to the market in proportion to the population living there and the number of markets sharing that labour. The other way is transport. If a market has more labour than it needs, it puts some of it onto the transport network. Any market connected to that transport by track, road or rail can take the labour if it needs it.</p>
<p style="hp">If a market becomes "full" of labour it will not accept any more from residences. People become unemployed when this happens and there is no market nearby that is not full.</p>
<p style="hp">A health centre close to a residence increases the amount of labour supplied to local markets.</p>
<p style="hsubtitle">See also:</p>
<li><a href="commodities">Commodities</a></li>
<li><a href="residential">Residential</a></li>
<li><a href="transport">Transport</a></li>
<li><a href="market">Market</a></li>
</Document>
| {
"pile_set_name": "Github"
} |
name=Sewers of Estark
image=https://magiccards.info/scans/en/mbp/2.jpg
value=2.781
rarity=R
type=Instant
cost={2}{B}{B}
ability=Cast SN with AI only during combat.
timing=pump
oracle=Choose target creature. If it's attacking, it can't be blocked this turn. If it's blocking, prevent all combat damage that would be dealt this combat by it and each creature it's blocking.
requires_groovy_code
| {
"pile_set_name": "Github"
} |
/* dporfsx.f -- translated by f2c (version 20061008).
You must link the resulting object file with libf2c:
on Microsoft Windows system, link with libf2c.lib;
on Linux or Unix systems, link with .../path/to/libf2c.a -lm
or, if you install libf2c.a in a standard place, with -lf2c -lm
-- in that order, at the end of the command line, as in
cc *.o -lf2c -lm
Source for libf2c is in /netlib/f2c/libf2c.zip, e.g.,
http://www.netlib.org/f2c/libf2c.zip
*/
#include "f2c.h"
#include "blaswrap.h"
/* Table of constant values */
static integer c_n1 = -1;
static integer c__0 = 0;
static integer c__1 = 1;
/* Subroutine */ int dporfsx_(char *uplo, char *equed, integer *n, integer *
nrhs, doublereal *a, integer *lda, doublereal *af, integer *ldaf,
doublereal *s, doublereal *b, integer *ldb, doublereal *x, integer *
ldx, doublereal *rcond, doublereal *berr, integer *n_err_bnds__,
doublereal *err_bnds_norm__, doublereal *err_bnds_comp__, integer *
nparams, doublereal *params, doublereal *work, integer *iwork,
integer *info)
{
/* System generated locals */
integer a_dim1, a_offset, af_dim1, af_offset, b_dim1, b_offset, x_dim1,
x_offset, err_bnds_norm_dim1, err_bnds_norm_offset,
err_bnds_comp_dim1, err_bnds_comp_offset, i__1;
doublereal d__1, d__2;
/* Builtin functions */
double sqrt(doublereal);
/* Local variables */
doublereal illrcond_thresh__, unstable_thresh__, err_lbnd__;
integer ref_type__, j;
doublereal rcond_tmp__;
integer prec_type__;
extern doublereal dla_porcond__(char *, integer *, doublereal *, integer *
, doublereal *, integer *, integer *, doublereal *, integer *,
doublereal *, integer *, ftnlen);
doublereal cwise_wrong__;
extern /* Subroutine */ int dla_porfsx_extended__(integer *, char *,
integer *, integer *, doublereal *, integer *, doublereal *,
integer *, logical *, doublereal *, doublereal *, integer *,
doublereal *, integer *, doublereal *, integer *, doublereal *,
doublereal *, doublereal *, doublereal *, doublereal *,
doublereal *, doublereal *, integer *, doublereal *, doublereal *,
logical *, integer *, ftnlen);
char norm[1];
logical ignore_cwise__;
extern logical lsame_(char *, char *);
doublereal anorm;
logical rcequ;
extern doublereal dlamch_(char *);
extern /* Subroutine */ int xerbla_(char *, integer *), dpocon_(
char *, integer *, doublereal *, integer *, doublereal *,
doublereal *, doublereal *, integer *, integer *);
extern doublereal dlansy_(char *, char *, integer *, doublereal *,
integer *, doublereal *);
extern integer ilaprec_(char *);
integer ithresh, n_norms__;
doublereal rthresh;
/* -- LAPACK routine (version 3.2.1) -- */
/* -- Contributed by James Demmel, Deaglan Halligan, Yozo Hida and -- */
/* -- Jason Riedy of Univ. of California Berkeley. -- */
/* -- April 2009 -- */
/* -- LAPACK is a software package provided by Univ. of Tennessee, -- */
/* -- Univ. of California Berkeley and NAG Ltd. -- */
/* .. */
/* .. Scalar Arguments .. */
/* .. */
/* .. Array Arguments .. */
/* .. */
/* Purpose */
/* ======= */
/* DPORFSX improves the computed solution to a system of linear */
/* equations when the coefficient matrix is symmetric positive */
/* definite, and provides error bounds and backward error estimates */
/* for the solution. In addition to normwise error bound, the code */
/* provides maximum componentwise error bound if possible. See */
/* comments for ERR_BNDS_NORM and ERR_BNDS_COMP for details of the */
/* error bounds. */
/* The original system of linear equations may have been equilibrated */
/* before calling this routine, as described by arguments EQUED and S */
/* below. In this case, the solution and error bounds returned are */
/* for the original unequilibrated system. */
/* Arguments */
/* ========= */
/* Some optional parameters are bundled in the PARAMS array. These */
/* settings determine how refinement is performed, but often the */
/* defaults are acceptable. If the defaults are acceptable, users */
/* can pass NPARAMS = 0 which prevents the source code from accessing */
/* the PARAMS argument. */
/* UPLO (input) CHARACTER*1 */
/* = 'U': Upper triangle of A is stored; */
/* = 'L': Lower triangle of A is stored. */
/* EQUED (input) CHARACTER*1 */
/* Specifies the form of equilibration that was done to A */
/* before calling this routine. This is needed to compute */
/* the solution and error bounds correctly. */
/* = 'N': No equilibration */
/* = 'Y': Both row and column equilibration, i.e., A has been */
/* replaced by diag(S) * A * diag(S). */
/* The right hand side B has been changed accordingly. */
/* N (input) INTEGER */
/* The order of the matrix A. N >= 0. */
/* NRHS (input) INTEGER */
/* The number of right hand sides, i.e., the number of columns */
/* of the matrices B and X. NRHS >= 0. */
/* A (input) DOUBLE PRECISION array, dimension (LDA,N) */
/* The symmetric matrix A. If UPLO = 'U', the leading N-by-N */
/* upper triangular part of A contains the upper triangular part */
/* of the matrix A, and the strictly lower triangular part of A */
/* is not referenced. If UPLO = 'L', the leading N-by-N lower */
/* triangular part of A contains the lower triangular part of */
/* the matrix A, and the strictly upper triangular part of A is */
/* not referenced. */
/* LDA (input) INTEGER */
/* The leading dimension of the array A. LDA >= max(1,N). */
/* AF (input) DOUBLE PRECISION array, dimension (LDAF,N) */
/* The triangular factor U or L from the Cholesky factorization */
/* A = U**T*U or A = L*L**T, as computed by DPOTRF. */
/* LDAF (input) INTEGER */
/* The leading dimension of the array AF. LDAF >= max(1,N). */
/* S (input or output) DOUBLE PRECISION array, dimension (N) */
/* The row scale factors for A. If EQUED = 'Y', A is multiplied on */
/* the left and right by diag(S). S is an input argument if FACT = */
/* 'F'; otherwise, S is an output argument. If FACT = 'F' and EQUED */
/* = 'Y', each element of S must be positive. If S is output, each */
/* element of S is a power of the radix. If S is input, each element */
/* of S should be a power of the radix to ensure a reliable solution */
/* and error estimates. Scaling by powers of the radix does not cause */
/* rounding errors unless the result underflows or overflows. */
/* Rounding errors during scaling lead to refining with a matrix that */
/* is not equivalent to the input matrix, producing error estimates */
/* that may not be reliable. */
/* B (input) DOUBLE PRECISION array, dimension (LDB,NRHS) */
/* The right hand side matrix B. */
/* LDB (input) INTEGER */
/* The leading dimension of the array B. LDB >= max(1,N). */
/* X (input/output) DOUBLE PRECISION array, dimension (LDX,NRHS) */
/* On entry, the solution matrix X, as computed by DPOTRS. */
/* On exit, the improved solution matrix X. */
/* LDX (input) INTEGER */
/* The leading dimension of the array X. LDX >= max(1,N). */
/* RCOND (output) DOUBLE PRECISION */
/* Reciprocal scaled condition number. This is an estimate of the */
/* reciprocal Skeel condition number of the matrix A after */
/* equilibration (if done). If this is less than the machine */
/* precision (in particular, if it is zero), the matrix is singular */
/* to working precision. Note that the error may still be small even */
/* if this number is very small and the matrix appears ill- */
/* conditioned. */
/* BERR (output) DOUBLE PRECISION array, dimension (NRHS) */
/* Componentwise relative backward error. This is the */
/* componentwise relative backward error of each solution vector X(j) */
/* (i.e., the smallest relative change in any element of A or B that */
/* makes X(j) an exact solution). */
/* N_ERR_BNDS (input) INTEGER */
/* Number of error bounds to return for each right hand side */
/* and each type (normwise or componentwise). See ERR_BNDS_NORM and */
/* ERR_BNDS_COMP below. */
/* ERR_BNDS_NORM (output) DOUBLE PRECISION array, dimension (NRHS, N_ERR_BNDS) */
/* For each right-hand side, this array contains information about */
/* various error bounds and condition numbers corresponding to the */
/* normwise relative error, which is defined as follows: */
/* Normwise relative error in the ith solution vector: */
/* max_j (abs(XTRUE(j,i) - X(j,i))) */
/* ------------------------------ */
/* max_j abs(X(j,i)) */
/* The array is indexed by the type of error information as described */
/* below. There currently are up to three pieces of information */
/* returned. */
/* The first index in ERR_BNDS_NORM(i,:) corresponds to the ith */
/* right-hand side. */
/* The second index in ERR_BNDS_NORM(:,err) contains the following */
/* three fields: */
/* err = 1 "Trust/don't trust" boolean. Trust the answer if the */
/* reciprocal condition number is less than the threshold */
/* sqrt(n) * dlamch('Epsilon'). */
/* err = 2 "Guaranteed" error bound: The estimated forward error, */
/* almost certainly within a factor of 10 of the true error */
/* so long as the next entry is greater than the threshold */
/* sqrt(n) * dlamch('Epsilon'). This error bound should only */
/* be trusted if the previous boolean is true. */
/* err = 3 Reciprocal condition number: Estimated normwise */
/* reciprocal condition number. Compared with the threshold */
/* sqrt(n) * dlamch('Epsilon') to determine if the error */
/* estimate is "guaranteed". These reciprocal condition */
/* numbers are 1 / (norm(Z^{-1},inf) * norm(Z,inf)) for some */
/* appropriately scaled matrix Z. */
/* Let Z = S*A, where S scales each row by a power of the */
/* radix so all absolute row sums of Z are approximately 1. */
/* See Lapack Working Note 165 for further details and extra */
/* cautions. */
/* ERR_BNDS_COMP (output) DOUBLE PRECISION array, dimension (NRHS, N_ERR_BNDS) */
/* For each right-hand side, this array contains information about */
/* various error bounds and condition numbers corresponding to the */
/* componentwise relative error, which is defined as follows: */
/* Componentwise relative error in the ith solution vector: */
/* abs(XTRUE(j,i) - X(j,i)) */
/* max_j ---------------------- */
/* abs(X(j,i)) */
/* The array is indexed by the right-hand side i (on which the */
/* componentwise relative error depends), and the type of error */
/* information as described below. There currently are up to three */
/* pieces of information returned for each right-hand side. If */
/* componentwise accuracy is not requested (PARAMS(3) = 0.0), then */
/* ERR_BNDS_COMP is not accessed. If N_ERR_BNDS .LT. 3, then at most */
/* the first (:,N_ERR_BNDS) entries are returned. */
/* The first index in ERR_BNDS_COMP(i,:) corresponds to the ith */
/* right-hand side. */
/* The second index in ERR_BNDS_COMP(:,err) contains the following */
/* three fields: */
/* err = 1 "Trust/don't trust" boolean. Trust the answer if the */
/* reciprocal condition number is less than the threshold */
/* sqrt(n) * dlamch('Epsilon'). */
/* err = 2 "Guaranteed" error bound: The estimated forward error, */
/* almost certainly within a factor of 10 of the true error */
/* so long as the next entry is greater than the threshold */
/* sqrt(n) * dlamch('Epsilon'). This error bound should only */
/* be trusted if the previous boolean is true. */
/* err = 3 Reciprocal condition number: Estimated componentwise */
/* reciprocal condition number. Compared with the threshold */
/* sqrt(n) * dlamch('Epsilon') to determine if the error */
/* estimate is "guaranteed". These reciprocal condition */
/* numbers are 1 / (norm(Z^{-1},inf) * norm(Z,inf)) for some */
/* appropriately scaled matrix Z. */
/* Let Z = S*(A*diag(x)), where x is the solution for the */
/* current right-hand side and S scales each row of */
/* A*diag(x) by a power of the radix so all absolute row */
/* sums of Z are approximately 1. */
/* See Lapack Working Note 165 for further details and extra */
/* cautions. */
/* NPARAMS (input) INTEGER */
/* Specifies the number of parameters set in PARAMS. If .LE. 0, the */
/* PARAMS array is never referenced and default values are used. */
/* PARAMS (input / output) DOUBLE PRECISION array, dimension NPARAMS */
/* Specifies algorithm parameters. If an entry is .LT. 0.0, then */
/* that entry will be filled with default value used for that */
/* parameter. Only positions up to NPARAMS are accessed; defaults */
/* are used for higher-numbered parameters. */
/* PARAMS(LA_LINRX_ITREF_I = 1) : Whether to perform iterative */
/* refinement or not. */
/* Default: 1.0D+0 */
/* = 0.0 : No refinement is performed, and no error bounds are */
/* computed. */
/* = 1.0 : Use the double-precision refinement algorithm, */
/* possibly with doubled-single computations if the */
/* compilation environment does not support DOUBLE */
/* PRECISION. */
/* (other values are reserved for future use) */
/* PARAMS(LA_LINRX_ITHRESH_I = 2) : Maximum number of residual */
/* computations allowed for refinement. */
/* Default: 10 */
/* Aggressive: Set to 100 to permit convergence using approximate */
/* factorizations or factorizations other than LU. If */
/* the factorization uses a technique other than */
/* Gaussian elimination, the guarantees in */
/* err_bnds_norm and err_bnds_comp may no longer be */
/* trustworthy. */
/* PARAMS(LA_LINRX_CWISE_I = 3) : Flag determining if the code */
/* will attempt to find a solution with small componentwise */
/* relative error in the double-precision algorithm. Positive */
/* is true, 0.0 is false. */
/* Default: 1.0 (attempt componentwise convergence) */
/* WORK (workspace) DOUBLE PRECISION array, dimension (4*N) */
/* IWORK (workspace) INTEGER array, dimension (N) */
/* INFO (output) INTEGER */
/* = 0: Successful exit. The solution to every right-hand side is */
/* guaranteed. */
/* < 0: If INFO = -i, the i-th argument had an illegal value */
/* > 0 and <= N: U(INFO,INFO) is exactly zero. The factorization */
/* has been completed, but the factor U is exactly singular, so */
/* the solution and error bounds could not be computed. RCOND = 0 */
/* is returned. */
/* = N+J: The solution corresponding to the Jth right-hand side is */
/* not guaranteed. The solutions corresponding to other right- */
/* hand sides K with K > J may not be guaranteed as well, but */
/* only the first such right-hand side is reported. If a small */
/* componentwise error is not requested (PARAMS(3) = 0.0) then */
/* the Jth right-hand side is the first with a normwise error */
/* bound that is not guaranteed (the smallest J such */
/* that ERR_BNDS_NORM(J,1) = 0.0). By default (PARAMS(3) = 1.0) */
/* the Jth right-hand side is the first with either a normwise or */
/* componentwise error bound that is not guaranteed (the smallest */
/* J such that either ERR_BNDS_NORM(J,1) = 0.0 or */
/* ERR_BNDS_COMP(J,1) = 0.0). See the definition of */
/* ERR_BNDS_NORM(:,1) and ERR_BNDS_COMP(:,1). To get information */
/* about all of the right-hand sides check ERR_BNDS_NORM or */
/* ERR_BNDS_COMP. */
/* ================================================================== */
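/*     Usage sketch (illustrative only; not part of the reference LAPACK */
/*     source). It assumes the SPD matrix A was factored into AF by DPOTRF */
/*     and an initial solution X was obtained with DPOTRS; the array names */
/*     and sizes are placeholders (BERR(NRHS), ERR_BNDS_*(NRHS,N_ERR_BNDS), */
/*     WORK(4*N), IWORK(N)): */
/*       dpotrf_("U", &n, af, &ldaf, &info); */
/*       dpotrs_("U", &n, &nrhs, af, &ldaf, x, &ldx, &info); */
/*       dporfsx_("U", "N", &n, &nrhs, a, &lda, af, &ldaf, s, b, &ldb, */
/*                x, &ldx, &rcond, berr, &n_err_bnds, err_bnds_norm, */
/*                err_bnds_comp, &nparams, params, work, iwork, &info); */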
/* .. Parameters .. */
/* .. */
/* .. Local Scalars .. */
/* .. */
/* .. External Subroutines .. */
/* .. */
/* .. Intrinsic Functions .. */
/* .. */
/* .. External Functions .. */
/* .. */
/* .. Executable Statements .. */
/* Check the input parameters. */
/* Parameter adjustments */
err_bnds_comp_dim1 = *nrhs;
err_bnds_comp_offset = 1 + err_bnds_comp_dim1;
err_bnds_comp__ -= err_bnds_comp_offset;
err_bnds_norm_dim1 = *nrhs;
err_bnds_norm_offset = 1 + err_bnds_norm_dim1;
err_bnds_norm__ -= err_bnds_norm_offset;
a_dim1 = *lda;
a_offset = 1 + a_dim1;
a -= a_offset;
af_dim1 = *ldaf;
af_offset = 1 + af_dim1;
af -= af_offset;
--s;
b_dim1 = *ldb;
b_offset = 1 + b_dim1;
b -= b_offset;
x_dim1 = *ldx;
x_offset = 1 + x_dim1;
x -= x_offset;
--berr;
--params;
--work;
--iwork;
/* Function Body */
*info = 0;
ref_type__ = 1;
if (*nparams >= 1) {
if (params[1] < 0.) {
params[1] = 1.;
} else {
ref_type__ = (integer) params[1];
}
}
/* Set default parameters. */
illrcond_thresh__ = (doublereal) (*n) * dlamch_("Epsilon");
ithresh = 10;
rthresh = .5;
unstable_thresh__ = .25;
ignore_cwise__ = FALSE_;
if (*nparams >= 2) {
if (params[2] < 0.) {
params[2] = (doublereal) ithresh;
} else {
ithresh = (integer) params[2];
}
}
if (*nparams >= 3) {
if (params[3] < 0.) {
if (ignore_cwise__) {
params[3] = 0.;
} else {
params[3] = 1.;
}
} else {
ignore_cwise__ = params[3] == 0.;
}
}
if (ref_type__ == 0 || *n_err_bnds__ == 0) {
n_norms__ = 0;
} else if (ignore_cwise__) {
n_norms__ = 1;
} else {
n_norms__ = 2;
}
rcequ = lsame_(equed, "Y");
/* Test input parameters. */
if (! lsame_(uplo, "U") && ! lsame_(uplo, "L")) {
*info = -1;
} else if (! rcequ && ! lsame_(equed, "N")) {
*info = -2;
} else if (*n < 0) {
*info = -3;
} else if (*nrhs < 0) {
*info = -4;
} else if (*lda < max(1,*n)) {
*info = -6;
} else if (*ldaf < max(1,*n)) {
*info = -8;
} else if (*ldb < max(1,*n)) {
*info = -11;
} else if (*ldx < max(1,*n)) {
*info = -13;
}
if (*info != 0) {
i__1 = -(*info);
xerbla_("DPORFSX", &i__1);
return 0;
}
/* Quick return if possible. */
if (*n == 0 || *nrhs == 0) {
*rcond = 1.;
i__1 = *nrhs;
for (j = 1; j <= i__1; ++j) {
berr[j] = 0.;
if (*n_err_bnds__ >= 1) {
err_bnds_norm__[j + err_bnds_norm_dim1] = 1.;
err_bnds_comp__[j + err_bnds_comp_dim1] = 1.;
} else if (*n_err_bnds__ >= 2) {
err_bnds_norm__[j + (err_bnds_norm_dim1 << 1)] = 0.;
err_bnds_comp__[j + (err_bnds_comp_dim1 << 1)] = 0.;
} else if (*n_err_bnds__ >= 3) {
err_bnds_norm__[j + err_bnds_norm_dim1 * 3] = 1.;
err_bnds_comp__[j + err_bnds_comp_dim1 * 3] = 1.;
}
}
return 0;
}
/* Default to failure. */
*rcond = 0.;
i__1 = *nrhs;
for (j = 1; j <= i__1; ++j) {
berr[j] = 1.;
if (*n_err_bnds__ >= 1) {
err_bnds_norm__[j + err_bnds_norm_dim1] = 1.;
err_bnds_comp__[j + err_bnds_comp_dim1] = 1.;
} else if (*n_err_bnds__ >= 2) {
err_bnds_norm__[j + (err_bnds_norm_dim1 << 1)] = 1.;
err_bnds_comp__[j + (err_bnds_comp_dim1 << 1)] = 1.;
} else if (*n_err_bnds__ >= 3) {
err_bnds_norm__[j + err_bnds_norm_dim1 * 3] = 0.;
err_bnds_comp__[j + err_bnds_comp_dim1 * 3] = 0.;
}
}
/* Compute the norm of A and the reciprocal of the condition */
/* number of A. */
*(unsigned char *)norm = 'I';
anorm = dlansy_(norm, uplo, n, &a[a_offset], lda, &work[1]);
dpocon_(uplo, n, &af[af_offset], ldaf, &anorm, rcond, &work[1], &iwork[1],
info);
/* Perform refinement on each right-hand side */
if (ref_type__ != 0) {
prec_type__ = ilaprec_("E");
dla_porfsx_extended__(&prec_type__, uplo, n, nrhs, &a[a_offset], lda,
&af[af_offset], ldaf, &rcequ, &s[1], &b[b_offset], ldb, &x[
x_offset], ldx, &berr[1], &n_norms__, &err_bnds_norm__[
err_bnds_norm_offset], &err_bnds_comp__[err_bnds_comp_offset],
&work[*n + 1], &work[1], &work[(*n << 1) + 1], &work[1],
rcond, &ithresh, &rthresh, &unstable_thresh__, &
ignore_cwise__, info, (ftnlen)1);
}
/* Computing MAX */
d__1 = 10., d__2 = sqrt((doublereal) (*n));
err_lbnd__ = max(d__1,d__2) * dlamch_("Epsilon");
if (*n_err_bnds__ >= 1 && n_norms__ >= 1) {
/* Compute scaled normwise condition number cond(A*C). */
if (rcequ) {
rcond_tmp__ = dla_porcond__(uplo, n, &a[a_offset], lda, &af[
af_offset], ldaf, &c_n1, &s[1], info, &work[1], &iwork[1],
(ftnlen)1);
} else {
rcond_tmp__ = dla_porcond__(uplo, n, &a[a_offset], lda, &af[
af_offset], ldaf, &c__0, &s[1], info, &work[1], &iwork[1],
(ftnlen)1);
}
i__1 = *nrhs;
for (j = 1; j <= i__1; ++j) {
/* Cap the error at 1.0. */
if (*n_err_bnds__ >= 2 && err_bnds_norm__[j + (err_bnds_norm_dim1
<< 1)] > 1.) {
err_bnds_norm__[j + (err_bnds_norm_dim1 << 1)] = 1.;
}
/* Threshold the error (see LAWN). */
if (rcond_tmp__ < illrcond_thresh__) {
err_bnds_norm__[j + (err_bnds_norm_dim1 << 1)] = 1.;
err_bnds_norm__[j + err_bnds_norm_dim1] = 0.;
if (*info <= *n) {
*info = *n + j;
}
} else if (err_bnds_norm__[j + (err_bnds_norm_dim1 << 1)] <
err_lbnd__) {
err_bnds_norm__[j + (err_bnds_norm_dim1 << 1)] = err_lbnd__;
err_bnds_norm__[j + err_bnds_norm_dim1] = 1.;
}
/* Save the condition number. */
if (*n_err_bnds__ >= 3) {
err_bnds_norm__[j + err_bnds_norm_dim1 * 3] = rcond_tmp__;
}
}
}
if (*n_err_bnds__ >= 1 && n_norms__ >= 2) {
/* Compute componentwise condition number cond(A*diag(Y(:,J))) for */
/* each right-hand side using the current solution as an estimate of */
/* the true solution. If the componentwise error estimate is too */
/* large, then the solution is a lousy estimate of truth and the */
/* estimated RCOND may be too optimistic. To avoid misleading users, */
/* the inverse condition number is set to 0.0 when the estimated */
/* cwise error is at least CWISE_WRONG. */
cwise_wrong__ = sqrt(dlamch_("Epsilon"));
i__1 = *nrhs;
for (j = 1; j <= i__1; ++j) {
if (err_bnds_comp__[j + (err_bnds_comp_dim1 << 1)] <
cwise_wrong__) {
rcond_tmp__ = dla_porcond__(uplo, n, &a[a_offset], lda, &af[
af_offset], ldaf, &c__1, &x[j * x_dim1 + 1], info, &
work[1], &iwork[1], (ftnlen)1);
} else {
rcond_tmp__ = 0.;
}
/* Cap the error at 1.0. */
if (*n_err_bnds__ >= 2 && err_bnds_comp__[j + (err_bnds_comp_dim1
<< 1)] > 1.) {
err_bnds_comp__[j + (err_bnds_comp_dim1 << 1)] = 1.;
}
/* Threshold the error (see LAWN). */
if (rcond_tmp__ < illrcond_thresh__) {
err_bnds_comp__[j + (err_bnds_comp_dim1 << 1)] = 1.;
err_bnds_comp__[j + err_bnds_comp_dim1] = 0.;
if (params[3] == 1. && *info < *n + j) {
*info = *n + j;
}
} else if (err_bnds_comp__[j + (err_bnds_comp_dim1 << 1)] <
err_lbnd__) {
err_bnds_comp__[j + (err_bnds_comp_dim1 << 1)] = err_lbnd__;
err_bnds_comp__[j + err_bnds_comp_dim1] = 1.;
}
/* Save the condition number. */
if (*n_err_bnds__ >= 3) {
err_bnds_comp__[j + err_bnds_comp_dim1 * 3] = rcond_tmp__;
}
}
}
return 0;
/* End of DPORFSX */
} /* dporfsx_ */
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<root>
<!--
Microsoft ResX Schema
Version 2.0
    The primary goal of this format is to allow a simple XML format
that is mostly human readable. The generation and parsing of the
various data types are done through the TypeConverter classes
associated with the data types.
Example:
... ado.net/XML headers & schema ...
<resheader name="resmimetype">text/microsoft-resx</resheader>
<resheader name="version">2.0</resheader>
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
<value>[base64 mime encoded serialized .NET Framework object]</value>
</data>
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
<comment>This is a comment</comment>
</data>
There are any number of "resheader" rows that contain simple
name/value pairs.
    Each data row contains a name and a value. The row also contains a
    type or mimetype. Type corresponds to a .NET class that supports
    text/value conversion through the TypeConverter architecture.
Classes that don't support this are serialized and stored with the
mimetype set.
The mimetype is used for serialized objects, and tells the
ResXResourceReader how to depersist the object. This is currently not
extensible. For a given mimetype the value must be set accordingly:
Note - application/x-microsoft.net.object.binary.base64 is the format
that the ResXResourceWriter will generate, however the reader can
read any of the formats listed below.
mimetype: application/x-microsoft.net.object.binary.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.soap.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.bytearray.base64
value : The object must be serialized into a byte array
: using a System.ComponentModel.TypeConverter
: and then encoded with base64 encoding.
-->
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" use="required" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<data name="SplitButtonSecondaryButtonName" xml:space="preserve">
<value>Máis opcións</value>
<comment>Automation name for the secondary button.</comment>
</data>
</root> | {
"pile_set_name": "Github"
} |
class GenerateConsoleViewTask < Rake::TaskLib
attr_accessor :layout, :views
def initialize(name)
yield self if block_given?
define(name)
end
def define(name)
task name => [:environment] do
views.each_pair do |view_path, file|
File.open(File.join(Rails.root, 'public', file), 'w') do |f|
f.write(render(view_path))
end
end
end
end
protected
def render(template)
view.render :template => template.dup, :layout => layout
end
def controller_class
ConsoleController
end
def controller
controller = controller_class.new
controller.request = ActionDispatch::TestRequest.new({'SCRIPT_NAME' => ENV['RAILS_RELATIVE_URL_ROOT']})
controller.request.host = host
controller.env = controller.request.env
controller
end
def add_view_helpers(view, routes)
view.class_eval do
include routes.url_helpers
include Console::CommunityAware
include Console::LayoutHelper
include Console::HelpHelper
include Console::Html5BoilerplateHelper
include Console::ModelHelper
include Console::SecuredHelper
include Console::CommunityHelper
include Console::ConsoleHelper
def active_tab
nil
end
def account_settings_redirect
account_path
end
end
end
def subclass_view(view, routes)
host = self.host
view.class_eval do
def protect_against_forgery?
false
end
def default_url_options
{:host => host}
end
end
end
def view
view = ActionView::Base.new(ActionController::Base.view_paths, {}, controller)
routes = Rails.application.routes
routes.default_url_options = {:host => self.host}
add_view_helpers(view, routes)
subclass_view(view, routes)
view
end
def host
ENV['RAILS_HOST'] || 'localhost'
end
end
namespace :assets do
GenerateConsoleViewTask.new(:public_pages) do |t|
t.layout = 'layouts/console'
t.views = {
'console/not_found' => '404.html',
'console/error' => '500.html',
}
end
end
| {
"pile_set_name": "Github"
} |
(function() {
var Gitlab, credentials, gitlab;
process.stdout.write('\u001B[2J\u001B[0;0f');
Gitlab = require('..');
credentials = require('./credentials');
gitlab = new Gitlab({
url: credentials.url,
token: credentials.token
});
gitlab.projects.all(function(projects) {
var _project, i, len, results;
results = [];
for (i = 0, len = projects.length; i < len; i++) {
_project = projects[i];
results.push((function() {
var project;
project = _project;
return gitlab.projects.hooks.list(project.id, function(hooks) {
var hook, j, len1, url;
url = "" + credentials.service_hook_base + project.path_with_namespace;
if (hooks.length > 1) {
          return console.log(url + " has too many hooks");
} else if (hooks.length === 1) {
for (j = 0, len1 = hooks.length; j < len1; j++) {
hook = hooks[j];
if (hook.url !== url) {
gitlab.projects.hooks.remove(project.id, hook.id, function(ret) {
return console.log(ret);
});
}
}
return console.log(url + " is already OK");
} else {
return gitlab.projects.hooks.add(project.id, url, function() {
return console.log(url + " has been added");
});
}
});
})());
}
return results;
});
}).call(this);
| {
"pile_set_name": "Github"
} |
namespace GraphQL.Types.Relay
{
public class EdgeType<TNodeType> : ObjectGraphType<object>
where TNodeType : IGraphType
{
public EdgeType()
{
Name = string.Format("{0}Edge", typeof(TNodeType).GraphQLName());
Description = string.Format(
"An edge in a connection from an object to another object of type `{0}`.",
typeof(TNodeType).GraphQLName());
Field<NonNullGraphType<StringGraphType>>()
.Name("cursor")
.Description("A cursor for use in pagination");
Field<TNodeType>()
.Name("node")
.Description("The item at the end of the edge");
}
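        // Illustrative note (hypothetical node type, not from the library docs):
        // for TNodeType = UserType whose GraphQL name is "User", this class
        // yields SDL roughly like
        //   type UserEdge {
        //     cursor: String!
        //     node: User
        //   }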
}
}
| {
"pile_set_name": "Github"
} |
In this game, you are robot (#). Your job is to find kitten. This task
is complicated by the existence of various things which are not kitten.
Robot must touch items to determine if they are kitten or not. The game
ends when robotfindskitten.
| {
"pile_set_name": "Github"
} |
package com.uwsoft.editor.proxy;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map.Entry;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.Pixmap;
import com.badlogic.gdx.graphics.Texture;
import com.uwsoft.editor.renderer.data.*;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import com.badlogic.gdx.Files;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import com.badlogic.gdx.graphics.g2d.ParticleEffect;
import com.badlogic.gdx.graphics.g2d.TextureAtlas;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.graphics.g2d.freetype.FreeTypeFontGenerator;
import com.badlogic.gdx.graphics.glutils.ShaderProgram;
import com.badlogic.gdx.utils.Json;
import com.puremvc.patterns.proxy.BaseProxy;
import com.uwsoft.editor.data.SpineAnimData;
import com.uwsoft.editor.Overlap2DFacade;
import com.uwsoft.editor.renderer.resources.FontSizePair;
import com.uwsoft.editor.renderer.resources.IResourceRetriever;
import com.uwsoft.editor.renderer.utils.MySkin;
/**
* Created by azakhary on 4/26/2015.
*/
public class ResourceManager extends BaseProxy implements IResourceRetriever {
public String packResolutionName = "orig";
private static final String TAG = ResourceManager.class.getCanonicalName();
public static final String NAME = TAG;
private HashMap<String, ParticleEffect> particleEffects = new HashMap<String, ParticleEffect>(1);
private TextureAtlas currentProjectAtlas;
private HashMap<String, SpineAnimData> spineAnimAtlases = new HashMap<String, SpineAnimData>();
private HashMap<String, TextureAtlas> spriteAnimAtlases = new HashMap<String, TextureAtlas>();
private HashMap<String, FileHandle> spriterAnimFiles = new HashMap<String, FileHandle>();
private HashMap<FontSizePair, BitmapFont> bitmapFonts = new HashMap<>();
private HashMap<String, ShaderProgram> shaderPrograms = new HashMap<String, ShaderProgram>(1);
private TextureRegion defaultRegion;
private ResolutionManager resolutionManager;
public ResourceManager() {
super(NAME);
}
@Override
public void onRegister() {
super.onRegister();
facade = Overlap2DFacade.getInstance();
resolutionManager = facade.retrieveProxy(ResolutionManager.NAME);
// TODO: substitute this with "NO IMAGE" icon
Pixmap pixmap = new Pixmap(50, 50, Pixmap.Format.RGBA8888);
pixmap.setColor(new Color(1, 1, 1, 0.4f));
pixmap.fill();
defaultRegion = new TextureRegion(new Texture(pixmap));
}
@Override
public TextureRegion getTextureRegion(String name) {
TextureRegion reg = currentProjectAtlas.findRegion(name);
if(reg == null) {
reg = defaultRegion;
}
return reg;
}
public TextureAtlas getTextureAtlas() {
return currentProjectAtlas;
}
@Override
public ParticleEffect getParticleEffect(String name) {
return new ParticleEffect(particleEffects.get(name));
}
@Override
public TextureAtlas getSkeletonAtlas(String animationName) {
SpineAnimData animData = spineAnimAtlases.get(animationName);
return animData.atlas;
}
/**
     * Sets the working resolution; call this before doing any loading.
     * @param resolution String resolution name; the default is "orig", or use a resolution name created in the editor
*/
public void setWorkingResolution(String resolution) {
        ResolutionEntryVO resolutionObject = getProjectVO().getResolution(resolution);
if(resolutionObject != null) {
packResolutionName = resolution;
}
}
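    // Usage sketch (illustrative; the project path and resolution name below are placeholders):
    //   ResourceManager rm = Overlap2DFacade.getInstance().retrieveProxy(ResourceManager.NAME);
    //   rm.setWorkingResolution("800x480");                 // choose before any loading
    //   rm.loadCurrentProjectData(projectPath, "800x480");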
@Override
public FileHandle getSkeletonJSON(String animationName) {
SpineAnimData animData = spineAnimAtlases.get(animationName);
return animData.jsonFile;
}
@Override
public FileHandle getSCMLFile(String name) {
return spriterAnimFiles.get(name);
}
@Override
public TextureAtlas getSpriteAnimation(String animationName) {
return spriteAnimAtlases.get(animationName);
}
@Override
public BitmapFont getBitmapFont(String fontName, int fontSize) {
FontSizePair pair = new FontSizePair(fontName, fontSize);
return bitmapFonts.get(pair);
}
@Override
public MySkin getSkin() {
//return textureManager.projectSkin;
// not sure if we are going to use skins for labels
return null;
}
@Override
public ProjectInfoVO getProjectVO() {
ProjectManager projectManager = facade.retrieveProxy(ProjectManager.NAME);
return projectManager.getCurrentProjectInfoVO();
}
@Override
public SceneVO getSceneVO(String name) {
SceneDataManager sceneDataManager = facade.retrieveProxy(SceneDataManager.NAME);
// TODO: this should be cached
FileHandle file = Gdx.files.internal(sceneDataManager.getCurrProjectScenePathByName(name));
Json json = new Json();
json.setIgnoreUnknownFields(true);
return json.fromJson(SceneVO.class, file.readString());
}
public void loadCurrentProjectData(String projectPath, String curResolution) {
packResolutionName = curResolution;
loadCurrentProjectAssets(projectPath + "/assets/" + curResolution + "/pack/pack.atlas");
loadCurrentProjectSkin(projectPath + "/assets/orig/styles");
loadCurrentProjectParticles(projectPath + "/assets/orig/particles");
loadCurrentProjectSpineAnimations(projectPath + "/assets/", curResolution);
loadCurrentProjectSpriteAnimations(projectPath + "/assets/", curResolution);
loadCurrentProjectSpriterAnimations(projectPath + "/assets/", curResolution);
loadCurrentProjectBitmapFonts(projectPath, curResolution);
loadCurrentProjectShaders(projectPath + "/assets/shaders/");
}
private void loadCurrentProjectParticles(String path) {
particleEffects.clear();
FileHandle sourceDir = new FileHandle(path);
for (FileHandle entry : sourceDir.list()) {
File file = entry.file();
String filename = file.getName();
if (file.isDirectory() || filename.endsWith(".DS_Store")) continue;
ParticleEffect particleEffect = new ParticleEffect();
particleEffect.load(Gdx.files.internal(file.getAbsolutePath()), currentProjectAtlas, "");
particleEffects.put(filename, particleEffect);
}
}
private void loadCurrentProjectSpineAnimations(String path, String curResolution) {
spineAnimAtlases.clear();
FileHandle sourceDir = new FileHandle(path + "orig/spine-animations");
for (FileHandle entry : sourceDir.list()) {
if (entry.file().isDirectory()) {
String animName = FilenameUtils.removeExtension(entry.file().getName());
TextureAtlas atlas = new TextureAtlas(Gdx.files.internal(path + curResolution + "/spine-animations/" + File.separator + animName + File.separator + animName + ".atlas"));
FileHandle animJsonFile = Gdx.files.internal(entry.file().getAbsolutePath() + File.separator + animName + ".json");
SpineAnimData data = new SpineAnimData();
data.atlas = atlas;
data.jsonFile = animJsonFile;
data.animName = animName;
spineAnimAtlases.put(animName, data);
}
}
}
private void loadCurrentProjectSpriteAnimations(String path, String curResolution) {
spriteAnimAtlases.clear();
FileHandle sourceDir = new FileHandle(path + curResolution + "/sprite-animations");
for (FileHandle entry : sourceDir.list()) {
if (entry.file().isDirectory()) {
String animName = FilenameUtils.removeExtension(entry.file().getName());
TextureAtlas atlas = new TextureAtlas(Gdx.files.internal(entry.file().getAbsolutePath() + File.separator + animName + ".atlas"));
spriteAnimAtlases.put(animName, atlas);
}
}
}
private void loadCurrentProjectSpriterAnimations(String path, String curResolution) {
spriterAnimFiles.clear();
FileHandle sourceDir = new FileHandle(path + "orig" + "/spriter-animations");
for (FileHandle entry : sourceDir.list()) {
if (entry.file().isDirectory()) {
String animName = entry.file().getName();
FileHandle scmlFile = new FileHandle(path + "orig" + "/spriter-animations/" + animName + "/" + animName + ".scml");
spriterAnimFiles.put(animName, scmlFile);
}
}
}
public void loadCurrentProjectAssets(String packPath) {
try {
currentProjectAtlas = new TextureAtlas(Gdx.files.getFileHandle(packPath, Files.FileType.Internal));
} catch (Exception e) {
currentProjectAtlas = new TextureAtlas();
}
}
public ArrayList<FontSizePair> getProjectRequiredFontsList() {
HashSet<FontSizePair> fontsToLoad = new HashSet<>();
for (int i = 0; i < getProjectVO().scenes.size(); i++) {
SceneVO scene = getSceneVO(getProjectVO().scenes.get(i).sceneName);
CompositeVO composite = scene.composite;
if (composite == null) {
continue;
}
FontSizePair[] fonts = composite.getRecursiveFontList();
for (CompositeItemVO library : getProjectVO().libraryItems.values()) {
FontSizePair[] libFonts = library.composite.getRecursiveFontList();
Collections.addAll(fontsToLoad, libFonts);
}
Collections.addAll(fontsToLoad, fonts);
}
return new ArrayList<>(fontsToLoad);
}
public void loadCurrentProjectBitmapFonts(String path, String curResolution) {
bitmapFonts.clear();
ArrayList<FontSizePair> requiredFonts = getProjectRequiredFontsList();
for (int i = 0; i < requiredFonts.size(); i++) {
FontSizePair pair = requiredFonts.get(i);
FileHandle fontFile;
try {
fontFile = getTTFSafely(pair.fontName);
FreeTypeFontGenerator generator = new FreeTypeFontGenerator(fontFile);
FreeTypeFontGenerator.FreeTypeFontParameter parameter = new FreeTypeFontGenerator.FreeTypeFontParameter();
parameter.size = Math.round(pair.fontSize * resolutionManager.getCurrentMul());
BitmapFont font = generator.generateFont(parameter);
bitmapFonts.put(pair, font);
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void loadCurrentProjectShaders(String path) {
Iterator<Entry<String, ShaderProgram>> it = shaderPrograms.entrySet().iterator();
while (it.hasNext()) {
Entry<String, ShaderProgram> pair = it.next();
pair.getValue().dispose();
it.remove();
}
shaderPrograms.clear();
FileHandle sourceDir = new FileHandle(path);
for (FileHandle entry : sourceDir.list()) {
File file = entry.file();
String filename = file.getName().replace(".vert", "").replace(".frag", "");
if (file.isDirectory() || filename.endsWith(".DS_Store") || shaderPrograms.containsKey(filename)) continue;
// check if pair exists.
if(Gdx.files.internal(path + filename + ".vert").exists() && Gdx.files.internal(path + filename + ".frag").exists()) {
ShaderProgram shaderProgram = new ShaderProgram(Gdx.files.internal(path + filename + ".vert"), Gdx.files.internal(path + filename + ".frag"));
System.out.println(shaderProgram.getLog());
shaderPrograms.put(filename, shaderProgram);
}
}
}
/**
* @param fontPath
* @deprecated
*/
private void loadCurrentProjectSkin(String fontPath) {
/*
File styleFile = new File(fontPath, "styles.dt");
FileHandle f = new FileHandle(styleFile);
if (styleFile.isFile() && styleFile.exists()) {
projectSkin = new MySkin(f);
ObjectMap<String, BitmapFont> map = projectSkin.getAll(BitmapFont.class);
for (ObjectMap.Entry<String, BitmapFont> entry : map.entries()) {
projectSkin.getFont(entry.key).getRegion().getTexture().setFilter(Texture.TextureFilter.Linear, Texture.TextureFilter.Linear);
}
}
*/
}
public FileHandle getTTFSafely(String fontName) throws IOException {
FontManager fontManager = facade.retrieveProxy(FontManager.NAME);
ProjectManager projectManager = facade.retrieveProxy(ProjectManager.NAME);
String expectedPath = projectManager.getFreeTypeFontPath() + File.separator + fontName + ".ttf";
FileHandle expectedFile = Gdx.files.internal(expectedPath);
if (!expectedFile.exists()) {
            // let's check if the system fonts have it
HashMap<String, String> fonts = fontManager.getFontsMap();
if (fonts.containsKey(fontName)) {
File source = new File(fonts.get(fontName));
FileUtils.copyFile(source, expectedFile.file());
expectedFile = Gdx.files.internal(expectedPath);
} else {
throw new FileNotFoundException();
}
}
return expectedFile;
}
public void addBitmapFont(String name, int size, BitmapFont font) {
bitmapFonts.put(new FontSizePair(name, size), font);
}
public void flushAllUnusedFonts() {
//List of fonts that are required to be in memory
ArrayList<FontSizePair> requiredFonts = getProjectRequiredFontsList();
ArrayList<FontSizePair> fontsInMemory = new ArrayList<>(bitmapFonts.keySet());
for (FontSizePair font : fontsInMemory) {
if (!requiredFonts.contains(font)) {
bitmapFonts.remove(font);
}
}
}
public boolean isFontLoaded(String shortName, int fontSize) {
return bitmapFonts.containsKey(new FontSizePair(shortName, fontSize));
}
public void prepareEmbeddingFont(String fontfamily, int fontSize) {
flushAllUnusedFonts();
if (isFontLoaded(fontfamily, fontSize)) {
return;
}
FontManager fontManager = facade.retrieveProxy(FontManager.NAME);
FreeTypeFontGenerator.FreeTypeFontParameter parameter = new FreeTypeFontGenerator.FreeTypeFontParameter();
parameter.size = fontSize;
FreeTypeFontGenerator generator = new FreeTypeFontGenerator(fontManager.getTTFByName(fontfamily));
BitmapFont font = generator.generateFont(parameter);
addBitmapFont(fontfamily, parameter.size, font);
}
public HashMap<String, SpineAnimData> getProjectSpineAnimationsList() {
return spineAnimAtlases;
}
public HashMap<String, TextureAtlas> getProjectSpriteAnimationsList() {
return spriteAnimAtlases;
}
public HashMap<String, FileHandle> getProjectSpriterAnimationsList() {
return spriterAnimFiles;
}
public TextureAtlas getProjectAssetsList() {
return currentProjectAtlas;
}
public HashMap<String, ParticleEffect> getProjectParticleList() {
return particleEffects;
}
@Override
public ResolutionEntryVO getLoadedResolution() {
if(packResolutionName.equals("orig")) {
return getProjectVO().originalResolution;
}
return getProjectVO().getResolution(packResolutionName);
}
@Override
public ShaderProgram getShaderProgram(String shaderName) {
return shaderPrograms.get(shaderName);
}
public HashMap<String, ShaderProgram> getShaders() {
return shaderPrograms;
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2010-2012 Project SkyFire <http://www.projectskyfire.org/>
* Copyright (C) 2005-2012 MaNGOS <http://www.getmangos.com/>
* Copyright (C) 2008-2012 Trinity <http://www.trinitycore.org/>
 * Copyright (C) 2005-2012 ScriptDev2 <http://www.scriptdev2.com/>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef DEF_ONYXIAS_LAIR_H
#define DEF_ONYXIAS_LAIR_H
enum eData64
{
DATA_ONYXIA_GUID,
DATA_FLOOR_ERUPTION_GUID
};
enum eInstanceData
{
DATA_ONYXIA,
MAX_ENCOUNTER,
DATA_ONYXIA_PHASE,
DATA_SHE_DEEP_BREATH_MORE,
DATA_MANY_WHELPS_COUNT
};
enum eCreatures
{
NPC_WHELP = 11262,
NPC_LAIRGUARD = 36561,
NPC_ONYXIA = 10184
};
enum eOnyxiaPhases
{
PHASE_START = 1,
PHASE_BREATH = 2,
PHASE_END = 3
};
enum eGameObjects
{
GO_WHELP_SPAWNER = 176510,
GO_WHELP_EGG = 176511
};
enum eAchievementData
{
ACHIEV_CRITERIA_MANY_WHELPS_10_PLAYER = 12565, // Criteria for achievement 4403: Many Whelps! Handle It! (10 player) Hatch 50 eggs in 10s
ACHIEV_CRITERIA_MANY_WHELPS_25_PLAYER = 12568, // Criteria for achievement 4406: Many Whelps! Handle It! (25 player) Hatch 50 eggs in 10s
ACHIEV_CRITERIA_DEEP_BREATH_10_PLAYER = 12566, // Criteria for achievement 4404: She Deep Breaths More (10 player) Everybody evade Deep Breath
ACHIEV_CRITERIA_DEEP_BREATH_25_PLAYER = 12569, // Criteria for achievement 4407: She Deep Breaths More (25 player) Everybody evade Deep Breath
ACHIEV_TIMED_START_EVENT = 6601, // Timed event for achievement 4402, 4005: More Dots! (10, 25 player) 5 min kill
};
#endif | {
"pile_set_name": "Github"
} |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.resourceestimator.translator.api;
import java.text.ParseException;
import org.apache.hadoop.resourceestimator.common.api.RecurrenceId;
import org.apache.hadoop.resourceestimator.translator.impl.LogParserUtil;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.server.resourcemanager.reservation.RLESparseResourceAllocation;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* Test JobMetaData.
*/
public class TestJobMetaData {
/**
* TODO: parametrize this test.
*/
private LogParserUtil logParserUtil = new LogParserUtil();
private JobMetaData jobMetaData;
private RecurrenceId recurrenceId;
@Before public final void setup() throws ParseException {
recurrenceId = new RecurrenceId("Fraud Detection", "17/07/16 16:27:25");
jobMetaData = new JobMetaData(
logParserUtil.stringToUnixTimestamp("17/07/16 16:27:25"));
jobMetaData.setRecurrenceId(recurrenceId);
jobMetaData.setContainerStart("C1",
logParserUtil.stringToUnixTimestamp("17/07/16 16:27:30"));
jobMetaData.setContainerEnd("C1",
logParserUtil.stringToUnixTimestamp("17/07/16 16:37:30"));
jobMetaData.setContainerStart("C2",
logParserUtil.stringToUnixTimestamp("17/07/16 16:27:40"));
jobMetaData.setContainerEnd("C2",
logParserUtil.stringToUnixTimestamp("17/07/16 16:37:40"));
jobMetaData.setJobFinishTime(
logParserUtil.stringToUnixTimestamp("17/07/16 16:37:45"));
final Resource containerAlloc = Resource.newInstance(1, 1);
jobMetaData.getResourceSkyline().setContainerSpec(containerAlloc);
jobMetaData.getResourceSkyline().setJobInputDataSize(1024.5);
jobMetaData.createSkyline();
}
@Test public final void testGetContainerSpec() {
final Resource containerAlloc =
jobMetaData.getResourceSkyline().getContainerSpec();
final Resource containerAlloc2 = Resource.newInstance(1, 1);
Assert.assertEquals(containerAlloc.getMemorySize(),
containerAlloc2.getMemorySize());
Assert.assertEquals(containerAlloc.getVirtualCores(),
containerAlloc2.getVirtualCores());
}
@Test public final void testGetJobSize() {
Assert.assertEquals(jobMetaData.getResourceSkyline().getJobInputDataSize(),
1024.5, 0);
}
@Test public final void testGetRecurrenceeId() {
final RecurrenceId recurrenceIdTest =
new RecurrenceId("Fraud Detection", "17/07/16 16:27:25");
Assert.assertEquals(recurrenceIdTest, jobMetaData.getRecurrenceId());
}
@Test public final void testStringToUnixTimestamp() throws ParseException {
final long submissionTime =
logParserUtil.stringToUnixTimestamp("17/07/16 16:27:25");
Assert.assertEquals(jobMetaData.getResourceSkyline().getJobSubmissionTime(),
submissionTime);
}
@Test public final void testResourceSkyline() {
final RLESparseResourceAllocation skylineList =
jobMetaData.getResourceSkyline().getSkylineList();
final int containerCPU =
jobMetaData.getResourceSkyline().getContainerSpec().getVirtualCores();
int k;
for (k = 0; k < 5; k++) {
Assert.assertEquals(0,
skylineList.getCapacityAtTime(k).getVirtualCores() / containerCPU);
}
for (k = 5; k < 15; k++) {
Assert.assertEquals(1,
skylineList.getCapacityAtTime(k).getVirtualCores() / containerCPU);
}
for (k = 15; k < 605; k++) {
Assert.assertEquals(2,
skylineList.getCapacityAtTime(k).getVirtualCores() / containerCPU);
}
for (k = 605; k < 615; k++) {
Assert.assertEquals(1,
skylineList.getCapacityAtTime(k).getVirtualCores() / containerCPU);
}
Assert.assertEquals(0,
skylineList.getCapacityAtTime(615).getVirtualCores() / containerCPU);
}
@Test public final void testContainerReleaseTimeMissing()
throws ParseException {
// create an invalid JobMetaData
recurrenceId = new RecurrenceId("Fraud Detection", "17/07/16 16:27:25");
jobMetaData = new JobMetaData(
logParserUtil.stringToUnixTimestamp("17/07/16 16:27:25"));
jobMetaData.setRecurrenceId(recurrenceId);
jobMetaData.setContainerStart("C1",
logParserUtil.stringToUnixTimestamp("17/07/16 16:27:30"));
jobMetaData.setContainerEnd("C1",
logParserUtil.stringToUnixTimestamp("17/07/16 16:37:30"));
jobMetaData.setContainerStart("C2",
logParserUtil.stringToUnixTimestamp("17/07/16 16:27:40"));
jobMetaData.setJobFinishTime(
logParserUtil.stringToUnixTimestamp("17/07/16 16:37:45"));
final Resource containerAlloc = Resource.newInstance(1, 1);
jobMetaData.getResourceSkyline().setContainerSpec(containerAlloc);
jobMetaData.getResourceSkyline().setJobInputDataSize(1024.5);
jobMetaData.createSkyline();
// test the generated ResourceSkyline
final RLESparseResourceAllocation skylineList =
jobMetaData.getResourceSkyline().getSkylineList();
final int containerCPU =
jobMetaData.getResourceSkyline().getContainerSpec().getVirtualCores();
int k;
for (k = 0; k < 5; k++) {
Assert.assertEquals(0,
skylineList.getCapacityAtTime(k).getVirtualCores() / containerCPU);
}
for (k = 5; k < 605; k++) {
Assert.assertEquals(1,
skylineList.getCapacityAtTime(k).getVirtualCores() / containerCPU);
}
Assert.assertEquals(0,
skylineList.getCapacityAtTime(605).getVirtualCores() / containerCPU);
}
@After public final void cleanUp() {
jobMetaData = null;
recurrenceId = null;
logParserUtil = null;
}
}
| {
"pile_set_name": "Github"
} |
include_directories(
include
${intergen_SOURCE_DIR}/model/include
${intergen_SOURCE_DIR}/utils/include
)
set (SOURCES
src/cppgen/comment.cc
src/cppgen/cpp_api_code_generator.cc
src/cppgen/cpp_class.cc
src/cppgen/cpp_file.cc
src/cppgen/cpp_function.cc
src/cppgen/cpp_interface_code_generator.cc
src/cppgen/declaration_generator.cc
src/cppgen/definition_generator.cc
src/cppgen/enum_from_json_value_function.cc
src/cppgen/enum_to_json_value_function.cc
src/cppgen/function_id_method.cc
src/cppgen/generator_preferences.cc
src/cppgen/handler_interface.cc
src/cppgen/is_valid_enum_function.cc
src/cppgen/literal_generator.cc
src/cppgen/message_factory_function.cc
src/cppgen/message_handle_with_method.cc
src/cppgen/message_interface.cc
src/cppgen/module_manager.cc
src/cppgen/namespace.cc
src/cppgen/naming_convention.cc
src/cppgen/struct_type_constructor.cc
src/cppgen/struct_type_from_json_method.cc
src/cppgen/struct_type_is_initialized_method.cc
src/cppgen/struct_type_is_valid_method.cc
src/cppgen/struct_type_report_erros_method.cc
src/cppgen/type_name_code_generator.cc
)
set (HEADERS
include/cppgen/comment.h
include/cppgen/cpp_api_code_generator.h
include/cppgen/cpp_class.h
include/cppgen/cpp_file.h
include/cppgen/cpp_function.h
include/cppgen/cpp_interface_code_generator.h
include/cppgen/declaration_generator.h
include/cppgen/definition_generator.h
include/cppgen/enum_from_json_value_function.h
include/cppgen/enum_to_json_value_function.h
include/cppgen/function_id_method.h
include/cppgen/generator_preferences.h
include/cppgen/handler_interface.h
include/cppgen/is_valid_enum_function.h
include/cppgen/literal_generator.h
include/cppgen/message_factory_function.h
include/cppgen/message_handle_with_method.h
include/cppgen/message_interface.h
include/cppgen/module_manager.h
include/cppgen/namespace.h
include/cppgen/naming_convention.h
include/cppgen/struct_type_constructor.h
include/cppgen/struct_type_from_json_method.h
include/cppgen/struct_type_is_initialized_method.h
include/cppgen/struct_type_is_valid_method.h
include/cppgen/struct_type_report_erros_method.h
include/cppgen/type_name_code_generator.h
)
add_library(intergen_cppgen ${HEADERS} ${SOURCES})
target_link_libraries(intergen_cppgen intergen_model intergen_utils)
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2016 Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/**
* JDK-8170594: >>>=0 generates invalid bytecode for BaseNode LHS
*
* @test
* @run
*/
var obj1 = {x: "100"};
(function (o, p) {
if (p) {
o.x >>>= 0;
}
})(obj1, true)
Assert.assertTrue(obj1.x === 100)
var obj2 = ["100"];
(function (o, p) {
if (p) {
o[0] >>>= 0;
}
})(obj2, true)
Assert.assertTrue(obj2[0] === 100)
| {
"pile_set_name": "Github"
} |
package drds
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
// VersionsItem is a nested struct in drds response
type VersionsItem struct {
DrdsVersion string `json:"DrdsVersion" xml:"DrdsVersion"`
Latest bool `json:"Latest" xml:"Latest"`
}
| {
"pile_set_name": "Github"
} |
package solver
import (
"context"
"io"
"time"
"github.com/moby/buildkit/client"
"github.com/moby/buildkit/util/progress"
digest "github.com/opencontainers/go-digest"
"github.com/sirupsen/logrus"
)
func (j *Job) Status(ctx context.Context, ch chan *client.SolveStatus) error {
vs := &vertexStream{cache: map[digest.Digest]*client.Vertex{}}
pr := j.pr.Reader(ctx)
defer func() {
if enc := vs.encore(); len(enc) > 0 {
ch <- &client.SolveStatus{Vertexes: enc}
}
close(ch)
}()
for {
p, err := pr.Read(ctx)
if err != nil {
if err == io.EOF {
return nil
}
return err
}
ss := &client.SolveStatus{}
for _, p := range p {
switch v := p.Sys.(type) {
case client.Vertex:
ss.Vertexes = append(ss.Vertexes, vs.append(v)...)
case progress.Status:
vtx, ok := p.Meta("vertex")
if !ok {
logrus.Warnf("progress %s status without vertex info", p.ID)
continue
}
vs := &client.VertexStatus{
ID: p.ID,
Vertex: vtx.(digest.Digest),
Name: v.Action,
Total: int64(v.Total),
Current: int64(v.Current),
Timestamp: p.Timestamp,
Started: v.Started,
Completed: v.Completed,
}
ss.Statuses = append(ss.Statuses, vs)
case client.VertexLog:
vtx, ok := p.Meta("vertex")
if !ok {
logrus.Warnf("progress %s log without vertex info", p.ID)
continue
}
v.Vertex = vtx.(digest.Digest)
v.Timestamp = p.Timestamp
ss.Logs = append(ss.Logs, &v)
}
}
select {
case <-ctx.Done():
return ctx.Err()
case ch <- ss:
}
}
}
type vertexStream struct {
cache map[digest.Digest]*client.Vertex
}
func (vs *vertexStream) append(v client.Vertex) []*client.Vertex {
var out []*client.Vertex
vs.cache[v.Digest] = &v
if v.Started != nil {
for _, inp := range v.Inputs {
if inpv, ok := vs.cache[inp]; ok {
if !inpv.Cached && inpv.Completed == nil {
inpv.Cached = true
inpv.Started = v.Started
inpv.Completed = v.Started
out = append(out, vs.append(*inpv)...)
delete(vs.cache, inp)
}
}
}
}
vcopy := v
return append(out, &vcopy)
}
func (vs *vertexStream) encore() []*client.Vertex {
var out []*client.Vertex
for _, v := range vs.cache {
if v.Started != nil && v.Completed == nil {
now := time.Now()
v.Completed = &now
v.Error = context.Canceled.Error()
out = append(out, v)
}
}
return out
}
| {
"pile_set_name": "Github"
} |
proc testAnonRanges(type lowT, type countT) {
var zero = 0:countT;
// Applying #0 to a 0.. uint range results in wraparound leading to
// an error when trying to iterate over it when bounds checks are
// on.
for i in 0:lowT..#(0:countT) do write(i, ' '); writeln();
for i in 0:lowT..#(zero) do write(i, ' '); writeln();
for i in 0:lowT..#(1:countT) do write(i, ' '); writeln();
for i in 0:lowT..#(10:countT) by 2:lowT do write(i, ' '); writeln();
for i in (0:lowT.. by 2:lowT) #(10:countT) do write(i, ' '); writeln();
for i in 10:lowT..#10:countT do write(i, ' '); writeln();
}
testAnonRanges(uint(64), int(64));
| {
"pile_set_name": "Github"
} |
using System;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.EntityFrameworkCore.Migrations;
using Migrations.Context;
namespace ComputedColumns.EF.Migrations
{
[DbContext(typeof(StoreContext))]
partial class StoreContextModelSnapshot : ModelSnapshot
{
protected override void BuildModel(ModelBuilder modelBuilder)
{
modelBuilder
.HasAnnotation("ProductVersion", "1.1.0-rtm-22752")
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
modelBuilder.Entity("ComputedColumns.Models.Order", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b.Property<int>("CustomerId");
b.Property<DateTime>("OrderDate")
.ValueGeneratedOnAdd()
.HasColumnType("datetime")
.HasDefaultValueSql("getdate()");
b.Property<decimal?>("OrderTotal")
.ValueGeneratedOnAddOrUpdate()
.HasColumnType("money")
.HasComputedColumnSql("Store.GetOrderTotal([Id])");
b.Property<DateTime>("ShipDate")
.ValueGeneratedOnAdd()
.HasColumnType("datetime")
.HasDefaultValueSql("getdate()");
b.Property<byte[]>("TimeStamp")
.IsConcurrencyToken()
.ValueGeneratedOnAddOrUpdate();
b.HasKey("Id");
b.ToTable("Orders","Store");
});
modelBuilder.Entity("ComputedColumns.Models.OrderDetail", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
b.Property<decimal?>("LineItemTotal")
.ValueGeneratedOnAddOrUpdate()
.HasColumnType("money")
.HasComputedColumnSql("[Quantity]*[UnitCost]");
b.Property<int>("OrderId");
b.Property<int>("Quantity");
b.Property<byte[]>("TimeStamp")
.IsConcurrencyToken()
.ValueGeneratedOnAddOrUpdate();
b.Property<decimal>("UnitCost")
.HasColumnType("money");
b.HasKey("Id");
b.HasIndex("OrderId");
b.ToTable("OrderDetails","Store");
});
modelBuilder.Entity("ComputedColumns.Models.OrderDetail", b =>
{
b.HasOne("ComputedColumns.Models.Order", "Order")
.WithMany("OrderDetails")
.HasForeignKey("OrderId")
.OnDelete(DeleteBehavior.Cascade);
});
}
}
}
| {
"pile_set_name": "Github"
} |
/* This source file must have a .cpp extension so that all C++ compilers
recognize the extension without flags. Borland does not know .cxx for
example. */
#ifndef __cplusplus
# error "A C compiler has been selected for C++."
#endif
/* Version number components: V=Version, R=Revision, P=Patch
Version date components: YYYY=Year, MM=Month, DD=Day */
#if defined(__COMO__)
# define COMPILER_ID "Comeau"
/* __COMO_VERSION__ = VRR */
# define COMPILER_VERSION_MAJOR DEC(__COMO_VERSION__ / 100)
# define COMPILER_VERSION_MINOR DEC(__COMO_VERSION__ % 100)
#elif defined(__INTEL_COMPILER) || defined(__ICC)
# define COMPILER_ID "Intel"
/* __INTEL_COMPILER = VRP */
# define COMPILER_VERSION_MAJOR DEC(__INTEL_COMPILER/100)
# define COMPILER_VERSION_MINOR DEC(__INTEL_COMPILER/10 % 10)
# define COMPILER_VERSION_PATCH DEC(__INTEL_COMPILER % 10)
# if defined(__INTEL_COMPILER_BUILD_DATE)
/* __INTEL_COMPILER_BUILD_DATE = YYYYMMDD */
# define COMPILER_VERSION_TWEAK DEC(__INTEL_COMPILER_BUILD_DATE)
# endif
#elif defined(__PATHCC__)
# define COMPILER_ID "PathScale"
# define COMPILER_VERSION_MAJOR DEC(__PATHCC__)
# define COMPILER_VERSION_MINOR DEC(__PATHCC_MINOR__)
# if defined(__PATHCC_PATCHLEVEL__)
# define COMPILER_VERSION_PATCH DEC(__PATHCC_PATCHLEVEL__)
# endif
#elif defined(__clang__)
# define COMPILER_ID "Clang"
# define COMPILER_VERSION_MAJOR DEC(__clang_major__)
# define COMPILER_VERSION_MINOR DEC(__clang_minor__)
# define COMPILER_VERSION_PATCH DEC(__clang_patchlevel__)
#elif defined(__BORLANDC__) && defined(__CODEGEARC_VERSION__)
# define COMPILER_ID "Embarcadero"
# define COMPILER_VERSION_MAJOR HEX(__CODEGEARC_VERSION__>>24 & 0x00FF)
# define COMPILER_VERSION_MINOR HEX(__CODEGEARC_VERSION__>>16 & 0x00FF)
# define COMPILER_VERSION_PATCH HEX(__CODEGEARC_VERSION__ & 0xFFFF)
#elif defined(__BORLANDC__)
# define COMPILER_ID "Borland"
/* __BORLANDC__ = 0xVRR */
# define COMPILER_VERSION_MAJOR HEX(__BORLANDC__>>8)
# define COMPILER_VERSION_MINOR HEX(__BORLANDC__ & 0xFF)
#elif defined(__WATCOMC__)
# define COMPILER_ID "Watcom"
/* __WATCOMC__ = VVRR */
# define COMPILER_VERSION_MAJOR DEC(__WATCOMC__ / 100)
# define COMPILER_VERSION_MINOR DEC(__WATCOMC__ % 100)
#elif defined(__SUNPRO_CC)
# define COMPILER_ID "SunPro"
# if __SUNPRO_CC >= 0x5100
/* __SUNPRO_CC = 0xVRRP */
# define COMPILER_VERSION_MAJOR HEX(__SUNPRO_CC>>12)
# define COMPILER_VERSION_MINOR HEX(__SUNPRO_CC>>4 & 0xFF)
# define COMPILER_VERSION_PATCH HEX(__SUNPRO_CC & 0xF)
# else
/* __SUNPRO_CC = 0xVRP */
# define COMPILER_VERSION_MAJOR HEX(__SUNPRO_CC>>8)
# define COMPILER_VERSION_MINOR HEX(__SUNPRO_CC>>4 & 0xF)
# define COMPILER_VERSION_PATCH HEX(__SUNPRO_CC & 0xF)
# endif
#elif defined(__HP_aCC)
# define COMPILER_ID "HP"
/* __HP_aCC = VVRRPP */
# define COMPILER_VERSION_MAJOR DEC(__HP_aCC/10000)
# define COMPILER_VERSION_MINOR DEC(__HP_aCC/100 % 100)
# define COMPILER_VERSION_PATCH DEC(__HP_aCC % 100)
#elif defined(__DECCXX)
# define COMPILER_ID "Compaq"
/* __DECCXX_VER = VVRRTPPPP */
# define COMPILER_VERSION_MAJOR DEC(__DECCXX_VER/10000000)
# define COMPILER_VERSION_MINOR DEC(__DECCXX_VER/100000 % 100)
# define COMPILER_VERSION_PATCH DEC(__DECCXX_VER % 10000)
#elif defined(__IBMCPP__)
# if defined(__COMPILER_VER__)
# define COMPILER_ID "zOS"
# else
# if __IBMCPP__ >= 800
# define COMPILER_ID "XL"
# else
# define COMPILER_ID "VisualAge"
# endif
/* __IBMCPP__ = VRP */
# define COMPILER_VERSION_MAJOR DEC(__IBMCPP__/100)
# define COMPILER_VERSION_MINOR DEC(__IBMCPP__/10 % 10)
# define COMPILER_VERSION_PATCH DEC(__IBMCPP__ % 10)
# endif
#elif defined(__PGI)
# define COMPILER_ID "PGI"
# define COMPILER_VERSION_MAJOR DEC(__PGIC__)
# define COMPILER_VERSION_MINOR DEC(__PGIC_MINOR__)
# if defined(__PGIC_PATCHLEVEL__)
# define COMPILER_VERSION_PATCH DEC(__PGIC_PATCHLEVEL__)
# endif
#elif defined(_CRAYC)
# define COMPILER_ID "Cray"
# define COMPILER_VERSION_MAJOR DEC(_RELEASE)
# define COMPILER_VERSION_MINOR DEC(_RELEASE_MINOR)
#elif defined(__TI_COMPILER_VERSION__)
# define COMPILER_ID "TI"
/* __TI_COMPILER_VERSION__ = VVVRRRPPP */
# define COMPILER_VERSION_MAJOR DEC(__TI_COMPILER_VERSION__/1000000)
# define COMPILER_VERSION_MINOR DEC(__TI_COMPILER_VERSION__/1000 % 1000)
# define COMPILER_VERSION_PATCH DEC(__TI_COMPILER_VERSION__ % 1000)
#elif defined(__SCO_VERSION__)
# define COMPILER_ID "SCO"
#elif defined(__GNUC__)
# define COMPILER_ID "GNU"
# define COMPILER_VERSION_MAJOR DEC(__GNUC__)
# define COMPILER_VERSION_MINOR DEC(__GNUC_MINOR__)
# if defined(__GNUC_PATCHLEVEL__)
# define COMPILER_VERSION_PATCH DEC(__GNUC_PATCHLEVEL__)
# endif
#elif defined(_MSC_VER)
# define COMPILER_ID "MSVC"
/* _MSC_VER = VVRR */
# define COMPILER_VERSION_MAJOR DEC(_MSC_VER / 100)
# define COMPILER_VERSION_MINOR DEC(_MSC_VER % 100)
# if defined(_MSC_FULL_VER)
# if _MSC_VER >= 1400
/* _MSC_FULL_VER = VVRRPPPPP */
# define COMPILER_VERSION_PATCH DEC(_MSC_FULL_VER % 100000)
# else
/* _MSC_FULL_VER = VVRRPPPP */
# define COMPILER_VERSION_PATCH DEC(_MSC_FULL_VER % 10000)
# endif
# endif
# if defined(_MSC_BUILD)
# define COMPILER_VERSION_TWEAK DEC(_MSC_BUILD)
# endif
/* Analog VisualDSP++ >= 4.5.6 */
#elif defined(__VISUALDSPVERSION__)
# define COMPILER_ID "ADSP"
/* __VISUALDSPVERSION__ = 0xVVRRPP00 */
# define COMPILER_VERSION_MAJOR HEX(__VISUALDSPVERSION__>>24)
# define COMPILER_VERSION_MINOR HEX(__VISUALDSPVERSION__>>16 & 0xFF)
# define COMPILER_VERSION_PATCH HEX(__VISUALDSPVERSION__>>8 & 0xFF)
/* Analog VisualDSP++ < 4.5.6 */
#elif defined(__ADSPBLACKFIN__) || defined(__ADSPTS__) || defined(__ADSP21000__)
# define COMPILER_ID "ADSP"
/* IAR Systems compiler for embedded systems.
http://www.iar.com */
#elif defined(__IAR_SYSTEMS_ICC__ ) || defined(__IAR_SYSTEMS_ICC)
# define COMPILER_ID "IAR"
#elif defined(_SGI_COMPILER_VERSION) || defined(_COMPILER_VERSION)
# define COMPILER_ID "MIPSpro"
# if defined(_SGI_COMPILER_VERSION)
/* _SGI_COMPILER_VERSION = VRP */
# define COMPILER_VERSION_MAJOR DEC(_SGI_COMPILER_VERSION/100)
# define COMPILER_VERSION_MINOR DEC(_SGI_COMPILER_VERSION/10 % 10)
# define COMPILER_VERSION_PATCH DEC(_SGI_COMPILER_VERSION % 10)
# else
/* _COMPILER_VERSION = VRP */
# define COMPILER_VERSION_MAJOR DEC(_COMPILER_VERSION/100)
# define COMPILER_VERSION_MINOR DEC(_COMPILER_VERSION/10 % 10)
# define COMPILER_VERSION_PATCH DEC(_COMPILER_VERSION % 10)
# endif
/* This compiler is either not known or is too old to define an
identification macro. Try to identify the platform and guess that
it is the native compiler. */
#elif defined(__sgi)
# define COMPILER_ID "MIPSpro"
#elif defined(__hpux) || defined(__hpua)
# define COMPILER_ID "HP"
#else /* unknown compiler */
# define COMPILER_ID ""
#endif
/* Construct the string literal in pieces to prevent the source from
getting matched. Store it in a pointer rather than an array
because some compilers will just produce instructions to fill the
array rather than assigning a pointer to a static array. */
char const* info_compiler = "INFO" ":" "compiler[" COMPILER_ID "]";
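/* For example, with COMPILER_ID "GNU" the pointer above refers to the bytes
   "INFO:compiler[GNU]"; build tools can recover the id by scanning the
   compiled object or executable for the "INFO:" marker. */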
/* Identify known platforms by name. */
#if defined(__linux) || defined(__linux__) || defined(linux)
# define PLATFORM_ID "Linux"
#elif defined(__CYGWIN__)
# define PLATFORM_ID "Cygwin"
#elif defined(__MINGW32__)
# define PLATFORM_ID "MinGW"
#elif defined(__APPLE__)
# define PLATFORM_ID "Darwin"
#elif defined(_WIN32) || defined(__WIN32__) || defined(WIN32)
# define PLATFORM_ID "Windows"
#elif defined(__FreeBSD__) || defined(__FreeBSD)
# define PLATFORM_ID "FreeBSD"
#elif defined(__NetBSD__) || defined(__NetBSD)
# define PLATFORM_ID "NetBSD"
#elif defined(__OpenBSD__) || defined(__OPENBSD)
# define PLATFORM_ID "OpenBSD"
#elif defined(__sun) || defined(sun)
# define PLATFORM_ID "SunOS"
#elif defined(_AIX) || defined(__AIX) || defined(__AIX__) || defined(__aix) || defined(__aix__)
# define PLATFORM_ID "AIX"
#elif defined(__sgi) || defined(__sgi__) || defined(_SGI)
# define PLATFORM_ID "IRIX"
#elif defined(__hpux) || defined(__hpux__)
# define PLATFORM_ID "HP-UX"
#elif defined(__HAIKU__)
# define PLATFORM_ID "Haiku"
#elif defined(__BeOS) || defined(__BEOS__) || defined(_BEOS)
# define PLATFORM_ID "BeOS"
#elif defined(__QNX__) || defined(__QNXNTO__)
# define PLATFORM_ID "QNX"
#elif defined(__tru64) || defined(_tru64) || defined(__TRU64__)
# define PLATFORM_ID "Tru64"
#elif defined(__riscos) || defined(__riscos__)
# define PLATFORM_ID "RISCos"
#elif defined(__sinix) || defined(__sinix__) || defined(__SINIX__)
# define PLATFORM_ID "SINIX"
#elif defined(__UNIX_SV__)
# define PLATFORM_ID "UNIX_SV"
#elif defined(__bsdos__)
# define PLATFORM_ID "BSDOS"
#elif defined(_MPRAS) || defined(MPRAS)
# define PLATFORM_ID "MP-RAS"
#elif defined(__osf) || defined(__osf__)
# define PLATFORM_ID "OSF1"
#elif defined(_SCO_SV) || defined(SCO_SV) || defined(sco_sv)
# define PLATFORM_ID "SCO_SV"
#elif defined(__ultrix) || defined(__ultrix__) || defined(_ULTRIX)
# define PLATFORM_ID "ULTRIX"
#elif defined(__XENIX__) || defined(_XENIX) || defined(XENIX)
# define PLATFORM_ID "Xenix"
#else /* unknown platform */
# define PLATFORM_ID ""
#endif
/* For windows compilers MSVC and Intel we can determine
the architecture of the compiler being used. This is because
the compilers do not have flags that can change the architecture,
but rather depend on which compiler is being used
*/
#if defined(_WIN32) && defined(_MSC_VER)
# if defined(_M_IA64)
# define ARCHITECTURE_ID "IA64"
# elif defined(_M_X64) || defined(_M_AMD64)
# define ARCHITECTURE_ID "x64"
# elif defined(_M_IX86)
# define ARCHITECTURE_ID "X86"
# elif defined(_M_ARM)
# define ARCHITECTURE_ID "ARM"
# elif defined(_M_MIPS)
# define ARCHITECTURE_ID "MIPS"
# elif defined(_M_SH)
# define ARCHITECTURE_ID "SHx"
# else /* unknown architecture */
# define ARCHITECTURE_ID ""
# endif
#else
# define ARCHITECTURE_ID ""
#endif
/* Convert integer to decimal digit literals. */
#define DEC(n) \
('0' + (((n) / 10000000)%10)), \
('0' + (((n) / 1000000)%10)), \
('0' + (((n) / 100000)%10)), \
('0' + (((n) / 10000)%10)), \
('0' + (((n) / 1000)%10)), \
('0' + (((n) / 100)%10)), \
('0' + (((n) / 10)%10)), \
('0' + ((n) % 10))
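/* For example, DEC(1700) expands to the eight character literals
   '0','0','0','0','1','7','0','0', a fixed-width, zero-padded decimal
   rendering suitable for embedding in the info_version string below. */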
/* Convert integer to hex digit literals. */
#define HEX(n) \
('0' + ((n)>>28 & 0xF)), \
('0' + ((n)>>24 & 0xF)), \
('0' + ((n)>>20 & 0xF)), \
('0' + ((n)>>16 & 0xF)), \
('0' + ((n)>>12 & 0xF)), \
('0' + ((n)>>8 & 0xF)), \
('0' + ((n)>>4 & 0xF)), \
('0' + ((n) & 0xF))
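/* Likewise, HEX(0x5100) yields '0','0','0','0','5','1','0','0'. Nibbles above
   9 map to the characters ':' through '?' rather than 'A'..'F', since each
   digit is formed by adding the nibble value to '0'. */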
/* Construct a string literal encoding the version number components. */
#ifdef COMPILER_VERSION_MAJOR
char const info_version[] = {
'I', 'N', 'F', 'O', ':',
'c','o','m','p','i','l','e','r','_','v','e','r','s','i','o','n','[',
COMPILER_VERSION_MAJOR,
# ifdef COMPILER_VERSION_MINOR
'.', COMPILER_VERSION_MINOR,
# ifdef COMPILER_VERSION_PATCH
'.', COMPILER_VERSION_PATCH,
# ifdef COMPILER_VERSION_TWEAK
'.', COMPILER_VERSION_TWEAK,
# endif
# endif
# endif
']','\0'};
#endif
/* Construct the string literal in pieces to prevent the source from
getting matched. Store it in a pointer rather than an array
because some compilers will just produce instructions to fill the
array rather than assigning a pointer to a static array. */
char const* info_platform = "INFO" ":" "platform[" PLATFORM_ID "]";
char const* info_arch = "INFO" ":" "arch[" ARCHITECTURE_ID "]";
/*--------------------------------------------------------------------------*/
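/* main references the info strings with an argc-derived index so the compiler
   cannot prove them unused and optimize them out of the binary. */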
int main(int argc, char* argv[])
{
int require = 0;
require += info_compiler[argc];
require += info_platform[argc];
#ifdef COMPILER_VERSION_MAJOR
require += info_version[argc];
#endif
(void)argv;
return require;
}
| {
"pile_set_name": "Github"
} |
/*
* This file is part of Telegram Server
* Copyright (C) 2015 Aykut Alparslan KOÇ
*
* Telegram Server is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Telegram Server is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.telegram.tl.auth;
import org.telegram.mtproto.ProtocolBuffer;
import org.telegram.tl.*;
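/**
 * TL request that binds a temporary authorization key to the permanent key
 * identified by perm_auth_key_id. ID below is the 32-bit TL constructor
 * (-841733627 == 0xcdd42a05) used to tag the serialized payload.
 */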
public class BindTempAuthKey extends TLObject {
public static final int ID = -841733627;
public long perm_auth_key_id;
public long nonce;
public int expires_at;
public byte[] encrypted_message;
public BindTempAuthKey() {
}
public BindTempAuthKey(long perm_auth_key_id, long nonce, int expires_at, byte[] encrypted_message){
this.perm_auth_key_id = perm_auth_key_id;
this.nonce = nonce;
this.expires_at = expires_at;
this.encrypted_message = encrypted_message;
}
@Override
public void deserialize(ProtocolBuffer buffer) {
perm_auth_key_id = buffer.readLong();
nonce = buffer.readLong();
expires_at = buffer.readInt();
encrypted_message = buffer.readBytes();
}
@Override
public ProtocolBuffer serialize() {
ProtocolBuffer buffer = new ProtocolBuffer(32);
serializeTo(buffer);
return buffer;
}
@Override
public void serializeTo(ProtocolBuffer buff) {
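// Wire format: constructor id (int32), perm_auth_key_id (int64),
// nonce (int64), expires_at (int32), encrypted_message (TL byte string).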
buff.writeInt(getConstructor());
buff.writeLong(perm_auth_key_id);
buff.writeLong(nonce);
buff.writeInt(expires_at);
buff.writeBytes(encrypted_message);
}
public int getConstructor() {
return ID;
}
}
| {
"pile_set_name": "Github"
} |