{"text": "var videoState={\n\n \n create: function(){\n var video;\n var sprite;\n \n video = game.add.video('storyvideo');\n video.play(false);\n sprite = video.addToWorld(game.world.centerX, game.world.centerY, 0.5, 0.5, 2, 2);\n//pause\n var picLabel= game.add.image(game.width/2, game.height-30, 'skip');\n picLabel.anchor.setTo(-1,1);\n picLabel.inputEnabled = true;\n picLabel.events.onInputDown.add(this.start, this);\n\n//skip video\n// var picLabel= game.add.image(game.width/2, game.height-30, 'skip');\n// picLabel.anchor.setTo(-2,1);\n// picLabel.inputEnabled = true;\n// picLabel.events.onInputDown.add(this.startgame, this);\n\n\n\n\n\n\n // game.time.events.add(Phaser.Timer.SECOND * 10, this.fadeState, this);\n\n // game.stage.backgroundColor= '#ffffff';\n\n game.physics.startSystem(Phaser.Physics. ARCADE);\n game.renderer.renderSession.roundPixels=true;\n \n },\n \n // fadeState: function(){\n // game.state.start('play');\n\n // },\n start: function(){\n game.paused = (game.paused) ? 
false : true;\n game.state.start('play');\n \n },\n // startgame: function(){\n // game.state.start('play');\n \n // },\n\n};", "meta": {"content_hash": "1c932c7359417cc3e05f3380eabb7fac", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 89, "avg_line_length": 25.41176470588235, "alnum_prop": 0.5601851851851852, "repo_name": "nickchulani99/ITE-445", "id": "d9d5cb9bb5e54f95b73df6a87d4bcecebdc40318", "size": "1296", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "final/alien copy 4/js/video.js", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "16832"}, {"name": "JavaScript", "bytes": "451272"}]}} {"text": "package mts\n\n//Licensed under the Apache License, Version 2.0 (the \"License\");\n//you may not use this file except in compliance with the License.\n//You may obtain a copy of the License at\n//\n//http://www.apache.org/licenses/LICENSE-2.0\n//\n//Unless required by applicable law or agreed to in writing, software\n//distributed under the License is distributed on an \"AS IS\" BASIS,\n//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n//See the License for the specific language governing permissions and\n//limitations under the License.\n//\n// Code generated by Alibaba Cloud SDK Code Generator.\n// Changes may cause incorrect behavior and will be lost if the code is regenerated.\n\nimport (\n\t\"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests\"\n\t\"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses\"\n)\n\n// SearchMedia invokes the mts.SearchMedia API synchronously\nfunc (client *Client) SearchMedia(request *SearchMediaRequest) (response *SearchMediaResponse, err error) {\n\tresponse = CreateSearchMediaResponse()\n\terr = client.DoAction(request, response)\n\treturn\n}\n\n// SearchMediaWithChan invokes the mts.SearchMedia API asynchronously\nfunc (client *Client) SearchMediaWithChan(request *SearchMediaRequest) (<-chan *SearchMediaResponse, <-chan error) 
{\n\tresponseChan := make(chan *SearchMediaResponse, 1)\n\terrChan := make(chan error, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tdefer close(responseChan)\n\t\tdefer close(errChan)\n\t\tresponse, err := client.SearchMedia(request)\n\t\tif err != nil {\n\t\t\terrChan <- err\n\t\t} else {\n\t\t\tresponseChan <- response\n\t\t}\n\t})\n\tif err != nil {\n\t\terrChan <- err\n\t\tclose(responseChan)\n\t\tclose(errChan)\n\t}\n\treturn responseChan, errChan\n}\n\n// SearchMediaWithCallback invokes the mts.SearchMedia API asynchronously\nfunc (client *Client) SearchMediaWithCallback(request *SearchMediaRequest, callback func(response *SearchMediaResponse, err error)) <-chan int {\n\tresult := make(chan int, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tvar response *SearchMediaResponse\n\t\tvar err error\n\t\tdefer close(result)\n\t\tresponse, err = client.SearchMedia(request)\n\t\tcallback(response, err)\n\t\tresult <- 1\n\t})\n\tif err != nil {\n\t\tdefer close(result)\n\t\tcallback(nil, err)\n\t\tresult <- 0\n\t}\n\treturn result\n}\n\n// SearchMediaRequest is the request struct for api SearchMedia\ntype SearchMediaRequest struct {\n\t*requests.RpcRequest\n\tResourceOwnerId requests.Integer `position:\"Query\" name:\"ResourceOwnerId\"`\n\tDescription string `position:\"Query\" name:\"Description\"`\n\tTitle string `position:\"Query\" name:\"Title\"`\n\tPageNumber requests.Integer `position:\"Query\" name:\"PageNumber\"`\n\tCateId string `position:\"Query\" name:\"CateId\"`\n\tPageSize requests.Integer `position:\"Query\" name:\"PageSize\"`\n\tFrom string `position:\"Query\" name:\"From\"`\n\tTag string `position:\"Query\" name:\"Tag\"`\n\tKeyWord string `position:\"Query\" name:\"KeyWord\"`\n\tResourceOwnerAccount string `position:\"Query\" name:\"ResourceOwnerAccount\"`\n\tOwnerAccount string `position:\"Query\" name:\"OwnerAccount\"`\n\tOwnerId requests.Integer `position:\"Query\" name:\"OwnerId\"`\n\tSortBy string `position:\"Query\" name:\"SortBy\"`\n\tTo 
string `position:\"Query\" name:\"To\"`\n}\n\n// SearchMediaResponse is the response struct for api SearchMedia\ntype SearchMediaResponse struct {\n\t*responses.BaseResponse\n\tTotalNum int64 `json:\"TotalNum\" xml:\"TotalNum\"`\n\tPageSize int64 `json:\"PageSize\" xml:\"PageSize\"`\n\tRequestId string `json:\"RequestId\" xml:\"RequestId\"`\n\tPageNumber int64 `json:\"PageNumber\" xml:\"PageNumber\"`\n\tMediaList MediaListInSearchMedia `json:\"MediaList\" xml:\"MediaList\"`\n}\n\n// CreateSearchMediaRequest creates a request to invoke SearchMedia API\nfunc CreateSearchMediaRequest() (request *SearchMediaRequest) {\n\trequest = &SearchMediaRequest{\n\t\tRpcRequest: &requests.RpcRequest{},\n\t}\n\trequest.InitWithApiInfo(\"Mts\", \"2014-06-18\", \"SearchMedia\", \"mts\", \"openAPI\")\n\trequest.Method = requests.POST\n\treturn\n}\n\n// CreateSearchMediaResponse creates a response to parse from SearchMedia response\nfunc CreateSearchMediaResponse() (response *SearchMediaResponse) {\n\tresponse = &SearchMediaResponse{\n\t\tBaseResponse: &responses.BaseResponse{},\n\t}\n\treturn\n}\n", "meta": {"content_hash": "4765aae0af2406ea691fb001ea5a83df", "timestamp": "", "source": "github", "line_count": 116, "max_line_length": 144, "avg_line_length": 38.23275862068966, "alnum_prop": 0.7019165727170237, "repo_name": "aliyun/alibaba-cloud-sdk-go", "id": "e23386a31fa5de227e10f19984b8f3e7eb736f22", "size": "4435", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "services/mts/search_media.go", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Go", "bytes": "734307"}, {"name": "Makefile", "bytes": "183"}]}} {"text": "\n\n\n \n Invitation to calendar\n \n\n\n

You was invited to calendar.

\n\n

You can login and set password procceed by link:

\n\n\">\n\n", "meta": {"content_hash": "324efbc1ad28fdfe902cd1e51f7e095e", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 76, "avg_line_length": 23.733333333333334, "alnum_prop": 0.6432584269662921, "repo_name": "vchukhalyonock/calendar", "id": "5ca5e852381e8a65c1d2612696445e148aa8677b", "size": "356", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "application/views/email/invite.php", "mode": "33188", "license": "mit", "language": [{"name": "ApacheConf", "bytes": "366"}, {"name": "CSS", "bytes": "39144"}, {"name": "HTML", "bytes": "5502"}, {"name": "JavaScript", "bytes": "460150"}, {"name": "PHP", "bytes": "1807984"}]}} {"text": "import os\nfrom flask import Flask,render_template,url_for,request,session,redirect\nfrom flask_login import LoginManager\nfrom flask_bootstrap import Bootstrap\nfrom flask_script import Manager,Shell\nfrom flask_sqlalchemy import SQLAlchemy\nfrom flask_mail import Mail\nfrom flask_moment import Moment\nfrom flask_socketio import SocketIO\nfrom flask_gravatar import Gravatar\n\napp = Flask(__name__)\nbasedir = os.path.abspath(os.path.dirname(__file__))\napp.config['SQLALCHEMY_DATABASE_URI'] =\\\n 'sqlite:///' + os.path.join(basedir, 'data.sqlite')\n#app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://sql6140009:Y1912zwYwC@sql6.freemysqlhosting.net/sql6140009'\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\napp.config['SECRET_KEY'] = 'hard to guess string'\n\napp.config['MAIL_SERVER'] = 'smtp.googlemail.com'\napp.config['MAIL_PORT'] = 587\napp.config['MAIL_USE_TLS'] = True\napp.config['MAIL_USERNAME'] = os.environ.get('MAIL_USERNAME')\napp.config['MAIL_PASSWORD'] = os.environ.get('MAIL_PASSWORD')\n\nmanager = Manager(app)\nbootstrap = Bootstrap()\ndb = SQLAlchemy(app)\nmail = Mail(app)\nmoment = Moment(app)\nsocketio = SocketIO(app)\ngravatar = Gravatar(app)\n\nlogin_manager = LoginManager()\nlogin_manager.session_protection = 'strong'\nlogin_manager.login_view = 'auth.login'\n# 
app.config['SECRET_KEY'] = 'hard to guess string'\n# app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True\n#app = create_app('DEVELOPMENT')\n\nbootstrap.init_app(app)\n#db.init_app(app)\nlogin_manager.init_app(app)\n\nfrom app import models\n\n@app.route('/')\ndef index():\n return render_template('index.html')\n\nfrom app.auth.views import admin\napp.register_blueprint(auth.views.admin,url_prefix = '/authentication')\n\nfrom app.main.views import welcome\napp.register_blueprint(main.views.welcome,url_prefix = '/welcome')\n\nfrom app.twitterAPI.views import api\napp.register_blueprint(twitterAPI.views.api,url_prefix = '/api')\n", "meta": {"content_hash": "a9735eefc6ff4807441825a5f2811599", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 117, "avg_line_length": 32.62068965517241, "alnum_prop": 0.7563424947145877, "repo_name": "sumedh123/debatify", "id": "89d78b1a48e585a5353b33fa5344659ba9f8770a", "size": "1892", "binary": false, "copies": "1", "ref": "refs/heads/UI", "path": "app/__init__.py", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "5939"}, {"name": "CSS", "bytes": "347155"}, {"name": "HTML", "bytes": "102503"}, {"name": "JavaScript", "bytes": "608373"}, {"name": "Python", "bytes": "8393673"}, {"name": "Shell", "bytes": "3298"}]}} {"text": "/*\n * String hash computation (interning).\n */\n\n#include \"duk_internal.h\"\n\n/* constants for duk_hashstring() */\n#define STRING_HASH_SHORTSTRING 4096\n#define STRING_HASH_MEDIUMSTRING (256 * 1024)\n#define STRING_HASH_BLOCKSIZE 256\n\nduk_uint32_t duk_heap_hashstring(duk_heap *heap, duk_uint8_t *str, duk_size_t len) {\n\t/*\n\t * Sampling long strings by byte skipping (like Lua does) is potentially\n\t * a cache problem. 
Here we do 'block skipping' instead for long strings:\n\t * hash an initial part, and then sample the rest of the string with\n\t * reasonably sized chunks.\n\t *\n\t * Skip should depend on length and bound the total time to roughly\n\t * logarithmic.\n\t *\n\t * With current values:\n\t *\n\t * 1M string => 256 * 241 = 61696 bytes (0.06M) of hashing\n\t * 1G string => 256 * 16321 = 4178176 bytes (3.98M) of hashing\n\t *\n\t * After an initial part has been hashed, an offset is applied before\n\t * starting the sampling. The initial offset is computed from the\n\t * hash of the initial part of the string. The idea is to avoid the\n\t * case that all long strings have certain offset ranges that are never\n\t * sampled.\n\t */\n\t\n\t/* note: mixing len into seed improves hashing when skipping */\n\tduk_uint32_t str_seed = heap->hash_seed ^ len;\n\n\tif (len <= STRING_HASH_SHORTSTRING) {\n\t\treturn duk_util_hashbytes(str, len, str_seed);\n\t} else {\n\t\tduk_uint32_t hash;\n\t\tduk_size_t off;\n\t\tduk_size_t skip;\n\n\t\tif (len <= STRING_HASH_MEDIUMSTRING) {\n\t\t\tskip = (duk_size_t) (16 * STRING_HASH_BLOCKSIZE + STRING_HASH_BLOCKSIZE);\n\t\t} else {\n\t\t\tskip = (duk_size_t) (256 * STRING_HASH_BLOCKSIZE + STRING_HASH_BLOCKSIZE);\n\t\t}\n\n\t\thash = duk_util_hashbytes(str, (duk_size_t) STRING_HASH_SHORTSTRING, str_seed);\n\t\toff = STRING_HASH_SHORTSTRING + (skip * (hash % 256)) / 256;\n\n\t\t/* FIXME: inefficient loop */\n\t\twhile (off < len) {\n\t\t\tduk_size_t left = len - off;\n\t\t\tduk_size_t now = (duk_size_t) (left > STRING_HASH_BLOCKSIZE ? 
STRING_HASH_BLOCKSIZE : left);\n\t\t\thash ^= duk_util_hashbytes(str + off, now, str_seed);\n\t\t\toff += skip;\n\t\t}\n\n\t\treturn hash;\n\t}\n}\n\n", "meta": {"content_hash": "bab3317c67f40063ff7a69f3bcc74bb0", "timestamp": "", "source": "github", "line_count": 64, "max_line_length": 95, "avg_line_length": 32.140625, "alnum_prop": 0.6684491978609626, "repo_name": "JoshEngebretson/duktape", "id": "29411796cbb56b7d91920771a24db254493ccfc8", "size": "2057", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/duk_heap_hashstring.c", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "1972812"}, {"name": "C++", "bytes": "20922"}, {"name": "CoffeeScript", "bytes": "895"}, {"name": "JavaScript", "bytes": "15926045"}, {"name": "Objective-C", "bytes": "6054"}, {"name": "Python", "bytes": "136104"}, {"name": "Shell", "bytes": "12610"}]}} {"text": "--TEST--\nRunkit_Sandbox_Parent Class -- Echo\n--SKIPIF--\n\n--FILE--\nini_set('display_errors', true);\n$php->ini_set('html_errors', false);\n$php->eval('$PARENT = new Runkit_Sandbox_Parent;\n\t\t\techo \"Foo\\n\";\n\t\t\t$PARENT->echo(\"BarBar\\n\");');\n\nfunction test_handler($str) {\n if (strlen($str) == 0) return NULL; /* flush() */\n /* Echoing and returning have the same effect here, both go to parent's output chain */\n echo 'Received string from sandbox: ' . strlen($str) . 
\" bytes long.\\n\";\n\n return strtoupper($str);\n}\n--EXPECT--\nReceived string from sandbox: 4 bytes long.\nFOO\nReceived string from sandbox: 149 bytes long.\n\nWARNING: RUNKIT_SANDBOX_PARENT::ECHO(): ACCESS TO ECHO DATA IN THE PARENT CONTEXT IS NOT ENABLED IN UNKNOWN(0) : RUNKIT_SANDBOX EVAL CODE ON LINE 3\n", "meta": {"content_hash": "009fcdd5cf234bb851939760b7bb2bec", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 147, "avg_line_length": 36.93333333333333, "alnum_prop": 0.6796028880866426, "repo_name": "lzpfmh/runkit", "id": "44237969acec682b89517ba99d074aa575d5c09a", "size": "1108", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "tests/Runkit_Sandbox_Parent__.echo.access.phpt", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "C", "bytes": "263129"}, {"name": "C++", "bytes": "372"}, {"name": "PHP", "bytes": "141611"}, {"name": "SourcePawn", "bytes": "193"}]}} {"text": "\npackage org.codehaus.groovy.grails.scaffolding;\n\nimport grails.build.logging.GrailsConsole;\nimport groovy.text.SimpleTemplateEngine;\nimport groovy.text.Template;\n\nimport java.io.BufferedWriter;\nimport java.io.File;\nimport java.io.FileWriter;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.Writer;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Map;\nimport java.util.Set;\n\nimport org.apache.commons.logging.Log;\nimport org.apache.commons.logging.LogFactory;\nimport org.codehaus.groovy.grails.commons.GrailsApplication;\nimport org.codehaus.groovy.grails.commons.GrailsDomainClass;\nimport org.codehaus.groovy.grails.commons.GrailsDomainClassProperty;\nimport org.codehaus.groovy.grails.plugins.GrailsPluginInfo;\nimport org.codehaus.groovy.grails.plugins.GrailsPluginManager;\nimport org.codehaus.groovy.grails.plugins.GrailsPluginUtils;\nimport org.codehaus.groovy.grails.plugins.PluginManagerAware;\nimport 
org.codehaus.groovy.runtime.IOGroovyMethods;\nimport org.codehaus.groovy.runtime.StringGroovyMethods;\nimport org.springframework.context.ResourceLoaderAware;\nimport org.springframework.core.io.AbstractResource;\nimport org.springframework.core.io.FileSystemResource;\nimport org.springframework.core.io.Resource;\nimport org.springframework.core.io.ResourceLoader;\nimport org.springframework.core.io.support.PathMatchingResourcePatternResolver;\nimport org.springframework.util.Assert;\nimport org.springframework.util.StringUtils;\n\npublic abstract class AbstractGrailsTemplateGenerator implements GrailsTemplateGenerator, ResourceLoaderAware, PluginManagerAware {\n\n\tprotected static final Log log = LogFactory.getLog(AbstractGrailsTemplateGenerator.class);\n\n\tprotected String basedir = \".\";\n\tprotected boolean overwrite = false;\n\tprotected SimpleTemplateEngine engine = new SimpleTemplateEngine();\n\tprotected ResourceLoader resourceLoader;\n\tprotected Template renderEditorTemplate;\n\tprotected String domainSuffix = \"\";\n\tprotected GrailsPluginManager pluginManager;\n\tprotected GrailsApplication grailsApplication;\n\n\tprotected AbstractGrailsTemplateGenerator(ClassLoader classLoader) {\n\t\tengine = new SimpleTemplateEngine(classLoader);\n\t}\n\n\tpublic void generateViews(GrailsDomainClass domainClass, String destDir) throws IOException {\n\t\tAssert.hasText(destDir, \"Argument [destdir] not specified\");\n\n\t\tFile viewsDir = new File(destDir, \"grails-app/views/\" + domainClass.getPropertyName());\n\t\tif (!viewsDir.exists()) {\n\t\t\tviewsDir.mkdirs();\n\t\t}\n\n\t\tfor (String name : getTemplateNames()) {\n if(log.isInfoEnabled())\n\t\t\t log.info(\"Generating [\"+name+\"] view for domain class [\"+domainClass.getFullName()+\"]\");\n\t\t\tgenerateView(domainClass, name, viewsDir.getAbsolutePath());\n\t\t}\n\t}\n\n\tpublic void generateController(GrailsDomainClass domainClass, String destDir) throws IOException {\n\t\tAssert.hasText(destDir, 
\"Argument [destdir] not specified\");\n\n\t\tif (domainClass == null) {\n\t\t\treturn;\n\t\t}\n\n\t\tString fullName = domainClass.getFullName();\n\t\tString pkg = \"\";\n\t\tint pos = fullName.lastIndexOf('.');\n\t\tif (pos != -1) {\n\t\t\t// Package name with trailing '.'\n\t\t\tpkg = fullName.substring(0, pos + 1);\n\t\t}\n\n\t\tFile destFile = new File(destDir, \"grails-app/controllers/\" + pkg.replace('.', '/') + domainClass.getShortName() + \"Controller.groovy\");\n\t\tif (canWrite(destFile)) {\n\t\t\tdestFile.getParentFile().mkdirs();\n\n\t\t\tBufferedWriter writer = null;\n\t\t\ttry {\n\t\t\t\twriter = new BufferedWriter(new FileWriter(destFile));\n\t\t\t\tgenerateController(domainClass, writer);\n\t\t\t\ttry {\n\t\t\t\t\twriter.flush();\n\t\t\t\t}\n\t\t\t\tcatch (IOException ignored) {}\n\t\t\t}\n\t\t\tfinally {\n\t\t\t\tIOGroovyMethods.closeQuietly(writer);\n\t\t\t}\n\n\t\t\tlog.info(\"Controller generated at [\"+destFile+\"]\");\n\t\t}\n\t}\n\n @Override\n public void generateAsyncController(GrailsDomainClass domainClass, String destDir) throws IOException {\n Assert.hasText(destDir, \"Argument [destdir] not specified\");\n\n if (domainClass == null) {\n return;\n }\n\n String fullName = domainClass.getFullName();\n String pkg = \"\";\n int pos = fullName.lastIndexOf('.');\n if (pos != -1) {\n // Package name with trailing '.'\n pkg = fullName.substring(0, pos + 1);\n }\n\n File destFile = new File(destDir, \"grails-app/controllers/\" + pkg.replace('.', '/') + domainClass.getShortName() + \"Controller.groovy\");\n if (canWrite(destFile)) {\n destFile.getParentFile().mkdirs();\n\n BufferedWriter writer = null;\n try {\n writer = new BufferedWriter(new FileWriter(destFile));\n generateAsyncController(domainClass, writer);\n try {\n writer.flush();\n }\n catch (IOException ignored) {}\n }\n finally {\n IOGroovyMethods.closeQuietly(writer);\n }\n\n log.info(\"Controller generated at [\"+destFile+\"]\");\n }\n }\n\n public void generateView(GrailsDomainClass 
domainClass, String viewName, Writer out) throws IOException {\n\t\tString templateText = getTemplateText(viewName + \".gsp\");\n\n\t\tif (!StringUtils.hasLength(templateText)) {\n\t\t\treturn;\n\t\t}\n\n\t\tGrailsDomainClassProperty multiPart = null;\n\t\tfor (GrailsDomainClassProperty property : domainClass.getProperties()) {\n\t\t\tif (property.getType() == Byte[].class || property.getType() == byte[].class) {\n\t\t\t\tmultiPart = property;\n\t\t\t\tbreak;\n\t\t\t}\n\t\t}\n\n\t\tString packageName = StringUtils.hasLength(domainClass.getPackageName()) ? \"<%@ page import=\\\"\" + domainClass.getFullName() + \"\\\" %>\" : \"\";\n\t\tMap binding = createBinding(domainClass);\n\t\tbinding.put(\"packageName\", packageName);\n\t\tbinding.put(\"multiPart\", multiPart);\n\t\tbinding.put(\"propertyName\", getPropertyName(domainClass));\n\n\t\tgenerate(templateText, binding, out);\n\t}\n\n\tprotected abstract Object getRenderEditor();\n\n\tpublic void generateView(GrailsDomainClass domainClass, String viewName, String destDir) throws IOException {\n\t\tFile destFile = new File(destDir, viewName + \".gsp\");\n\t\tif (!canWrite(destFile)) {\n\t\t\treturn;\n\t\t}\n\n\t\tBufferedWriter writer = null;\n\t\ttry {\n\t\t\twriter = new BufferedWriter(new FileWriter(destFile));\n\t\t\tgenerateView(domainClass, viewName, writer);\n\t\t\ttry {\n\t\t\t\twriter.flush();\n\t\t\t}\n\t\t\tcatch (IOException ignored) {}\n\t\t}\n\t\tfinally {\n\t\t\tIOGroovyMethods.closeQuietly(writer);\n\t\t}\n\t}\n\n\tpublic void generateController(GrailsDomainClass domainClass, Writer out) throws IOException {\n\t\tString templateText = getTemplateText(\"Controller.groovy\");\n\n\t\tMap binding = createBinding(domainClass);\n\t\tbinding.put(\"packageName\", domainClass.getPackageName());\n\t\tbinding.put(\"propertyName\", getPropertyName(domainClass));\n\n\t\tgenerate(templateText, binding, out);\n\t}\n\n public void generateAsyncController(GrailsDomainClass domainClass, Writer out) throws IOException 
{\n String templateText = getTemplateText(\"AsyncController.groovy\");\n\n Map binding = createBinding(domainClass);\n binding.put(\"packageName\", domainClass.getPackageName());\n binding.put(\"propertyName\", getPropertyName(domainClass));\n\n generate(templateText, binding, out);\n }\n\n @Override\n public void generateAsyncTest(GrailsDomainClass domainClass, String destDir) throws IOException {\n generateTest(domainClass, destDir, \"AsyncSpec.groovy\");\n }\n\n\tpublic void generateTest(GrailsDomainClass domainClass, String destDir) throws IOException {\n generateTest(domainClass, destDir, \"Spec.groovy\");\n\t}\n\n private void generateTest(GrailsDomainClass domainClass, String destDir, String templateName) throws IOException {\n File destFile = new File(destDir, domainClass.getPackageName().replace('.', '/') + '/' + domainClass.getShortName() + \"ControllerSpec.groovy\");\n if (!canWrite(destFile)) {\n return;\n }\n\n String templateText = getTemplateText(templateName);\n\n Map binding = createBinding(domainClass);\n binding.put(\"packageName\", domainClass.getPackageName());\n binding.put(\"propertyName\", domainClass.getLogicalPropertyName());\n binding.put(\"modelName\", getPropertyName(domainClass));\n\n destFile.getParentFile().mkdirs();\n BufferedWriter writer = null;\n try {\n writer = new BufferedWriter(new FileWriter(destFile));\n generate(templateText, binding, writer);\n try {\n writer.flush();\n }\n catch (IOException ignored) {}\n }\n finally {\n IOGroovyMethods.closeQuietly(writer);\n }\n }\n\n\n @SuppressWarnings(\"deprecation\")\n protected Map createBinding(GrailsDomainClass domainClass) {\n\t\tboolean hasHibernate = pluginManager.hasGrailsPlugin(\"hibernate\") || pluginManager.hasGrailsPlugin(\"hibernate4\");\n\n\t\tMap binding = new HashMap();\n\t\tbinding.put(\"pluginManager\", pluginManager);\n\t\tbinding.put(\"domainClass\", domainClass);\n\t\tbinding.put(\"className\", domainClass.getShortName());\n\t\tbinding.put(\"renderEditor\", 
getRenderEditor());\n\t\tbinding.put(\"comparator\", hasHibernate ? DomainClassPropertyComparator.class : SimpleDomainClassPropertyComparator.class);\n\t\treturn binding;\n\t}\n\n\tprotected void generate(String templateText, Map binding, Writer out) {\n\t\ttry {\n\t\t\tengine.createTemplate(templateText).make(binding).writeTo(out);\n\t\t}\n\t\tcatch (ClassNotFoundException e) {\n\t\t\tthrow new RuntimeException(e);\n\t\t}\n\t\tcatch (IOException e) {\n\t\t\tthrow new RuntimeException(e);\n\t\t}\n\t}\n\n\tprotected String getPropertyName(GrailsDomainClass domainClass) {\n\t\treturn domainClass.getPropertyName() + domainSuffix;\n\t}\n\n\tprotected String getTemplateText(String template) throws IOException {\n\t\tInputStream inputStream = null;\n\t\tif (resourceLoader != null && grailsApplication.isWarDeployed()) {\n\t\t\tinputStream = resourceLoader.getResource(\"/WEB-INF/templates/scaffolding/\" + template).getInputStream();\n\t\t}\n\t\telse {\n\t\t\tAbstractResource templateFile = getTemplateResource(template);\n\t\t\tif (templateFile.exists()) {\n\t\t\t\tinputStream = templateFile.getInputStream();\n\t\t\t}\n\t\t}\n\n\t\treturn inputStream == null ? 
null : IOGroovyMethods.getText(inputStream);\n\t}\n\n\tprotected AbstractResource getTemplateResource(String template) throws IOException {\n\t\tString name = \"src/templates/scaffolding/\" + template;\n\t\tAbstractResource templateFile = new FileSystemResource(new File(basedir, name).getAbsoluteFile());\n\t\tif (!templateFile.exists()) {\n\t\t\ttemplateFile = new FileSystemResource(new File(getPluginDir(), name).getAbsoluteFile());\n\t\t}\n\n\t\treturn templateFile;\n\t}\n\n\tprotected File getPluginDir() throws IOException {\n\t\tGrailsPluginInfo info = GrailsPluginUtils.getPluginBuildSettings().getPluginInfoForName(\"scaffolding\");\n\t\treturn info.getDescriptor().getFile().getParentFile();\n\t}\n\n\tprotected boolean canWrite(File testFile) {\n\t\tif (overwrite || !testFile.exists()) {\n\t\t\treturn true;\n\t\t}\n\n\t\ttry {\n\t\t\tString relative = makeRelativeIfPossible(testFile.getAbsolutePath(), basedir);\n\t\t\tString response = GrailsConsole.getInstance().userInput(\n\t\t\t\t\t\"File \" + relative + \" already exists. 
Overwrite?\", new String[] { \"y\", \"n\", \"a\" });\n\t\t\toverwrite = overwrite || \"a\".equals(response);\n\t\t\treturn overwrite || \"y\".equals(response);\n\t\t}\n\t\tcatch (Exception e) {\n\t\t\t// failure to read from standard in means we're probably running from an automation tool like a build server\n\t\t\treturn true;\n\t\t}\n\t}\n\n\tprotected String makeRelativeIfPossible(String fileName, String base) throws IOException {\n\t\tif (StringUtils.hasLength(base)) {\n\t\t\tfileName = StringGroovyMethods.minus(fileName, new File(base).getCanonicalPath());\n\t\t}\n\t\treturn fileName;\n\t}\n\n\tprotected Set getTemplateNames() throws IOException {\n\n\t\tif (resourceLoader != null && grailsApplication.isWarDeployed()) {\n\t\t\ttry {\n\t\t\t\tPathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(resourceLoader);\n\t\t\t\treturn extractNames(resolver.getResources(\"/WEB-INF/templates/scaffolding/*.gsp\"));\n\t\t\t}\n\t\t\tcatch (Exception e) {\n\t\t\t\treturn Collections.emptySet();\n\t\t\t}\n\t\t}\n\n\t\tPathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();\n\t\tSet resources = new HashSet();\n\n\t\tString templatesDirPath = basedir + \"/src/templates/scaffolding\";\n\t\tResource templatesDir = new FileSystemResource(templatesDirPath);\n\t\tif (templatesDir.exists()) {\n\t\t\ttry {\n\t\t\t\tresources.addAll(extractNames(resolver.getResources(\"file:\" + templatesDirPath + \"/*.gsp\")));\n\t\t\t}\n\t\t\tcatch (Exception e) {\n\t\t\t\tlog.error(\"Error while loading views from \" + basedir, e);\n\t\t\t}\n\t\t}\n\n\t\tFile pluginDir = getPluginDir();\n\t\ttry {\n\t\t\tresources.addAll(extractNames(resolver.getResources(\"file:\" + pluginDir + \"/src/templates/scaffolding/*.gsp\")));\n\t\t}\n\t\tcatch (Exception e) {\n\t\t\t// ignore\n\t\t\tlog.error(\"Error locating templates from \" + pluginDir + \": \" + e.getMessage(), e);\n\t\t}\n\n\t\treturn resources;\n\t}\n\n\tprotected Set 
extractNames(Resource[] resources) {\n\t\tSet names = new HashSet();\n\t\tfor (Resource resource : resources) {\n\t\t\tString name = resource.getFilename();\n\t\t\tnames.add(name.substring(0, name.length() - 4));\n\t\t}\n\t\treturn names;\n\t}\n\n\tpublic void setGrailsApplication(GrailsApplication ga) {\n\t\tgrailsApplication = ga;\n\t\tObject suffix = ga.getFlatConfig().get(\"grails.scaffolding.templates.domainSuffix\");\n\t\tif (suffix instanceof CharSequence) {\n\t\t\tdomainSuffix = suffix.toString();\n\t\t}\n\t}\n\n\tpublic void setResourceLoader(ResourceLoader rl) {\n if(log.isInfoEnabled())\n\t\t log.info(\"Scaffolding template generator set to use resource loader [\"+rl+\"]\");\n\t\tresourceLoader = rl;\n\t}\n\n\tpublic void setPluginManager(GrailsPluginManager gpm) {\n\t\tpluginManager = gpm;\n\t}\n\n\tpublic void setOverwrite(boolean shouldOverwrite) {\n\t\toverwrite = shouldOverwrite;\n\t}\n}\n", "meta": {"content_hash": "8970b7709bd54fcfed5f3a9952916ba2", "timestamp": "", "source": "github", "line_count": 401, "max_line_length": 151, "avg_line_length": 34.53366583541147, "alnum_prop": 0.7154823801270942, "repo_name": "eptresmo/dbMigrationTest", "id": "e723c197e0218138ec76d778e67165fef6866c91", "size": "14445", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "dbMigrationTest/target/work/plugins/scaffolding-2.0.1/src/java/org/codehaus/groovy/grails/scaffolding/AbstractGrailsTemplateGenerator.java", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "\ufeff// --------------------------------------------------------------------------------------------\r\n// \r\n// Copyright (C) 2011-2014 Effort Team\r\n//\r\n// Permission is hereby granted, free of charge, to any person obtaining a copy\r\n// of this software and associated documentation files (the \"Software\"), to deal\r\n// in the Software without restriction, including without limitation the rights\r\n// to use, copy, modify, merge, publish, distribute, sublicense, 
and/or sell\r\n// copies of the Software, and to permit persons to whom the Software is\r\n// furnished to do so, subject to the following conditions:\r\n//\r\n// The above copyright notice and this permission notice shall be included in\r\n// all copies or substantial portions of the Software.\r\n//\r\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\r\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\r\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\r\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\r\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\r\n// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\r\n// THE SOFTWARE.\r\n// \r\n// --------------------------------------------------------------------------------------------\r\n\r\nnamespace Effort.Provider\r\n{\r\n using System;\r\n using System.Data;\r\n using System.Data.Common;\r\n#if !EFOLD\r\n using System.Data.Entity.Core.Common;\r\n#endif\r\n\r\n /// \r\n /// Represents a set of methods for creating instances of the \r\n /// provider's implementation of the data source classes.\r\n /// \r\n public class EffortProviderFactory : DbProviderFactory, IServiceProvider\r\n {\r\n /// \r\n /// Provides a singleton instance of the class.\r\n /// \r\n public static readonly EffortProviderFactory Instance = new EffortProviderFactory();\r\n\r\n /// \r\n /// Prevents a default instance of the class\r\n /// from being created.\r\n /// \r\n private EffortProviderFactory()\r\n {\r\n }\r\n\r\n /// \r\n /// Returns a new instance of the class.\r\n /// \r\n /// \r\n /// A new instance of .\r\n /// \r\n public override DbConnection CreateConnection()\r\n {\r\n return new EffortConnection();\r\n }\r\n\r\n /// \r\n /// Gets the service object of the specified type.\r\n /// \r\n /// \r\n /// An object that specifies the type of service object to get.\r\n 
/// \r\n /// \r\n /// A service object of type .-or- null if there is\r\n /// no service object of type .\r\n /// \r\n public object GetService(Type serviceType)\r\n {\r\n if (serviceType == typeof(DbProviderServices))\r\n {\r\n return EffortProviderServices.Instance;\r\n }\r\n\r\n return null;\r\n }\r\n }\r\n}\r\n", "meta": {"content_hash": "767841cdd80110458b083c02f09c4e70", "timestamp": "", "source": "github", "line_count": 84, "max_line_length": 96, "avg_line_length": 42.023809523809526, "alnum_prop": 0.5781869688385269, "repo_name": "wertzui/effort", "id": "62bcbbc5cb1dad86155bd8a97196855269b6d236", "size": "3532", "binary": false, "copies": "3", "ref": "refs/heads/develop", "path": "Main/Source/Effort/Provider/EffortProviderFactory.cs", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "1104"}, {"name": "C#", "bytes": "2446943"}]}} {"text": "import { VGrid } from '../v-grid';\nimport { BindingContextInterface, OverrideContextInterface } from '../../interfaces';\nexport declare class VGridAttributesImageFix {\n private vGrid;\n private element;\n private value;\n private bindingContext;\n private overrideContext;\n constructor(element: HTMLImageElement, vGrid: VGrid);\n valueChanged(newValue: string): void;\n bind(bindingContext: BindingContextInterface, overrideContext: OverrideContextInterface): void;\n}\n", "meta": {"content_hash": "288050943bddd389f02d6c1488f7c9da", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 99, "avg_line_length": 40.5, "alnum_prop": 0.7551440329218106, "repo_name": "vegarringdal/vGrid", "id": "7a892151b3f517aeff4464ab7de2c01ac475f8d0", "size": "486", "binary": false, "copies": "4", "ref": "refs/heads/dev-rebuild", "path": "dist/es2015/grid/attributes/v-image.d.ts", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "24447"}, {"name": "HTML", "bytes": "19771"}, {"name": "JavaScript", "bytes": "7057"}, {"name": "TypeScript", "bytes": "1639454"}]}} {"text": 
"layout: page\ntitle: Cliffs Tech Executive Retreat\ndate: 2016-05-24\nauthor: Beverly Snyder\ntags: weekly links, java\nstatus: published\nsummary: Vivamus sed ligula quis mi cursus venenatis sed sed nunc.\nbanner: images/banner/people.jpg\nbooking:\n startDate: 05/29/2018\n endDate: 05/31/2018\n ctyhocn: HSTFLHX\n groupCode: CTER\npublished: true\n---\nMauris id odio eget libero fermentum egestas vel vitae ligula. Suspendisse vestibulum ipsum sem, vel ornare ex lacinia ac. Etiam a sem auctor, dignissim tellus vitae, consequat arcu. Cras rutrum lorem metus, sed lacinia nunc sollicitudin non. Praesent quam mi, aliquet ut lorem at, dictum faucibus ante. Vivamus sit amet ligula vulputate, sollicitudin arcu at, ullamcorper elit. Aenean aliquet molestie tincidunt. Nulla sed lectus diam. Donec gravida enim ut lorem blandit, sed ultricies arcu auctor. Donec odio ante, fringilla id lacus vel, sagittis dignissim elit. Duis mattis turpis tellus, at pulvinar leo commodo at. Etiam in lectus odio. Curabitur ipsum nisi, tincidunt eu ligula ac, cursus pellentesque lacus. Quisque enim sem, tempor non convallis maximus, semper id nisl. Donec eu sapien vel sapien posuere semper. Cras nisi justo, rhoncus ac urna at, cursus scelerisque libero.\n\n1 Proin bibendum tortor at ipsum commodo, vel gravida elit pretium\n1 Phasellus tincidunt lorem vitae elit ultrices, id volutpat mi fermentum\n1 Nunc interdum orci vel lobortis sodales.\n\nPraesent at risus ipsum. Morbi mattis blandit mauris, in fermentum nibh condimentum ut. In mattis risus et diam sagittis euismod. Sed vel dolor id dui dapibus viverra. Praesent efficitur ut quam et auctor. Sed venenatis convallis ex, nec vulputate est tincidunt vitae. In pulvinar faucibus odio, eu viverra tortor pharetra iaculis. Nunc cursus sagittis mauris, at blandit nisi auctor eget. Sed mollis diam eu volutpat pulvinar. In a luctus felis. Integer lobortis purus id lacus porta rutrum. Etiam sit amet lobortis magna. 
Nulla id libero convallis, luctus lorem ac, fermentum orci. Integer vel maximus sapien, non blandit sapien. Vivamus a tempus purus.\n", "meta": {"content_hash": "43f9f6179677a71fbbce864c5125cf6a", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 887, "avg_line_length": 94.31818181818181, "alnum_prop": 0.8043373493975904, "repo_name": "KlishGroup/prose-pogs", "id": "6f3c04ea1edb8b269359b353872dd5964911f4eb", "size": "2079", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "pogs/H/HSTFLHX/CTER/index.md", "mode": "33188", "license": "mit", "language": []}} {"text": "namespace igl\n{\n // Subdivide a mesh without moving vertices: loop subdivision but odd\n // vertices stay put and even vertices are just edge midpoints\n // \n // Templates:\n // MatV matrix for vertex positions, e.g. MatrixXd\n // MatF matrix for vertex positions, e.g. MatrixXi\n // Inputs:\n // V #V by dim mesh vertices\n // F #F by 3 mesh triangles\n // Outputs:\n // NV new vertex positions, V is guaranteed to be at top\n // NF new list of face indices\n //\n // NOTE: V should not be the same as NV,\n // NOTE: F should not be the same as NF, use other proto\n //\n // Known issues:\n // - assumes (V,F) is edge-manifold.\n template <\n typename DerivedV, \n typename DerivedF,\n typename DerivedNV,\n typename DerivedNF>\n IGL_INLINE void upsample(\n const Eigen::PlainObjectBase& V,\n const Eigen::PlainObjectBase& F,\n Eigen::PlainObjectBase& NV,\n Eigen::PlainObjectBase& NF);\n // Virtually in place wrapper\n template <\n typename MatV, \n typename MatF>\n IGL_INLINE void upsample(\n MatV& V,\n MatF& F);\n}\n\n#ifndef IGL_STATIC_LIBRARY\n# include \"upsample.cpp\"\n#endif\n\n#endif\n", "meta": {"content_hash": "a1cdb7cff1bb173dd9cbbff48005f1e2", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 71, "avg_line_length": 26.772727272727273, "alnum_prop": 0.6612903225806451, "repo_name": "FabianRepository/SinusProject", "id": 
"5ca05336dfac2db8a87a07b48ee4c4d7219d853e", "size": "1710", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "Code/include/igl/upsample.h", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "C", "bytes": "1822674"}, {"name": "C++", "bytes": "11420706"}, {"name": "CMake", "bytes": "20858"}, {"name": "Makefile", "bytes": "4050"}, {"name": "Objective-C", "bytes": "11880"}, {"name": "Shell", "bytes": "15321"}]}} {"text": "/*___Generated_by_IDEA___*/\n\npackage com.grafixartist.androidn;\n\n/* This stub is only used by the IDE. It is NOT the BuildConfig class actually packed into the APK */\npublic final class BuildConfig {\n public final static boolean DEBUG = Boolean.parseBoolean(null);\n}", "meta": {"content_hash": "26542977f302c1033ce570a5d275b19c", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 101, "avg_line_length": 33.375, "alnum_prop": 0.7490636704119851, "repo_name": "Suleiman19/Android-N-Sample", "id": "3f2817dcd993334f72e2c9570e2c1671e560df79", "size": "267", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "gen/com/grafixartist/androidn/BuildConfig.java", "mode": "33188", "license": "mit", "language": [{"name": "Java", "bytes": "10473"}]}} {"text": "fieldhandler = $fieldhandler;\n $this->database_prefix = $database_prefix;\n }\n\n /**\n * Get SQL\n *\n * At this point, all values have been input, filtered and set in element-specific item arrays\n * Now, generate the full SQL statement, building the SQL statement and quoting/escaping data\n *\n * @param null|string $sql\n *\n * @return string\n * @since 1.0.0\n */\n public function getSql($sql = null)\n {\n if ($sql === null || trim($sql) === '') {\n $this->generateSql();\n } else {\n $this->sql = '';\n $this->getExternalSql($sql);\n }\n\n return $this->sql;\n }\n\n /**\n * Get the current value (or default) of the specified property\n *\n * @param string $key `````\n * @param mixed $default\n *\n * @return mixed\n * @since 
1.0.0\n */\n public function get($key, $default = null)\n {\n if ($this->$key === null) {\n $this->$key = $default;\n }\n\n return $this->$key;\n }\n\n /**\n * Clear Query String\n *\n * @return $this\n * @since 1.0.0\n */\n public function clearQuery()\n {\n $this->query_type = 'select';\n $this->distinct = false;\n $this->columns = array();\n $this->update_columns = array();\n $this->values = array();\n $this->from = array();\n $this->insert_into_table = null;\n $this->where_group = array();\n $this->where = array();\n $this->group_by = array();\n $this->having_group = array();\n $this->having = array();\n $this->query_object = 'list';\n $this->order_by = array();\n $this->use_pagination = 0;\n $this->offset = 0;\n $this->limit = 0;\n $this->sql = '';\n\n return $this;\n }\n\n /**\n * Set Query Type\n *\n * @param string $query_type\n *\n * @return $this\n * @since 1.0.0\n */\n public function setType($query_type = 'select')\n {\n if (in_array(strtolower($query_type), $this->query_type_array)) {\n $this->query_type = strtolower($query_type);\n } else {\n $this->query_type = 'select';\n }\n\n return $this;\n }\n\n /**\n * Retrieves the current date and time formatted in a manner compliant with the database driver\n *\n * @return string\n * @since 1.0.0\n */\n public function getDate()\n {\n $date = new DateTime();\n\n return $date->format($this->date_format);\n }\n\n /**\n * Returns a value for null date that is compliant with the database driver\n *\n * @return string\n * @since 1.0.0\n */\n public function getNullDate()\n {\n return $this->null_date;\n }\n\n /**\n * Retrieves the PHP date format compliant with the database driver\n *\n * @return string\n * @since 1.0.0\n */\n public function getDateFormat()\n {\n return $this->date_format;\n }\n\n /**\n * Set Distinct Indicator\n *\n * @param boolean $distinct\n *\n * @return $this\n * @since 1.0.0\n */\n public function setDistinct($distinct = false)\n {\n if ($distinct === true) {\n $this->distinct = true;\n } 
else {\n $this->distinct = false;\n }\n\n return $this;\n }\n\n /**\n * Used for select, insert, and update to specify column name, alias (optional)\n * For Insert and Update, only, value and data_type\n *\n * @param string $column_name\n * @param null|string $alias\n * @param null|mixed $value\n * @param null|string $data_type\n *\n * @return $this\n * @since 1.0.0\n */\n public function select($column_name, $alias = null, $value = null, $data_type = null)\n {\n $this->editRequired('column_name', $column_name);\n\n if ($data_type === 'special') {\n } else {\n if ($value === null) {\n $data_type = 'column';\n }\n }\n\n $this->columns[$column_name]\n = $this->setItem($column_name, $data_type, $value, $alias, null, false);\n\n return $this;\n }\n\n /**\n * Set From table name and optional value for alias\n *\n * @param string $table_name\n * @param null|string $alias\n * @param boolean $primary\n *\n * @return $this\n * @since 1.0.0\n */\n public function from($table_name, $alias = null, $primary = false)\n {\n $this->editRequired('table_name', $table_name);\n\n $key = $this->setFromKey($table_name, $alias);\n\n $this->from[$key] = $this->setItem($table_name, 'string', null, $alias, null, false);\n\n if ($this->findFromPrimary() === true && $primary === false) {\n return $this;\n }\n\n $this->setFromPrimary($key);\n\n return $this;\n }\n\n /**\n * Set Insert Into Table Name\n *\n * @param string $table_name\n *\n * @return $this\n * @since 1.0.0\n */\n public function setInsertInto($table_name)\n {\n $this->editRequired('table_name', $table_name);\n\n $this->insert_into_table = (string)$table_name;\n\n return $this;\n }\n\n /**\n * Set From table key\n *\n * @param string $table_name\n * @param null|string $alias\n *\n * @return $this\n * @since 1.0.0\n */\n public function setFromKey($table_name, $alias = null)\n {\n return ($table_name . ' ' . 
$alias);\n }\n\n /**\n * Create a grouping for 'and' or 'or' groups of where conditions\n *\n * @param string $group\n * @param string $group_connector\n *\n * @return $this\n * @since 1.0.0\n */\n public function whereGroup($group, $group_connector = 'AND')\n {\n $this->where_group\n = $this->setGroup($group, $group_connector, 'where', $this->where_group);\n\n return $this;\n }\n\n /**\n * Create a grouping for 'and' or 'or' groups of having conditions\n *\n * @param string $group\n * @param string $group_connector\n *\n * @return $this\n * @since 1.0.0\n */\n public function havingGroup($group, $group_connector = 'AND')\n {\n $this->having_group\n = $this->setGroup($group, $group_connector, 'having', $this->having_group);\n\n return $this;\n }\n\n /**\n * Set Where Conditions for Query\n *\n * @param string $left_filter\n * @param string $left\n * @param string $condition\n * @param string $right_filter\n * @param string $right\n * @param string $connector\n * @param null|string $group\n *\n * @return $this\n * @since 1.0.0\n */\n public function where(\n $left_filter = 'column',\n $left = '',\n $condition = '',\n $right_filter = 'column',\n $right = '',\n $connector = 'and',\n $group = null\n ) {\n $item = $this->setLeftRightConditionals(\n $left_filter,\n $left,\n $condition,\n $right_filter,\n $right,\n $connector,\n $group\n );\n\n $this->where[] = $item;\n\n return $this;\n }\n\n /**\n * Set Having Conditions for Query\n *\n * @param string $left_filter\n * @param string $left\n * @param string $condition\n * @param string $right_filter\n * @param string $right\n * @param string $connector\n * @param null|string $group\n *\n * @return $this\n * @since 1.0.0\n */\n public function having(\n $left_filter = 'column',\n $left = '',\n $condition = '',\n $right_filter = 'column',\n $right = '',\n $connector = 'and',\n $group = null\n ) {\n $item = $this->setLeftRightConditionals(\n $left_filter,\n $left,\n $condition,\n $right_filter,\n $right,\n $connector,\n 
$group\n );\n\n $this->having[] = $item;\n\n return $this;\n }\n\n /**\n * Group By column name\n *\n * @param string $column_name\n *\n * @return $this\n * @since 1.0.0\n */\n public function groupBy($column_name)\n {\n $this->group_by[] = $this->setGroupByOrderBy($column_name, 'group by');\n\n return $this;\n }\n\n /**\n * Order By column name\n *\n * @param string $column_name\n * @param null|string $direction\n *\n * @return $this\n * @since 1.0.0\n */\n public function orderBy($column_name, $direction = 'ASC')\n {\n $this->order_by[] = $this->setGroupByOrderBy($column_name, 'order by', $direction);\n\n return $this;\n }\n\n /**\n * Offset and Limit\n *\n * @param int $offset\n * @param int $limit\n *\n * @return $this\n * @since 1.0.0\n */\n public function setOffsetAndLimit($offset = 0, $limit = 0)\n {\n $this->setOffsetOrLimit($offset, $type = 'offset');\n $this->setOffsetOrLimit($limit, $type = 'limit');\n\n return $this;\n }\n}\n", "meta": {"content_hash": "4cba7c7b8d58f8f451db992b967927b1", "timestamp": "", "source": "github", "line_count": 423, "max_line_length": 99, "avg_line_length": 23.6903073286052, "alnum_prop": 0.4923660313341982, "repo_name": "Molajo/Query", "id": "35418dea65445de82454beeae6395b58c24ef1b5", "size": "10218", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Source/Query/Builder/Sql.php", "mode": "33188", "license": "mit", "language": [{"name": "PHP", "bytes": "512092"}]}} {"text": "\n\npackage org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.math;\n\nimport java.math.BigDecimal;\nimport java.util.ArrayList;\nimport java.util.List;\nimport org.apache.beam.sdk.extensions.sql.impl.interpreter.BeamSqlFnExecutorTestBase;\nimport org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.BeamSqlExpression;\nimport org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.BeamSqlPrimitive;\nimport org.apache.calcite.sql.type.SqlTypeName;\nimport org.junit.Assert;\nimport org.junit.Test;\n\n/**\n * Test 
for {@link BeamSqlMathUnaryExpression}.\n */\npublic class BeamSqlMathUnaryExpressionTest extends BeamSqlFnExecutorTestBase {\n\n @Test public void testForGreaterThanOneOperands() {\n List operands = new ArrayList<>();\n\n // operands more than 1 not allowed\n operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 2));\n operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 4));\n Assert.assertFalse(new BeamSqlAbsExpression(operands).accept());\n }\n\n @Test public void testForOperandsType() {\n List operands = new ArrayList<>();\n\n // varchar operand not allowed\n operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, \"2\"));\n Assert.assertFalse(new BeamSqlAbsExpression(operands).accept());\n }\n\n @Test public void testForUnaryExpressions() {\n List operands = new ArrayList<>();\n\n // test for sqrt function\n operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf(\"2\")));\n\n // test for abs function\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.BIGINT, -28965734597L));\n Assert.assertEquals(28965734597L,\n new BeamSqlAbsExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForLnExpression() {\n List operands = new ArrayList<>();\n\n // test for LN function with operand type smallint\n operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf(\"2\")));\n Assert.assertEquals(Math.log(2),\n new BeamSqlLnExpression(operands).evaluate(record, null).getValue());\n\n // test for LN function with operand type double\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));\n Assert\n .assertEquals(Math.log(2.4),\n new BeamSqlLnExpression(operands).evaluate(record, null).getValue());\n // test for LN function with operand type decimal\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));\n Assert.assertEquals(Math.log(2.56),\n new BeamSqlLnExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public 
void testForLog10Expression() {\n List operands = new ArrayList<>();\n\n // test for log10 function with operand type smallint\n operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf(\"2\")));\n Assert.assertEquals(Math.log10(2),\n new BeamSqlLogExpression(operands).evaluate(record, null).getValue());\n // test for log10 function with operand type double\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));\n Assert.assertEquals(Math.log10(2.4),\n new BeamSqlLogExpression(operands).evaluate(record, null).getValue());\n // test for log10 function with operand type decimal\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));\n Assert.assertEquals(Math.log10(2.56),\n new BeamSqlLogExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForExpExpression() {\n List operands = new ArrayList<>();\n\n // test for exp function with operand type smallint\n operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf(\"2\")));\n Assert.assertEquals(Math.exp(2),\n new BeamSqlExpExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type double\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));\n Assert.assertEquals(Math.exp(2.4),\n new BeamSqlExpExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type decimal\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));\n Assert.assertEquals(Math.exp(2.56),\n new BeamSqlExpExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForAcosExpression() {\n List operands = new ArrayList<>();\n\n // test for exp function with operand type smallint\n operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf(\"2\")));\n Assert.assertEquals(Double.NaN,\n new BeamSqlAcosExpression(operands).evaluate(record, 
null).getValue());\n // test for exp function with operand type double\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 0.45));\n Assert.assertEquals(Math.acos(0.45),\n new BeamSqlAcosExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type decimal\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(-0.367)));\n Assert.assertEquals(Math.acos(-0.367),\n new BeamSqlAcosExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForAsinExpression() {\n List operands = new ArrayList<>();\n\n // test for exp function with operand type double\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 0.45));\n Assert.assertEquals(Math.asin(0.45),\n new BeamSqlAsinExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type decimal\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(-0.367)));\n Assert.assertEquals(Math.asin(-0.367),\n new BeamSqlAsinExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForAtanExpression() {\n List operands = new ArrayList<>();\n\n // test for exp function with operand type double\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 0.45));\n Assert.assertEquals(Math.atan(0.45),\n new BeamSqlAtanExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type decimal\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(-0.367)));\n Assert.assertEquals(Math.atan(-0.367),\n new BeamSqlAtanExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForCosExpression() {\n List operands = new ArrayList<>();\n\n // test for exp function with operand type double\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 0.45));\n Assert.assertEquals(Math.cos(0.45),\n new 
BeamSqlCosExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type decimal\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(-0.367)));\n Assert.assertEquals(Math.cos(-0.367),\n new BeamSqlCosExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForCotExpression() {\n List operands = new ArrayList<>();\n\n // test for exp function with operand type double\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, .45));\n Assert.assertEquals(1.0d / Math.tan(0.45),\n new BeamSqlCotExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type decimal\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(-.367)));\n Assert.assertEquals(1.0d / Math.tan(-0.367),\n new BeamSqlCotExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForDegreesExpression() {\n List operands = new ArrayList<>();\n\n // test for exp function with operand type smallint\n operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf(\"2\")));\n Assert.assertEquals(Math.toDegrees(2),\n new BeamSqlDegreesExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type double\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));\n Assert.assertEquals(Math.toDegrees(2.4),\n new BeamSqlDegreesExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type decimal\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));\n Assert.assertEquals(Math.toDegrees(2.56),\n new BeamSqlDegreesExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForRadiansExpression() {\n List operands = new ArrayList<>();\n\n // test for exp function with operand type smallint\n 
operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf(\"2\")));\n Assert.assertEquals(Math.toRadians(2),\n new BeamSqlRadiansExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type double\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));\n Assert.assertEquals(Math.toRadians(2.4),\n new BeamSqlRadiansExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type decimal\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));\n Assert.assertEquals(Math.toRadians(2.56),\n new BeamSqlRadiansExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForSinExpression() {\n List operands = new ArrayList<>();\n\n // test for exp function with operand type smallint\n operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf(\"2\")));\n Assert.assertEquals(Math.sin(2),\n new BeamSqlSinExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type double\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));\n Assert.assertEquals(Math.sin(2.4),\n new BeamSqlSinExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type decimal\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));\n Assert.assertEquals(Math.sin(2.56),\n new BeamSqlSinExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForTanExpression() {\n List operands = new ArrayList<>();\n\n // test for exp function with operand type smallint\n operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf(\"2\")));\n Assert.assertEquals(Math.tan(2),\n new BeamSqlTanExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type double\n operands.clear();\n 
operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));\n Assert.assertEquals(Math.tan(2.4),\n new BeamSqlTanExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type decimal\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));\n Assert.assertEquals(Math.tan(2.56),\n new BeamSqlTanExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForSignExpression() {\n List operands = new ArrayList<>();\n\n // test for exp function with operand type smallint\n operands.add(BeamSqlPrimitive.of(SqlTypeName.SMALLINT, Short.valueOf(\"2\")));\n Assert.assertEquals((short) 1\n , new BeamSqlSignExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type double\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.4));\n Assert.assertEquals(1.0, new BeamSqlSignExpression(operands).evaluate(record, null).getValue());\n // test for exp function with operand type decimal\n operands.clear();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DECIMAL, BigDecimal.valueOf(2.56)));\n Assert.assertEquals(BigDecimal.ONE,\n new BeamSqlSignExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForPi() {\n Assert.assertEquals(Math.PI, new BeamSqlPiExpression().evaluate(record, null).getValue());\n }\n\n @Test public void testForCeil() {\n List operands = new ArrayList<>();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.68687979));\n Assert.assertEquals(Math.ceil(2.68687979),\n new BeamSqlCeilExpression(operands).evaluate(record, null).getValue());\n }\n\n @Test public void testForFloor() {\n List operands = new ArrayList<>();\n operands.add(BeamSqlPrimitive.of(SqlTypeName.DOUBLE, 2.68687979));\n Assert.assertEquals(Math.floor(2.68687979),\n new BeamSqlFloorExpression(operands).evaluate(record, null).getValue());\n }\n\n}\n", "meta": {"content_hash": 
"0f912d403eeb23eb9c577b43291462b1", "timestamp": "", "source": "github", "line_count": 296, "max_line_length": 100, "avg_line_length": 44.7195945945946, "alnum_prop": 0.72675077434464, "repo_name": "wangyum/beam", "id": "d80a67071e9aebc3146dcaa4f1479f5802c1f00f", "size": "14042", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/operator/math/BeamSqlMathUnaryExpressionTest.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Go", "bytes": "109377"}, {"name": "Groovy", "bytes": "84452"}, {"name": "Java", "bytes": "14117162"}, {"name": "Python", "bytes": "3165393"}, {"name": "Shell", "bytes": "55385"}]}} {"text": "set -e\ngit clone https://github.com/nanomsg/nnpy.git\n# top of tree won't install\ncd nnpy\ngit checkout c7e718a5173447c85182dc45f99e2abcf9cd4065\nsudo pip install cffi\nsudo pip install .\ncd ..\n", "meta": {"content_hash": "2df4b09d1debc62ee0178e0ebf554c12", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 53, "avg_line_length": 23.75, "alnum_prop": 0.7947368421052632, "repo_name": "hanw/behavioral-model", "id": "77a7d63afe47a1741d549f97580015fe9cbc31ca", "size": "200", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "travis/install-nnpy.sh", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "93627"}, {"name": "C++", "bytes": "2126481"}, {"name": "M4", "bytes": "69750"}, {"name": "Makefile", "bytes": "44234"}, {"name": "P4", "bytes": "37531"}, {"name": "Python", "bytes": "271835"}, {"name": "Ruby", "bytes": "5182"}, {"name": "Shell", "bytes": "11424"}, {"name": "Thrift", "bytes": "25904"}]}} {"text": "\npackage au.net.zeus.rmi.tls;\n\nimport java.net.Socket;\nimport java.security.AccessController;\nimport java.security.GeneralSecurityException;\nimport java.security.NoSuchAlgorithmException;\nimport java.security.NoSuchProviderException;\nimport 
java.security.Principal;\nimport java.security.cert.CertPath;\nimport java.security.cert.CertificateException;\nimport java.security.cert.X509Certificate;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.logging.Level;\nimport java.util.logging.Logger;\nimport javax.security.auth.Subject;\nimport javax.security.auth.x500.X500PrivateCredential;\nimport net.jini.io.UnsupportedConstraintException;\nimport net.jini.security.AuthenticationPermission;\nimport org.apache.river.logging.Levels;\n\n/**\n *\n * @author peter\n */\nclass ClientSubjectKeyManager extends SubjectKeyManager {\n /** Client logger */\n private static final Logger logger = CLIENT_LOGGER;\n \n \n /** Returns the client logger */\n Logger getLogger() {\n\treturn logger;\n }\n \n ClientSubjectKeyManager(Subject subject) throws NoSuchAlgorithmException, NoSuchProviderException {\n\tsuper(subject);\n }\n\n @Override\n public String[] getClientAliases(String keyType, Principal[] issuers) {\n\tList certPaths = getCertificateChains(getSubject());\n\tif (certPaths == null) {\n\t return null;\n\t}\n\tCollection result = null;\n\tfor (int i = certPaths.size(); --i >= 0;) {\n\t CertPath chain = (CertPath) certPaths.get(i);\n\t Exception exception;\n\t try {\n\t\tif (checkChain(chain, keyType, issuers) != null) {\n\t\t if (result == null) {\n\t\t\tresult = new ArrayList(certPaths.size());\n\t\t }\n\t\t result.add(getCertificateName(firstX509Cert(chain)));\n\t\t}\n\t\tcontinue;\n\t } catch (SecurityException e) {\n\t\texception = e;\n\t } catch (GeneralSecurityException ex) {\n\t\texception = ex;\n\t }\n\t Logger logger = Logger.getLogger(SubjectKeyManager.class.getName());\n\t if (logger.isLoggable(Levels.HANDLED)) {\n\t\tlogger.log(Levels.HANDLED, \"Swallowed SecurityException thrown\", exception);\n\t }\n\t}\n\tif (result == null) {\n\t return null;\n\t} else {\n\t return (String[]) result.toArray(new 
String[result.size()]);\n\t}\n }\n\n @Override\n public synchronized String chooseClientAlias(String[] keyTypes, Principal[] issuers, Socket socket) {\n\t/*\n\t * Only choose new client credentials for the first handshake.\n\t * Otherwise, just use the previous client credentials.\n\t */\n\tif (clientCredential == null) {\n\t for (String keyType : keyTypes) {\n\t\ttry {\n\t\t if (exceptionMap.get(keyType) != null) {\n\t\t\t// Prior exception found for keytype\n\t\t\treturn null;\n\t\t }\n\n\t\t clientCredential = chooseCredential(keyType, issuers);\n\t\t if (clientCredential != null) {\n // clientCredential found\n\t\t\texceptionMap.put(keyType, null);\n\t\t\tbreak;\n \n } else {\n\t\t\texceptionMap.put(keyType,\n new GeneralSecurityException(\"Credentials not found\"));\n\t\t }\n\t\t continue;\n \n\t\t} catch (GeneralSecurityException e) {\n\t\t exceptionMap.put(keyType, e);\n\t\t} catch (SecurityException e) {\n\t\t exceptionMap.put(keyType, e);\n\t\t}\n\t }\n\t if (clientCredential == null) {\n\t\treturn null;\n\t }\n\t}\n \n\tX509Certificate cert = clientCredential.getCertificate();\n\tclientPrincipal = cert.getSubjectX500Principal();\n\tcredentialsValidUntil = Math.min(credentialsValidUntil, certificatesValidUntil(getCertificateChain(getSubject(), cert)));\n\tauthenticationPermission = getAuthenticationPermission(cert);\n\tString result = getCertificateName(clientCredential.getCertificate());\n\t\tif (logger.isLoggable(Level.FINE)) {\n\t\t logger.log(\n\t\t\tLevel.FINE,\n\t\t\t\"choose client alias for key types {0}\\nand issuers {1}\\n\" +\n\t\t\t\"returns {2}\",\n\t\t\tnew Object[] { toString(keyTypes), toString(issuers), result });\n\t\t}\n\treturn result;\n }\n\n /* -- X500TrustManager -- */\n /**\n * Override this X509TrustManager method in order to cache the server\n * principal and to continue to choose the same one.\n */\n @Override\n public synchronized void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException 
{\n\tsuper.checkServerTrusted(chain, authType);\n\tif (serverPrincipal == null) {\n\t serverCredential = chain[0];\n\t serverPrincipal = serverCredential.getSubjectX500Principal();\n\t setPermittedRemotePrincipals(Collections.singleton(serverPrincipal));\n\t credentialsValidUntil = certificatesValidUntil(chain);\n\t} else if (!serverCredential.equals(chain[0])) {\n\t throw new CertificateException(\"Server credentials changed\");\n\t}\n }\n\n @Override\n public String[] getServerAliases(String arg0, Principal[] arg1) {\n\treturn null;\n }\n\n @Override\n public String chooseServerAlias(String arg0, Principal[] arg1, Socket arg2) {\n\treturn null;\n }\n\n /**\n * Returns the permission needed to connect to the last server principal\n * with the specified client certificate.\n */\n AuthenticationPermission getAuthenticationPermission(X509Certificate cert) {\n\tSet client = Collections.singleton(cert.getSubjectX500Principal());\n\tSet server = (serverPrincipal == null) ? null : Collections.singleton(serverPrincipal);\n\treturn new AuthenticationPermission(client, server, \"connect\");\n }\n\n /**\n * Gets the private credential for the specified X.509 certificate,\n * checking for AuthenticationPermission to connect with the last server\n * principal.\n *\n * @param cert the certificate for the local principal\n * @return the associated private credential or null if not found\n * @throws SecurityException if the access control context does not have\n *\t the proper AuthenticationPermission\n */\n @Override\n synchronized X500PrivateCredential getPrivateCredential(X509Certificate cert) {\n\treturn getPrivateCredential(cert, getAuthenticationPermission(cert));\n }\n\n /**\n * Gets the private credential for the specified X.509 certificate,\n * checking for the specified AuthenticationPermission.\n *\n * @param cert the certificate for the local principal\n * @param ap the permission needed to connect to the peer\n * @return the associated private credential or null if 
not found\n * @throws SecurityException if the access control context does not have\n *\t the proper AuthenticationPermission\n */\n protected X500PrivateCredential getPrivateCredential(X509Certificate cert, AuthenticationPermission ap) {\n\tSubject subject = getSubject();\n\tif (subject == null) {\n\t return null;\n\t}\n\tSecurityManager sm = System.getSecurityManager();\n\tif (sm != null) {\n\t sm.checkPermission(ap);\n\t}\n\treturn (X500PrivateCredential) AccessController.doPrivileged(new GetPrivateCredentialAction(subject, cert));\n }\n \n \n /**\n * Checks if the subject still contains the proper credentials, and the\n * current access control context has the proper AuthenticationPermission,\n * to use the current session. Callers should only call this method if\n * client authentication is being used.\n *\n * @throws SecurityException if the access control context does not have\n *\t the proper AuthenticationPermission\n * @throws UnsupportedConstraintException if the subject does not contain\n *\t the proper credentials\n */\n synchronized void checkAuthentication()\n\tthrows GeneralSecurityException\n {\n\tif (clientCredential == null) {\n\t throw new GeneralSecurityException(\n\t\t\"Client is not authenticated\");\n\t} else if (clientCredential.isDestroyed()) {\n\t throw new GeneralSecurityException(\n\t\t\"Private credentials are destroyed\");\n\t} else if (System.currentTimeMillis() > credentialsValidUntil) {\n\t throw new GeneralSecurityException(\n\t\t\"Certificates are no longer valid\");\n\t}\n\tif (subjectIsReadOnly) {\n\t SecurityManager sm = System.getSecurityManager();\n\t if (sm != null) {\n\t\tsm.checkPermission(authenticationPermission);\n\t }\n\t} else {\n\t Subject subject = getSubject();\n\t X509Certificate cert = clientCredential.getCertificate();\n\t if (getPrincipal(subject, cert) == null) {\n\t\tthrow new GeneralSecurityException(\"Missing principal\");\n\t }\n\t CertPath chain =\n\t\tgetCertificateChain(subject, cert);\n\t if (chain == 
null) {\n\t\tthrow new GeneralSecurityException(\n\t\t \"Missing public credentials\");\n\t }\n\t X500PrivateCredential pc = getPrivateCredential(\n\t\tcert, authenticationPermission);\n\t if (pc == null) {\n\t\tthrow new GeneralSecurityException(\n\t\t \"Missing private credentials\");\n\t } else if (!equalPrivateCredentials(clientCredential, pc)) {\n\t\tthrow new GeneralSecurityException(\n\t\t \"Wrong private credentials\");\n\t }\n\t}\n }\n}\n", "meta": {"content_hash": "527a69c207200fca5e44fa300086269f", "timestamp": "", "source": "github", "line_count": 259, "max_line_length": 122, "avg_line_length": 33.87258687258687, "alnum_prop": 0.7045480451384931, "repo_name": "pfirmstone/river-internet", "id": "01c4d486d5a17fffaaacedcc4db47435ee148cca", "size": "9363", "binary": false, "copies": "2", "ref": "refs/heads/trunk", "path": "JGDMS/jgdms-rmi-tls/src/main/java/au/net/zeus/rmi/tls/ClientSubjectKeyManager.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "2047"}, {"name": "Groff", "bytes": "863"}, {"name": "Groovy", "bytes": "35711"}, {"name": "HTML", "bytes": "4398920"}, {"name": "Java", "bytes": "33660467"}, {"name": "Makefile", "bytes": "3046"}, {"name": "Shell", "bytes": "69126"}]}} {"text": "(function(root) {\n\n//------------------------------------------------------------------//\n// Compatibility\n//------------------------------------------------------------------//\n\nvar emptyFn = function() {};\nvar console = root.console || {};\nvar document = root.document;\n\nconsole.error = console.error || emptyFn;\n\nroot.GLOBAL = root;\n\n//------------------------------------------------------------------//\n// Base64 encoding\n//------------------------------------------------------------------//\n\nfunction toBase64Digit(value) {\n\tvar digits =\n\t\t'ABCDEFGHIJKLMNOPQRSTUVWXYZ' +\n\t\t'abcdefghijklmnopqrstuvwxyz' +\n\t\t'0123456789+/';\n\treturn digits[value & 0x3F];\n}\n\nfunction strToUTF8Base64(str) {\n\n\tfunction 
decodeSurrogatePair(hi, lo) {\n\t\tvar resultChar = 0x010000;\n\t\tresultChar += lo - 0xDC00;\n\t\tresultChar += (hi - 0xD800) << 10;\n\t\treturn resultChar;\n\t}\n\n\tvar bytes = [0, 0, 0];\n\tvar byteIndex = 0;\n\tvar result = [];\n\n\tfunction output(s) {\n\t\tresult.push(s);\n\t}\n\n\tfunction emitBase64() {\n\n\t\tvar toDigit = toBase64Digit;\n\n\t\t// --Byte 0-- --Byte 1-- --Byte 2--\n\t\t// 1111 1122 2222 3333 3344 4444\n\n\t\tvar d1 = toDigit(bytes[0] >> 2);\n\t\tvar d2 = toDigit(\n\t\t\t((bytes[0] & 0x03) << 4) |\n\t\t\t(bytes[1] >> 4));\n\t\tvar d3 = toDigit(\n\t\t\t((bytes[1] & 0x0F) << 2) |\n\t\t\t(bytes[2] >> 6));\n\t\tvar d4 = toDigit(\n\t\t\tbytes[2] & 0x3F);\n\n\t\tif (byteIndex === 1) {\n\t\t\toutput(d1 + d2 + '==');\n\t\t}\n\t\telse if (byteIndex === 2) {\n\t\t\toutput(d1 + d2 + d3 + '=');\n\t\t}\n\t\telse {\n\t\t\toutput(d1 + d2 + d3 + d4);\n\t\t}\n\t}\n\n\tfunction emit(chr) {\n\t\tbytes[byteIndex++] = chr;\n\t\tif (byteIndex === 3) {\n\t\t\temitBase64();\n\t\t\tbytes[0] = 0;\n\t\t\tbytes[1] = 0;\n\t\t\tbytes[2] = 0;\n\t\t\tbyteIndex = 0;\n\t\t}\n\t}\n\n\tfunction emitLast() {\n\t\tif (byteIndex > 0) {\n\t\t\temitBase64();\n\t\t}\n\t}\n\n\t// Converts the string to UTF8:\n\n\tvar i, chr;\n\tvar hi, lo;\n\tfor (i = 0; i < str.length; i++) {\n\t\tchr = str.charCodeAt(i);\n\n\t\t// Test and decode surrogate pairs in the string\n\t\tif (chr >= 0xD800 && chr <= 0xDBFF) {\n\t\t\thi = chr;\n\t\t\tlo = str.charCodeAt(i + 1);\n\t\t\tif (lo >= 0xDC00 && lo <= 0xDFFF) {\n\t\t\t\tchr = decodeSurrogatePair(hi, lo);\n\t\t\t\ti++;\n\t\t\t}\n\t\t}\n\n\t\t// Encode the character as UTF-8.\n\t\tif (chr < 0x80) {\n\t\t\temit(chr);\n\t\t}\n\t\telse if (chr < 0x0800) {\n\t\t\temit((chr >> 6) | 0xC0);\n\t\t\temit(((chr >> 0) & 0x3F) | 0x80);\n\t\t}\n\t\telse if (chr < 0x10000) {\n\t\t\temit((chr >> 12) | 0xE0);\n\t\t\temit(((chr >> 6) & 0x3F) | 0x80);\n\t\t\temit(((chr >> 0) & 0x3F) | 0x80);\n\t\t}\n\t\telse if (chr < 0x110000) {\n\t\t\temit((chr >> 18) | 
0xF0);\n\t\t\temit(((chr >> 12) & 0x3F) | 0x80);\n\t\t\temit(((chr >> 6) & 0x3F) | 0x80);\n\t\t\temit(((chr >> 0) & 0x3F) | 0x80);\n\t\t}\n\t}\n\n\temitLast();\n\n\treturn result.join('');\n}\n\n// VLQ Base64 encoding used in source maps\n\nfunction toVLQ(value) {\n\n\tvar vlq, result, digit;\n\n\t// The LSB in the encoded result is used to indicate the sign of\n\t// the original value\n\tvlq = value < 0 ? ((-value) << 1) + 1 : (value << 1);\n\n\t// Digits are encoded least significant first.\n\n\tresult = '';\n\tdo {\n\t\t// Each Base64 digit encodes 5 bits. The sixth bit is a\n\t\t// continuation bit.\n\t\tdigit = vlq & 0x1F;\n\t\tvlq = vlq >> 5;\n\t\tdigit = digit | (vlq > 0 ? 0x20 : 0);\n\t\tresult += toBase64Digit(digit);\n\t}\n\twhile (vlq > 0);\n\n\treturn result;\n}\n\n//------------------------------------------------------------------//\n// Source maps\n//------------------------------------------------------------------//\n\nfunction getSourceMap(file, source, lineOffset) {\n\tvar mappings = [];\n\tvar lineCount = source.split(/\\r\\n?|\\n/).length;\n\tvar map;\n\tvar curLine, i;\n\n\tvar lastOriginalLine = 0;\n\tfunction addLineMapping(originalLine) {\n\t\tvar generatedColumn = 0;\n\t\tvar sourceIndex = 0;\n\t\tvar originalColumn = 0;\n\t\tvar segment = [\n\t\t\ttoVLQ(generatedColumn),\n\t\t\ttoVLQ(sourceIndex),\n\t\t\ttoVLQ(originalLine - lastOriginalLine),\n\t\t\ttoVLQ(originalColumn)\n\t\t].join('');\n\t\tlastOriginalLine = originalLine;\n\t\tmappings.push(segment);\n\t}\n\n\tfor (i = 0; i < lineOffset; i++) {\n\t\tmappings.push('');\n\t}\n\n\tfor (i = 0; i < lineCount; i++) {\n\t\taddLineMapping(i);\n\t}\n\n\tmap = [\n\t\t'{',\n\t\t'\"version\":3,',\n\t\t'\"sources\":[',\n\t\tJSON.stringify(file),\n\t\t'],',\n\t\t'\"names\":[],',\n\t\t'\"mappings\":',\n\t\tJSON.stringify(mappings.join(';')),\n\t\t',',\n\t\t'\"sourcesContent\":[',\n\t\tJSON.stringify(source),\n\t\t']',\n\t\t'}'\n\t].join('');\n\n\treturn 
map;\n}\n\n//------------------------------------------------------------------//\n// Path manipulation\n//------------------------------------------------------------------//\n\nvar basePath = window.location.pathname.replace(/\\/[^\\/]*$/, '');\nvar hrefOrigin = window.location.origin;\n\nfunction concatPath(basePath, path) {\n\tvar baseParts, pathParts, i, it;\n\tvar result = [];\n\n\t// If path is absolute, ignore the base path.\n\tif (path[0] === '/') {\n\t\t// Run the concatPath to normalize path.\n\t\treturn concatPath(path, '');\n\t}\n\n\t// Remove leading and trailing slashes and split the base path.\n\t// Note that basePath is always handled as absolute.\n\tbaseParts = basePath.replace(/^\\/|\\/$/g, '').split('/');\n\n\t// Remove any trailing slash and split the path.\n\tpathParts = path.replace(/^\\/$/, '').split('/');\n\n\t// Append the parts of basePath to the result, solving . and ..\n\t// in the path.\n\tfor (i = 0; i < baseParts.length; i++) {\n\t\tit = baseParts[i];\n\t\tif (it === '.' || (i === 0 && it === '')) {\n\t\t\t// Ignore '.' or an empty part (result of an empty\n\t\t\t// basePath).\n\t\t\tcontinue;\n\t\t}\n\t\telse if (it === '..') {\n\t\t\tresult.pop();\n\t\t}\n\t\telse {\n\t\t\tresult.push(it);\n\t\t}\n\t}\n\n\t// Append the parts of path to the result, again solving . and ..\n\tfor (i = 0; i < pathParts.length; i++) {\n\t\tit = pathParts[i];\n\t\tif (it === '.' || it === '') {\n\t\t\t// Ignore '.' 
or an empty part (result of an empty path).\n\t\t\tcontinue;\n\t\t}\n\t\telse if (it === '..') {\n\t\t\tresult.pop();\n\t\t}\n\t\telse {\n\t\t\tresult.push(it);\n\t\t}\n\t}\n\n\treturn '/' + result.join('/');\n}\n\nfunction solvePossiblePaths(path, relativeTo) {\n\tvar isRelative = /^(\\.){0,2}\\//.test(path);\n\tvar possiblePaths = [];\n\tvar basePath, curPath;\n\n\tif (!/\\.js/i.test(path)) {\n\t\tpath += '.js';\n\t}\n\n\tif (isRelative) {\n\t\tcurPath = concatPath(relativeTo, path);\n\t\tpossiblePaths.push(curPath);\n\t}\n\telse {\n\t\tbasePath = relativeTo;\n\t\twhile (true) {\n\t\t\tcurPath = concatPath(basePath, 'node_modules');\n\t\t\tcurPath = concatPath(curPath, path);\n\t\t\tif (possiblePaths.indexOf(curPath) >= 0) {\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\telse {\n\t\t\t\tpossiblePaths.push(curPath);\n\t\t\t}\n\t\t\tbasePath = concatPath(basePath, '..');\n\t\t}\n\t}\n\n\treturn possiblePaths;\n}\n\n//------------------------------------------------------------------//\n// Ajax request\n//------------------------------------------------------------------//\n\nvar requestCache = {};\nvar pendingRequests = {};\n\nfunction sendRequest(path, callback) {\n\tvar request, pending;\n\n\t// Check for a cached result\n\tif (requestCache[path]) {\n\t\tsetTimeout(function() {\n\t\t\tcallback(requestCache[path]);\n\t\t}, 0);\n\t\treturn;\n\t}\n\telse if (pendingRequests[path]) {\n\t\tpendingRequests[path].callbacks.push(callback);\n\t\treturn;\n\t}\n\n\t// If the request is not on the cache, makes the Ajax request:\n\n\tif (root.XMLHttpRequest) {\n\t\trequest = new root.XMLHttpRequest();\n\t}\n\telse if (root.ActiveXObject) {\n\t\trequest = new root.ActiveXObject('Microsoft.XMLHTTP');\n\t}\n\n\tpending = pendingRequests[path] = { callbacks: [callback] };\n\n\trequest.onreadystatechange = function() {\n\t\tvar response, callbacks, i, cb;\n\t\tif (request.readyState === 4) {\n\t\t\trequestCache[path] = response = parseResponse(request);\n\t\t\tresponse.originPath = 
path;\n\t\t\tcallbacks = pendingRequests[path].callbacks;\n\t\t\tdelete pendingRequests[path];\n\t\t\tfor (i = 0; i < callbacks.length; i++) {\n\t\t\t\tcb = callbacks[i];\n\t\t\t\tcb(response);\n\t\t\t}\n\t\t}\n\t};\n\n\trequest.open('GET', path, true);\n\trequest.send(null);\n}\n\nfunction parseResponse(request) {\n\tvar result = {};\n\tif (request.status === 200) {\n\t\tresult.success = true;\n\t\tresult.source = request.responseText;\n\t}\n\telse if (request.status === 404) {\n\t\tresult.success = false;\n\t\tresult.notFound = true;\n\t\tresult.error = 'Not found (404)';\n\t}\n\telse {\n\t\tresult.success = false;\n\t\tresult.error = 'Server error (' + request.status + ')';\n\t}\n\treturn result;\n}\n\n//------------------------------------------------------------------//\n// Module loading\n//------------------------------------------------------------------//\n\nvar pending = [];\nvar moduleMap = {};\nvar moduleError = {};\nvar execOrder = [];\n\nfunction getRequires(source) {\n\tvar len;\n\tvar lines;\n\tvar i, it, match;\n\tvar requires = [];\n\n\t// Strips multiline comments from source.\n\tdo {\n\t\tlen = source.length;\n\t\tsource = source.replace(/\\/\\*([^\\*]|\\*(?!\\/))*(\\*\\/|$)/, '');\n\t}\n\twhile (len !== source.length);\n\n\tlines = source.split(/\\r\\n?|\\n/);\n\tfor (i = 0; i < lines.length; i++) {\n\t\tit = lines[i];\n\t\tit = it.replace(/^\\s*((?:var)?\\s*(?:\\w+\\s*=))?\\s*/, '');\n\t\tmatch = it.match(\n\t\t\t/^require\\s*\\(\\s*(\\'|\\\")((?:(?!\\1).)+)\\1\\s*\\)\\s*(;|$)/);\n\t\tif (match) {\n\t\t\trequires.push(match[2]);\n\t\t}\n\t}\n\n\treturn requires;\n}\n\nfunction loadModule(response, request, callback) {\n\tvar originPath = response.originPath;\n\tvar pendingIndex = pending.indexOf(request);\n\tvar module;\n\tvar requireCount = 0;\n\tvar requires, i, it;\n\n\tfunction callCallbackWhenOver() {\n\t\tif (requireCount <= 0 && typeof callback === 'function') {\n\t\t\tcallback(module, request);\n\t\t}\n\t}\n\n\tfunction 
onRequiredLoaded(depModule, depRequest) {\n\t\tmodule.dependencies.push(depModule.path);\n\t\tmodule.requireMap[depRequest.path] = depModule.path;\n\t\trequireCount -= 1;\n\t\tcallCallbackWhenOver();\n\t}\n\n\tpending.splice(pendingIndex, 1);\n\n\tmodule = moduleMap[originPath];\n\tif (!module) {\n\t\tmodule = moduleMap[originPath] = {\n\t\t\tpath: originPath,\n\t\t\tbasePath: concatPath(originPath, '..'),\n\t\t\terror: response.success ? false : response.error,\n\t\t\trequest: request,\n\t\t\tresponse: response,\n\t\t\tsource: response.source,\n\t\t\tdependencies: [],\n\t\t\trequireMap: {},\n\t\t\tmoduleObject: {\n\t\t\t\texports: {}\n\t\t\t}\n\t\t};\n\n\t\tif (!module.error) {\n\t\t\trequires = module.requires = getRequires(module.source);\n\t\t\trequireCount = requires.length;\n\n\t\t\tfor (i = 0; i < requires.length; i++) {\n\t\t\t\tit = requires[i];\n\t\t\t\tloadFile(it, module.basePath, module.path, onRequiredLoaded);\n\t\t\t}\n\n\t\t\tcallCallbackWhenOver();\n\t\t}\n\t\telse {\n\t\t\tcallCallbackWhenOver();\n\t\t}\n\t}\n\telse {\n\t\tcallCallbackWhenOver();\n\t}\n}\n\nfunction loadFile(path, relativeTo, requestedBy, callback) {\n\n\tvar requestInfo = {\n\t\tpath: path,\n\t\tbasePath: relativeTo,\n\t\tsolvedPaths: solvePossiblePaths(path, relativeTo),\n\t\tnextIndex: 0\n\t};\n\n\tpending.push(requestInfo);\n\n\tfunction onSuccess(response) {\n\t\tloadModule(response, requestInfo, callback);\n\t}\n\n\tfunction onError(response) {\n\t\tconsole.error(\n\t\t\t\t'Failed to load `' + path +\n\t\t\t\t'` requested by `' + requestedBy +\n\t\t\t\t'`: ' + response.error);\n\t\tloadModule(response, requestInfo, callback);\n\t}\n\n\tfunction tryLoadNext() {\n\t\tvar nextPath = requestInfo.solvedPaths[requestInfo.nextIndex++];\n\t\tvar isLast = requestInfo.nextIndex >= requestInfo.solvedPaths.length;\n\n\t\t// Send the request for the next possible path.\n\t\tsendRequest(nextPath, function(response) {\n\t\t\tif (response.success) {\n\t\t\t\t// The file was successfully 
loaded, process the\n\t\t\t\t// result.\n\t\t\t\tonSuccess(response);\n\t\t\t}\n\t\t\telse if (response.notFound && !isLast) {\n\t\t\t\t// The file was not found, but there are other\n\t\t\t\t// possible paths, so continue trying.\n\t\t\t\ttryLoadNext();\n\t\t\t}\n\t\t\telse {\n\t\t\t\t// The request failed or the last possible path was\n\t\t\t\t// not found. Generate an error.\n\t\t\t\tonError(response);\n\t\t\t}\n\t\t});\n\t}\n\n\t// Try load the first possible path.\n\ttryLoadNext();\n}\n\n//------------------------------------------------------------------//\n// Module execution\n//------------------------------------------------------------------//\n\nfunction getModule(modulePath) {\n\tvar module = moduleMap[modulePath];\n\tif (!module) {\n\t\tthrow new Error('Module `' + modulePath + '` not loaded');\n\t}\n\treturn module;\n}\n\nfunction getLoadingOrder(module) {\n\tvar result = [];\n\tvar deps, i, it, subResult;\n\tif (module && !module.visited) {\n\t\tmodule.visited = true;\n\t\tdeps = module.dependencies;\n\t\tfor (i = 0; i < deps.length; i++) {\n\t\t\tit = getModule(deps[i]);\n\t\t\tsubResult = getLoadingOrder(it);\n\t\t\tresult = result.concat(subResult);\n\t\t}\n\t\tresult.push(module);\n\t}\n\treturn result;\n}\n\nfunction beforeModuleLoading(module) {\n}\n\nfunction afterModuleLoading(module) {\n\tif (!axloader._moduleFn) {\n\t\t// Assume a syntax error.\n\t\tmodule.syntaxError = true;\n\t}\n\tmodule.moduleFunction = axloader._moduleFn;\n\tmodule.executed = false;\n\tdelete axloader._moduleFn;\n}\n\nfunction executeModule(module, afterCallback) {\n\tvar order = getLoadingOrder(module);\n\n\tfunction loadNext() {\n\t\tvar nextModule = order.shift();\n\t\tvar script, source, sourceMap;\n\t\tif (nextModule && !nextModule.error) {\n\t\t\tbeforeModuleLoading(nextModule);\n\n\t\t\tsource = 'axloader._moduleFn = (' +\n\t\t\t\t'function(require, module, exports, global) { // ' +\n\t\t\t\tnextModule.path + '\\n' +\n\t\t\t\tnextModule.source 
+\n\t\t\t\t'\\n})';\n\t\t\tsource += '\\n//# sourceMappingURL=' +\n\t\t\t\t'data:application/json;charset=utf-8;base64,';\n\t\t\tsourceMap = getSourceMap(\n\t\t\t\threfOrigin + nextModule.path, nextModule.source, 1);\n\t\t\tsource += strToUTF8Base64(sourceMap);\n\n\t\t\tscript = document.createElement('script');\n\t\t\tscript.id = nextModule.path;\n\t\t\tscript.type = 'text/javascript';\n\t\t\tscript.src =\n\t\t\t\t'data:text/javascript;charset=utf-8;base64,' +\n\t\t\t\tstrToUTF8Base64(source);\n\t\t\tscript.innerHTML = source;\n\t\t\tscript.onload = function() {\n\t\t\t\tafterModuleLoading(nextModule);\n\t\t\t\tsetTimeout(loadNext, 0);\n\t\t\t};\n\t\t\tdocument.head.appendChild(script);\n\t\t}\n\t\telse if (nextModule) {\n\t\t\tnextModule.notFound = true;\n\t\t\tsetTimeout(loadNext, 0);\n\t\t}\n\t\telse {\n\t\t\tdoExecute(module);\n\t\t\tif (typeof afterCallback === 'function') {\n\t\t\t\tafterCallback();\n\t\t\t}\n\t\t}\n\t}\n\n\tfunction doExecute(theModule) {\n\n\t\tfunction require(path) {\n\t\t\tvar solvedPath = theModule.requireMap[path];\n\t\t\tvar requiredModule = solvedPath && getModule(solvedPath);\n\t\t\tif (!requiredModule) {\n\t\t\t\tthrow new Error(\n\t\t\t\t\t'require: the module `' + path + '` was not preloaded');\n\t\t\t}\n\t\t\telse if (requiredModule.notFound) {\n\t\t\t\tthrow new Error('Cannot find module \\'' + path + '\\'');\n\t\t\t}\n\t\t\telse if (requiredModule.error) {\n\t\t\t\tthrow new Error(\n\t\t\t\t\t'require: the module `' + path + '` could not be loaded: ' +\n\t\t\t\t\trequiredModule.error);\n\t\t\t}\n\n\t\t\tif (!requiredModule.executed && !requiredModule.executing) {\n\t\t\t\tdoExecute(requiredModule);\n\t\t\t}\n\n\t\t\treturn requiredModule.moduleObject.exports;\n\t\t}\n\n\t\tvar fn = theModule.moduleFunction;\n\t\tvar moduleObject = theModule.moduleObject;\n\t\tvar moduleExports = moduleObject.exports;\n\n\t\tif (theModule.syntaxError) {\n\t\t\tthrow new SyntaxError(theModule.path);\n\t\t}\n\t\telse if (theModule.notFound) 
{\n\t\t\tthrow new Error('module not found: ' + theModule.path);\n\t\t}\n\n\t\ttry {\n\t\t\ttheModule.executing = true;\n\t\t\tfn.call(root, require, moduleObject, moduleExports, root);\n\t\t}\n\t\tfinally {\n\t\t\ttheModule.executing = false;\n\t\t}\n\t\ttheModule.executed = true;\n\t}\n\n\tloadNext();\n}\n\n//------------------------------------------------------------------//\n// Public API\n//------------------------------------------------------------------//\n\nvar axloader = {};\n\naxloader.require = function require(path, afterCallback) {\n\tloadFile(path, basePath, 'axloader.require', function(module) {\n\t\texecuteModule(module, afterCallback);\n\t});\n};\n\naxloader.getPending = function getPending() {\n\treturn pending;\n};\n\naxloader.getModuleMap = function getModuleMap() {\n\treturn moduleMap;\n};\n\nroot.axloader = axloader;\n\n}(this));\n", "meta": {"content_hash": "4743262d432c18a51e55f21bb8262f7e", "timestamp": "", "source": "github", "line_count": 671, "max_line_length": 71, "avg_line_length": 22.758569299552907, "alnum_prop": 0.5774998362910091, "repo_name": "ricardobm/ax-loader", "id": "26a7854d505d1e5992646f05b596e9df22dda295", "size": "15271", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/ax-loader.js", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "21108"}]}} {"text": "package jugsaar12.nio.networking;\n\nimport java.net.ServerSocket;\nimport java.net.Socket;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\n\n/**\n * TODO MAX_POOL_SIZE to 1000\n *

\n * TODO show RejectedExecutionHandler -> Default AbortPolicy, CallerRunsPolicy\n */\npublic class C_ExecutorServiceMultiThreadedEchoServer {\n\n public static void main(String[] args) throws Exception {\n\n System.out.println(\"C_ExecutorServiceMultiThreadedEchoServer running\");\n\n ExecutorService es = Executors.newCachedThreadPool();\n\n try (ServerSocket ss = new ServerSocket(1337)) {\n\n while (true) {\n\n Socket s = ss.accept(); // blocking-call, never returns null!\n es.submit(() -> Util.process(s));\n }\n }\n }\n}", "meta": {"content_hash": "35befc48d23f03d692415d764e21ff48", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 78, "avg_line_length": 25.333333333333332, "alnum_prop": 0.7144736842105263, "repo_name": "jugsaar/jugsaar-meeting-12", "id": "aec0332d325665aa4bb0aec3abf2abd31679f07c", "size": "760", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "java-nio-networking/code/jugsaar12-java-nio-networking/src/main/java/jugsaar12/nio/networking/C_ExecutorServiceMultiThreadedEchoServer.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "17468"}]}} {"text": "import pyxb.binding.generate\nimport pyxb.utils.domutils\nfrom xml.dom import Node\n\nimport os.path\nschema_path = os.path.abspath(os.path.join(os.path.dirname(__file__),\n '../schemas/po1.xsd'))\ncode = pyxb.binding.generate.GeneratePython(schema_location=schema_path)\n\n#file('code.py', 'w').write(code)\nrv = compile(code, 'test', 'exec')\neval(rv)\n\nfrom pyxb.exceptions_ import *\n\nfrom pyxb.utils import domutils\n\ndef ToDOM (instance, tag=None, dom_support=None):\n return instance.toDOM(dom_support).documentElement\n\nimport unittest\n\nclass TestPO1 (unittest.TestCase):\n street_content = '''95 Main St.\nAnytown, AS 12345-6789'''\n street_xml = '%s' % (street_content,)\n street_dom = pyxb.utils.domutils.StringToDOM(street_xml).documentElement\n\n address1_xml = 'Customer95 Main St'\n address2_xml = 'Sugar Mama24 
E. Dearling Ave.'\n\n def tearDown (self):\n pyxb.RequireValidWhenGenerating(True)\n pyxb.RequireValidWhenParsing(True)\n\n def testPythonElementSimpleContent (self):\n elt = USAddress._ElementMap['street'].elementBinding()(self.street_content)\n self.assertEqual(self.street_content, elt)\n self.assertEqual(ToDOM(elt).toxml(\"utf-8\"), self.street_xml)\n\n def testDOMElementSimpleContent (self):\n elt = USAddress._ElementMap['street'].elementBinding().createFromDOM(self.street_dom)\n self.assertEqual(ToDOM(elt).toxml(\"utf-8\"), self.street_xml)\n\n def testPythonElementComplexContent_Element (self):\n addr = USAddress(name='Customer', street='95 Main St')\n self.assertEqual('95 Main St', addr.street)\n addr = USAddress('Customer', '95 Main St')\n self.assertEqual('95 Main St', addr.street)\n addr.street = '43 West Oak'\n self.assertEqual('43 West Oak', addr.street)\n #self.assertEqual('%s' % (self.address1_xml,), ToDOM(addr, tag='s').toxml(\"utf-8\"))\n\n def testDOM_CTD_element (self):\n # NB: USAddress is a CTD, not an element.\n xml = '%s' % (self.address1_xml,)\n dom = pyxb.utils.domutils.StringToDOM(xml)\n addr2 = USAddress.Factory(_dom_node=dom.documentElement)\n #self.assertEqual(xml, ToDOM(addr2, tag='shipTo').toxml(\"utf-8\"))\n\n def testPurchaseOrder (self):\n po = purchaseOrder(shipTo=USAddress(name='Customer', street='95 Main St'),\n billTo=USAddress(name='Sugar Mama', street='24 E. Dearling Ave'),\n comment='Thanks!')\n xml = ToDOM(po).toxml(\"utf-8\")\n xml1 = 'Customer95 Main StSugar Mama24 E. 
Dearling AveThanks!'\n self.assertEqual(xml, xml1)\n\n dom = pyxb.utils.domutils.StringToDOM(xml)\n po2 = purchaseOrder.createFromDOM(dom.documentElement)\n self.assertEqual(xml1, ToDOM(po2).toxml(\"utf-8\"))\n loc = po2.shipTo._location()\n self.assertTrue((not isinstance(loc, pyxb.utils.utility.Locatable_mixin)) or (58 == loc.columnNumber))\n loc = po2.billTo.name._location()\n self.assertTrue((not isinstance(loc, pyxb.utils.utility.Locatable_mixin)) or (131 == loc.columnNumber))\n\n po2 = CreateFromDocument(xml)\n self.assertEqual(xml1, ToDOM(po2).toxml(\"utf-8\"))\n loc = po2.shipTo._location()\n self.assertTrue((not isinstance(loc, pyxb.utils.utility.Locatable_mixin)) or (58 == loc.columnNumber))\n loc = po2.billTo.name._location()\n self.assertTrue((not isinstance(loc, pyxb.utils.utility.Locatable_mixin)) or (131 == loc.columnNumber))\n\n\n xml2 = 'Customer95 Main StSugar Mama24 E. Dearling AveThanks!'\n bds = pyxb.utils.domutils.BindingDOMSupport()\n bds.setDefaultNamespace(Namespace)\n self.assertEqual(xml2, ToDOM(po2, dom_support=bds).toxml(\"utf-8\"))\n\n def testGenerationValidation (self):\n ship_to = USAddress('Robert Smith', 'General Delivery')\n po = purchaseOrder(ship_to)\n self.assertEqual('General Delivery', po.shipTo.street)\n self.assertTrue(po.billTo is None)\n\n self.assertTrue(pyxb.RequireValidWhenGenerating())\n self.assertRaises(pyxb.DOMGenerationError, po.toxml)\n try:\n pyxb.RequireValidWhenGenerating(False)\n self.assertFalse(pyxb.RequireValidWhenGenerating())\n xmls = po.toxml(\"utf-8\", root_only=True)\n self.assertEqual('General DeliveryRobert Smith', xmls)\n finally:\n pyxb.RequireValidWhenGenerating(True)\n self.assertRaises(pyxb.UnrecognizedContentError, CreateFromDocument, xmls)\n self.assertTrue(pyxb.RequireValidWhenParsing())\n try:\n pyxb.RequireValidWhenParsing(False)\n self.assertFalse(pyxb.RequireValidWhenParsing())\n po2 = CreateFromDocument(xmls)\n finally:\n pyxb.RequireValidWhenParsing(True)\n self.assertEqual('General 
Delivery', po2.shipTo.street)\n self.assertTrue(po2.billTo is None)\n\nif __name__ == '__main__':\n unittest.main()\n \n \n", "meta": {"content_hash": "919acf526bf392f33f5376056b08089f", "timestamp": "", "source": "github", "line_count": 119, "max_line_length": 269, "avg_line_length": 46.739495798319325, "alnum_prop": 0.6650485436893204, "repo_name": "jonfoster/pyxb1", "id": "bf3c9d2f0c5635eae42a36b08b6241bea6e6e94f", "size": "5562", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/drivers/test-po1.py", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Python", "bytes": "1564427"}, {"name": "Shell", "bytes": "18946"}]}} {"text": "\ufeffusing System;\nusing System.Collections.Generic;\nusing System.Linq;\nusing System.Text;\nusing System.Threading.Tasks;\nusing Newtonsoft.Json;\n\nnamespace VJeek.Microdata.Intangible.StructuredValue\n{\n\tpublic partial class OpeningHoursSpecification : StructuredValue\n\t{\n\t\t[JsonProperty(\"closes\")]\n\t\tpublic TimeSpan Closes { get; set; }\n\n\t\t[JsonProperty(\"dayOfWeek\")]\n\t\tpublic DayOfWeek DayOfWeek { get; set; }\n\n\t\t[JsonProperty(\"opens\")]\n\t\tpublic TimeSpan Opens { get; set; }\n\n\t\t[JsonProperty(\"validFrom\")]\n\t\tpublic DateTime ValidFrom { get; set; }\n\n\t\t[JsonProperty(\"validThrough\")]\n\t\tpublic DateTime ValidThrough { get; set; }\n\t}\n}\n", "meta": {"content_hash": "ebe952e1b2b29a2152f4caea184b6845", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 65, "avg_line_length": 23.22222222222222, "alnum_prop": 0.7384370015948963, "repo_name": "vjeek/microdata", "id": "eee2c6ba0e8520d9bd949baba07a99f7c5b3b95c", "size": "629", "binary": false, "copies": "1", "ref": "refs/heads/development", "path": "Sources/VJeek.Microdata/Intangible/StructuredValue/OpeningHoursSpecification.cs", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "746"}, {"name": "C#", "bytes": "19991"}, {"name": "PowerShell", "bytes": 
"95259"}]}} {"text": "The GraphQL module allows client applications to query the content handled by an Orchard website. \nIt enables the GraphiQL Explorer view to test GraphQL queries, and provides HTTP endpoints to send client queries.\n\n## HTTP Methods, Headers, and Body\n\n### GET request\n\nWhen receiving an HTTP GET request, the GraphQL query should be specified in the \"query\" query string. For example, if we wanted to execute the following GraphQL query:\n\n```graphql\n{\n me {\n name\n }\n}\n```\n\nThis request could be sent via an HTTP GET like so:\n\n`http://myapi/graphql?query={me{name}}`\n\nQuery variables can be sent as a JSON-encoded string in an additional query parameter called variables. If the query contains several named operations, an operationName query parameter can be used to control which one should be executed.\n\n### POST request \n\n#### application/json content type\n\nA standard GraphQL POST request should use the `application/json` content-type header, and include a JSON-encoded body of the following form:\n\n```graphql\n{\n \"query\": \"...\",\n \"operationName\": \"...\",\n \"variables\": { \"myVariable\": \"someValue\", ... }\n}\n```\n\n`operationName` and `variables` are optional fields. `operationName` is only required if multiple operations are present in the query.\n\n#### application/graphql content type\n\nAnother option is to use the `application/graphql` content-type header, and the HTTP POST body contents is treated as the GraphQL query string.\n\n#### query string\n\nIn addition to the above, If the \"query\" query string parameter is present (as in the GET example above), it will be parsed and handled in the same way as the HTTP GET case.\n\n### Response\n\nRegardless of the method by which the query and variables were sent, the response is returned in the body of the request in JSON format. 
\nA query might result in some data and some errors, and those are returned in a JSON object of the form:\n\n```json\n{\n \"data\": { ... },\n \"errors\": [ ... ]\n}\n```\n\nIf there were no errors returned, the \"errors\" field is not present on the response. \nIf no data is returned the \"data\" field is only included if the error occurred during execution.\n\n## Authentication\n\nExecuting a GraphQL query requires the issuer to have the `ExecuteGraphQL` permission. Like any other API in Orchard Core, the GraphQL API supports \ncookie and OAuth 2.0 authentication. This means it's compatible with the OpenId module and supports JSON Web Token (JWT).\n\nBy default anonymous users are not able to execute a GraphQL query.\n\n## Configuration\n\nIt's possible to configure graphql options for exposing exceptions and max depth, max complexity and field impact.\n\nConfiguration is done via the standard shell configuration, as follows.\n\n```json\n{\n \"OrchardCore\": {\n \"OrchardCore_Apis_GraphQL\": {\n \"ExposeExceptions\": true,\n \"MaxDepth\": 50, \n \"MaxComplexity\": 100, \n \"FieldImpact\": 2.0,\n \"DefaultNumberOfResults\": 100,\n \"MaxNumberOfResults\": 1000,\n \"MaxNumberOfResultsValidationMode\": \"Default\"\n }\n }\n}\n```\n\n*ExposeExceptions (bool, Default: false for production, true for development)*\n\nIf set to true stack traces are exposed to graphql clients\n\n*DefaultNumberOfResults (int, Default: 100)*\nThe default number of results returned by all paged fields/types.\n\n*MaxNumberOfResults (int, Default: 1000)*\nThe maximum number of results returned by all paged fields/types.\n\n*MaxNumberOfResultsValidationMode (enum, Values: Default|Enabled|Disabled, Default: Default)()*\nSpecify the validation behaviour if the max number of results is exceeded in a pager parameter\n\n* Default - In production info will be logged and only the max number of results will be returned. 
In development a graphql validation error will be raised.\n* Enabled - a graphql validation error will be raised\n* Disabled - Info will be logged and only the max number of results will be returned\n\n*MaxDepth (int?, Default: 20)*\n\nEnforces the total maximum nesting across all queries in a request.\n\n*MaxComplexity (int?, Default: null)*\n\n*FieldImpact (double?, Default: null)*\n\nFor more information on MaxDepth, MaxComplexity, FieldImpact & protecting against malicious queries view the graphql-dot-net documentation at \n", "meta": {"content_hash": "8ee80330a70b8aded11c89b2578ddc87", "timestamp": "", "source": "github", "line_count": 117, "max_line_length": 237, "avg_line_length": 36.43589743589744, "alnum_prop": 0.7522871217452498, "repo_name": "xkproject/Orchard2", "id": "2a44f43220f1cc656cf5d84f218eeb9a33797940", "size": "4308", "binary": false, "copies": "3", "ref": "refs/heads/master_PCCOM", "path": "src/docs/reference/modules/Apis.GraphQL/README.md", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "C#", "bytes": "7779422"}, {"name": "CSS", "bytes": "2900240"}, {"name": "Dockerfile", "bytes": "424"}, {"name": "HTML", "bytes": "1472436"}, {"name": "JavaScript", "bytes": "2184254"}, {"name": "Liquid", "bytes": "43273"}, {"name": "PHP", "bytes": "2484"}, {"name": "PowerShell", "bytes": "142165"}, {"name": "Pug", "bytes": "55503"}, {"name": "SCSS", "bytes": "215570"}, {"name": "TypeScript", "bytes": "41644"}]}} {"text": "Namespace Ribbons.Document.Create\n\n Public Class FromProfile\n Inherits RibbonButtonBase\n\n Public Sub New()\n _Image = \"\"\n _Order = 1\n _Text = \"From Profile\"\n _ToolTip = \"\"\n End Sub\n\n Protected Friend Overrides Sub OnClick()\n\n End Sub\n\n Protected Friend Overrides Sub OnIsEnabled()\n\n End Sub\n\n End Class\n\nEnd Namespace\n\n", "meta": {"content_hash": "81adc258cf3c3d2de8198ac49cb880ff", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 52, "avg_line_length": 17.375, 
"alnum_prop": 0.5539568345323741, "repo_name": "nublet/DMS", "id": "51e57b874a4c962bdaa2f2ba00ff815d1485c81b", "size": "417", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "DMS.Forms/Ribbons/Items/Document/Create/FromProfile.vb", "mode": "33188", "license": "mit", "language": [{"name": "Visual Basic", "bytes": "2291940"}]}} {"text": "package com.dfyy.bussiness;\n\nimport java.io.Serializable;\nimport java.util.Date;\n\nimport javax.persistence.Column;\nimport javax.persistence.Entity;\nimport javax.persistence.FetchType;\nimport javax.persistence.GeneratedValue;\nimport javax.persistence.Id;\nimport javax.persistence.JoinColumn;\nimport javax.persistence.ManyToOne;\nimport javax.persistence.Table;\nimport javax.xml.bind.annotation.XmlAccessType;\nimport javax.xml.bind.annotation.XmlAccessorType;\nimport javax.xml.bind.annotation.XmlElement;\nimport javax.xml.bind.annotation.XmlRootElement;\nimport javax.xml.bind.annotation.XmlTransient;\nimport javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;\n\nimport org.hibernate.annotations.GenericGenerator;\n\nimport com.dfyy.util.JaxbDateSerializer;\n\n@Entity\n@Table(name = \"user\")\n@XmlRootElement\n@XmlAccessorType(XmlAccessType.NONE)\npublic class User implements Serializable {\n\tprivate static final long serialVersionUID = 8517308475246169441L;\n\n\t@Id\n\t@Column(name = \"id\")\n\t@GenericGenerator(name = \"idGenerator\", strategy = \"assigned\")\n\t@GeneratedValue(generator = \"idGenerator\")\n\t@XmlElement\n\tprivate String id;\n\n\t@Column(name = \"phone\")\n\t@XmlElement\n\tprivate String phone;\n\n\t@Column(name = \"password\")\n\t@XmlTransient\n\tprivate String password;\n\n\t@Column(name = \"alias\")\n\t@XmlElement\n\tprivate String alias;\n\n\t@Column(name = \"thumbnail\")\n\t@XmlElement\n\tprivate String thumbnail;\n\n\t@Column(name = \"address\")\n\t@XmlElement\n\tprivate String address;\n\n\t@Column(name = \"point\")\n\t@XmlElement\n\tprivate Integer point;\n\n\t@Column(name = 
\"description\")\n\t@XmlElement\n\tprivate String description;\n\n\t@Column(name = \"status\")\n\t@XmlElement\n\tprivate Integer status;\n\n\t@Column(name = \"x\")\n\t@XmlElement\n\tprivate Double x;\n\n\t@Column(name = \"y\")\n\t@XmlElement\n\tprivate Double y;\n\n\t@Column(name = \"currency\")\n\t@XmlElement\n\tprivate Integer currency;\n\t\n\t\n\t@Column(name = \"money\")\n\t@XmlElement\n\tprivate Integer money;\n\n\t@Column(name = \"scoring\")\n\t@XmlElement\n\tprivate Double scoring;\n\n\t@Column(name = \"tjcode\")\n\t@XmlElement\n\tprivate String tjcode;\n\t\n\t@Column(name = \"tjcoin\")\n\t@XmlElement\n\tprivate Integer tjcoin;\n\t\n\t\n\t@Column(name = \"time\")\n\t@XmlJavaTypeAdapter(JaxbDateSerializer.class)\n\tprivate Date time;\n\n\t@ManyToOne(fetch = FetchType.EAGER)\n\t@JoinColumn(name = \"level\")\n\t@XmlElement\n\tprivate UserLevel level;\n\n\t@ManyToOne(fetch = FetchType.EAGER)\n\t@JoinColumn(name = \"aid\")\n\t@XmlElement\n\tprivate Area area;\n\t\n\t@Column(name = \"activity\")\n\tprivate Integer activity;\n\t\n\t@Column(name = \"teamwork\")\n\t@XmlElement\n\tprivate boolean teamwork;\n\t\n\t\n\t@Column(name = \"acceptcoupon\")\n\t@XmlElement\n\tprivate boolean acceptCoupon;\n\n\tpublic User() {\n\n\t}\n\n\tpublic String getId() {\n\t\treturn id;\n\t}\n\n\tpublic void setId(String id) {\n\t\tthis.id = id;\n\t}\n\n\tpublic String getPhone() {\n\t\treturn phone;\n\t}\n\n\tpublic void setPhone(String phone) {\n\t\tthis.phone = phone;\n\t}\n\n\tpublic String getPassword() {\n\t\treturn password;\n\t}\n\n\tpublic void setPassword(String password) {\n\t\tthis.password = password;\n\t}\n\n\tpublic String getAlias() {\n\t\treturn alias;\n\t}\n\n\tpublic void setAlias(String alias) {\n\t\tthis.alias = alias;\n\t}\n\n\tpublic String getThumbnail() {\n\t\treturn thumbnail;\n\t}\n\n\tpublic void setThumbnail(String thumbnail) {\n\t\tthis.thumbnail = thumbnail;\n\t}\n\n\tpublic String getAddress() {\n\t\treturn address;\n\t}\n\n\tpublic void setAddress(String 
address) {\n\t\tthis.address = address;\n\t}\n\n\tpublic Integer getPoint() {\n\t\treturn point;\n\t}\n\n\tpublic void setPoint(Integer point) {\n\t\tthis.point = point;\n\t}\n\n\tpublic String getDescription() {\n\t\treturn description;\n\t}\n\n\tpublic void setDescription(String description) {\n\t\tthis.description = description;\n\t}\n\n\tpublic Integer getStatus() {\n\t\treturn status;\n\t}\n\n\tpublic void setStatus(Integer status) {\n\t\tthis.status = status;\n\t}\n\n\tpublic Double getX() {\n\t\treturn x;\n\t}\n\n\tpublic void setX(Double x) {\n\t\tthis.x = x;\n\t}\n\n\tpublic Double getY() {\n\t\treturn y;\n\t}\n\n\tpublic void setY(Double y) {\n\t\tthis.y = y;\n\t}\n\n\tpublic Integer getCurrency() {\n\t\treturn currency;\n\t}\n\n\tpublic void setCurrency(Integer currency) {\n\t\tthis.currency = currency;\n\t}\n\n\tpublic Double getScoring() {\n\t\treturn scoring;\n\t}\n\n\tpublic void setScoring(Double scoring) {\n\t\tthis.scoring = scoring;\n\t}\n\n\tpublic Date getTime() {\n\t\treturn time;\n\t}\n\n\tpublic void setTime(Date time) {\n\t\tthis.time = time;\n\t}\n\n\tpublic UserLevel getLevel() {\n\t\treturn level;\n\t}\n\n\tpublic void setLevel(UserLevel level) {\n\t\tthis.level = level;\n\t}\n\n\tpublic Area getArea() {\n\t\treturn area;\n\t}\n\n\tpublic void setArea(Area area) {\n\t\tthis.area = area;\n\t}\n\n\tpublic String getTjcode() {\n\t\treturn tjcode;\n\t}\n\n\tpublic void setTjcode(String tjcode) {\n\t\tthis.tjcode = tjcode;\n\t}\n\t\n\tpublic Integer getActivity() {\n\t\treturn activity;\n\t}\n\n\tpublic void setActivity(Integer activity) {\n\t\tthis.activity = activity;\n\t}\n\n\t@XmlElement\n\tpublic String getIdentifier() {\n\t\tif (activity == null) {\n\t\t\treturn \"\u65b0\u4eba\";\n\t\t} else if (activity >= 2000) {\n\t\t\treturn \"\u7279\u7ea7\";\n\t\t} else if (activity >= 1000 && activity <= 1999) {\n\t\t\treturn \"\u9ad8\u7ea7\";\n\t\t} else if (activity >= 500 && activity <= 999) {\n\t\t\treturn \"\u4e2d\u7ea7\";\n\t\t} else if 
(activity >= 200 && activity <= 499) {\n\t\t\treturn \"\u521d\u7ea7\";\n\t\t}\n\t\treturn \"\u65b0\u4eba\";\n\t}\n\n\tpublic boolean isTeamwork() {\n\t\treturn teamwork;\n\t}\n\n\tpublic void setTeamwork(boolean teamwork) {\n\t\tthis.teamwork = teamwork;\n\t}\n\t\n\tpublic Integer getTjcoin() {\n\t\treturn tjcoin;\n\t}\n\n\tpublic void setTjcoin(Integer tjcoin) {\n\t\tthis.tjcoin = tjcoin;\n\t}\n\n\tpublic boolean isAcceptCoupon() {\n\t\treturn acceptCoupon;\n\t}\n\n\tpublic void setAcceptCoupon(boolean acceptCoupon) {\n\t\tthis.acceptCoupon = acceptCoupon;\n\t}\n\n\tpublic Integer getMoney() {\n\t\treturn money;\n\t}\n\n\tpublic void setMoney(Integer money) {\n\t\tthis.money = money;\n\t}\n\t\n}\n", "meta": {"content_hash": "09961152c255dd2b8ffd2e6fccc1ddbc", "timestamp": "", "source": "github", "line_count": 323, "max_line_length": 67, "avg_line_length": 17.340557275541794, "alnum_prop": 0.7064809855382967, "repo_name": "secondflying/dfyy", "id": "b7211ffdebc40d47216eaaef59f1a68cb1ada1fc", "size": "5625", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/dfyy/bussiness/User.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ActionScript", "bytes": "70896"}, {"name": "Batchfile", "bytes": "113"}, {"name": "CSS", "bytes": "215364"}, {"name": "Java", "bytes": "2098669"}, {"name": "JavaScript", "bytes": "1312338"}, {"name": "PHP", "bytes": "1193"}]}} {"text": "In order to translate the labels of the families and attributes, as well as some few other things, you only need to\ncreate a standard Symfony translation file with the proper syntax.\n\n| WARNING |\n| ------- |\n| Data translation is done using [*EAV* context](09-context.md) |\n\n### Translating Families\n\nThe label of a family can be directly provided in the configuration of the family by setting the \"label\" configuration\nkey. 
This is not recommended as it will not allow you to translate the attribute in multiple languages.\n\nThe translation system will look for this keys in order to translate a family:\n\n````yml\neav:\n family:\n :\n label:

\n \n
\n
\n
\n \u00ab Up\n

\n classical-realizability\n \n 8.7.0\n Not compatible\n \n

\n

(2020-07-19 14:21:56 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed # Synopsis\nbase-bigarray       base\nbase-num            base        Num library distributed with the OCaml compiler\nbase-threads        base\nbase-unix           base\ncamlp5              7.12        Preprocessor-pretty-printer of OCaml\nconf-findutils      1           Virtual package relying on findutils\ncoq                 8.5.1       Formal proof management system.\nnum                 0           The Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.05.0      The OCaml compiler (virtual package)\nocaml-base-compiler 4.05.0      Official 4.05.0 release\nocaml-config        1           OCaml Switch Configuration\n# opam file:\nopam-version: "2.0"\nmaintainer: "Hugo.Herbelin@inria.fr"\nhomepage: "https://github.com/coq-contribs/classical-realizability"\nlicense: "BSD"\nbuild: [make "-j%{jobs}%"]\ninstall: [make "install"]\nremove: ["rm" "-R" "%{lib}%/coq/user-contrib/ClassicalRealizability"]\ndepends: [\n  "ocaml"\n  "coq" {>= "8.7" & < "8.8~"}\n]\ntags: [ "keyword: classical realizability" "keyword: Krivine's realizability" "keyword: primitive datatype" "keyword: non determinism" "keyword: quote" "keyword: axiom of countable choice" "keyword: real numbers" "category: Mathematics/Logic/Foundations" ]\nauthors: [ "Lionel Rieg <lionel.rieg@ens-lyon.org>" ]\nbug-reports: "https://github.com/coq-contribs/classical-realizability/issues"\ndev-repo: "git+https://github.com/coq-contribs/classical-realizability.git"\nsynopsis: "Krivine's classical realizability"\ndescription: """\nThe aim of this Coq library is to provide a framework for checking\nproofs in Krivine's classical realizability for second-order Peano arithmetic.\nIt is designed to be as extensible as the original theory by Krivine and to\nsupport on-the-fly extensions by new instructions with their evaluation\nrules."""\nflags: light-uninstall\nurl {\n  src:\n    
"https://github.com/coq-contribs/classical-realizability/archive/v8.7.0.tar.gz"\n  checksum: "md5=6299c2ee7d52c1535eece3376983263c"\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-classical-realizability.8.7.0 coq.8.5.1
\n
Return code
\n
5120
\n
Output
\n
[NOTE] Package coq is already installed (current version is 8.5.1).\nThe following dependencies couldn't be met:\n  - coq-classical-realizability -> coq >= 8.7\nYour request can't be satisfied:\n  - No available version of coq satisfies the constraints\nNo solution found, exiting\n
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
opam remove -y coq; opam install -y --show-action --unlock-base coq-classical-realizability.8.7.0
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Install

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Installation size

\n

No files were installed.

\n

Uninstall

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub. \u00a9 Guillaume Claret.\n

\n
\n
\n \n \n \n\n", "meta": {"content_hash": "6953417c96203c08c70ca885216d50c6", "timestamp": "", "source": "github", "line_count": 168, "max_line_length": 340, "avg_line_length": 43.81547619047619, "alnum_prop": 0.5651406058959381, "repo_name": "coq-bench/coq-bench.github.io", "id": "7a2fb7424f897c58b824ccbe499d4bd7f93f33f2", "size": "7363", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.05.0-2.0.6/released/8.5.1/classical-realizability/8.7.0.html", "mode": "33188", "license": "mit", "language": []}} {"text": "import React from 'react'\nimport '!style!css!react-loading-bar/dist/index.css'\nexport default function Loading() {\n return (\n
\n \n Loading...\n
\n )\n}\n", "meta": {"content_hash": "79f42d9ca7021bd62d432c1472260f62", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 68, "avg_line_length": 27.9, "alnum_prop": 0.6415770609318996, "repo_name": "moimikey/react-boilerplate", "id": "0c6503845d209715b51d2b93dbf7650d78360700", "size": "279", "binary": false, "copies": "1", "ref": "refs/heads/redux", "path": "src/app/components/Loading.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "4797"}, {"name": "HTML", "bytes": "3946"}, {"name": "JavaScript", "bytes": "29033"}]}} {"text": "from __future__ import unicode_literals\nfrom django.conf import settings\nfrom django.contrib.contenttypes import generic\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.db import models\nfrom django.dispatch.dispatcher import receiver\nfrom model_utils.models import TimeStampedModel\nfrom registration.signals import user_registered\n\n_all = [\"Asset\", \"Profile\", \"user_registered_callback\"]\n__author__ = \"pmeier82\"\n\n\n# PROFILE\n\nclass Profile(models.Model):\n \"\"\"user profile model\"\"\"\n\n class Meta:\n app_label = \"base\"\n\n user = models.OneToOneField(\n settings.AUTH_USER_MODEL,\n unique=True)\n title = models.CharField(\n max_length=255,\n default=\"\",\n )\n affiliation = models.CharField(\n max_length=255,\n default=\"\",\n )\n research_area = models.TextField(\n default=\"\"\n )\n # TODO: build a meaningful profile\n\n # special\n def __str__(self):\n return self.user.username\n\n def __unicode__(self):\n return unicode(self.__str__())\n\n\ndef user_registered_callback(user, request, **kwargs):\n profile, is_new = Profile.objects.get_or_create(user=user)\n profile.title = request.POST.get(\"title\", \"test\")\n profile.save()\n\n\nuser_registered.connect(user_registered_callback)\n\n# ASSET\n\ndef UPLOAD_TO_HANDLER(obj, fname):\n folder = getattr(obj, \"kind\", \"default\")\n return \"{}/{}\".format(folder, fname)\n\n\nclass 
Asset(TimeStampedModel):\n \"\"\"generic file asset model\"\"\"\n\n class Meta:\n app_label = \"base\"\n\n UPLOAD_TO = \"default\"\n\n # fields\n name = models.CharField(max_length=255, unique=False)\n data_orig_name = models.CharField(max_length=255, unique=False)\n data = models.FileField(upload_to=UPLOAD_TO_HANDLER)\n kind = models.CharField(max_length=255, unique=False, null=False, default=UPLOAD_TO)\n\n # generic foreign key\n content_type = models.ForeignKey(ContentType, null=True)\n object_id = models.PositiveIntegerField(null=True)\n content_object = generic.GenericForeignKey()\n\n # special methods\n def __unicode__(self):\n return unicode(\"{}: {}\".format(self.__class__.__name__, self.name))\n\n # django special methods\n @models.permalink\n def get_absolute_url(self):\n return \"asset:serve\", (self.pk,), {}\n\n @models.permalink\n def get_delete_url(self):\n return \"asset:delete\", (self.pk,), {}\n\n # interface\n def save(self, *args, **kwargs):\n super(Asset, self).save(*args, **kwargs)\n\n def delete(self, *args, **kwargs):\n super(Asset, self).delete(*args, **kwargs)\n\n\n@receiver(models.signals.pre_delete, sender=Asset)\ndef asset_file_delete(sender, instance, **kwargs):\n instance.data.delete()\n\n\nif __name__ == \"__main__\":\n pass\n", "meta": {"content_hash": "85f7dee85ba6de3d5a267ea68a913604", "timestamp": "", "source": "github", "line_count": 107, "max_line_length": 88, "avg_line_length": 25.757009345794394, "alnum_prop": 0.6556603773584906, "repo_name": "pmeier82/spike_gnode", "id": "2411abbf4b4c2c62fd144aaa9261d587c405219b", "size": "2781", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "base/models.py", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "CSS", "bytes": "11360"}, {"name": "HTML", "bytes": "56070"}, {"name": "JavaScript", "bytes": "734"}, {"name": "Nginx", "bytes": "4758"}, {"name": "Python", "bytes": "37001"}, {"name": "Shell", "bytes": "2475"}]}} {"text": "\npackage 
org.eclipse.lsp4j;\n\nimport java.util.List;\nimport org.eclipse.lsp4j.Position;\nimport org.eclipse.lsp4j.TextDocumentIdentifier;\nimport org.eclipse.lsp4j.WorkDoneProgressAndPartialResultParams;\nimport org.eclipse.lsp4j.jsonrpc.validation.NonNull;\nimport org.eclipse.lsp4j.util.Preconditions;\nimport org.eclipse.xtext.xbase.lib.Pure;\nimport org.eclipse.xtext.xbase.lib.util.ToStringBuilder;\n\n/**\n * A parameter literal used in selection range requests.\n *

\n * Since 3.15.0\n */\n@SuppressWarnings(\"all\")\npublic class SelectionRangeParams extends WorkDoneProgressAndPartialResultParams {\n /**\n * The text document.\n */\n @NonNull\n private TextDocumentIdentifier textDocument;\n \n /**\n * The positions inside the text document.\n */\n @NonNull\n private List positions;\n \n public SelectionRangeParams() {\n }\n \n public SelectionRangeParams(@NonNull final TextDocumentIdentifier textDocument, @NonNull final List positions) {\n this.textDocument = Preconditions.checkNotNull(textDocument, \"textDocument\");\n this.positions = Preconditions.>checkNotNull(positions, \"positions\");\n }\n \n /**\n * The text document.\n */\n @Pure\n @NonNull\n public TextDocumentIdentifier getTextDocument() {\n return this.textDocument;\n }\n \n /**\n * The text document.\n */\n public void setTextDocument(@NonNull final TextDocumentIdentifier textDocument) {\n this.textDocument = Preconditions.checkNotNull(textDocument, \"textDocument\");\n }\n \n /**\n * The positions inside the text document.\n */\n @Pure\n @NonNull\n public List getPositions() {\n return this.positions;\n }\n \n /**\n * The positions inside the text document.\n */\n public void setPositions(@NonNull final List positions) {\n this.positions = Preconditions.checkNotNull(positions, \"positions\");\n }\n \n @Override\n @Pure\n public String toString() {\n ToStringBuilder b = new ToStringBuilder(this);\n b.add(\"textDocument\", this.textDocument);\n b.add(\"positions\", this.positions);\n b.add(\"workDoneToken\", getWorkDoneToken());\n b.add(\"partialResultToken\", getPartialResultToken());\n return b.toString();\n }\n \n @Override\n @Pure\n public boolean equals(final Object obj) {\n if (this == obj)\n return true;\n if (obj == null)\n return false;\n if (getClass() != obj.getClass())\n return false;\n if (!super.equals(obj))\n return false;\n SelectionRangeParams other = (SelectionRangeParams) obj;\n if (this.textDocument == null) {\n if (other.textDocument != null)\n 
return false;\n } else if (!this.textDocument.equals(other.textDocument))\n return false;\n if (this.positions == null) {\n if (other.positions != null)\n return false;\n } else if (!this.positions.equals(other.positions))\n return false;\n return true;\n }\n \n @Override\n @Pure\n public int hashCode() {\n final int prime = 31;\n int result = super.hashCode();\n result = prime * result + ((this.textDocument== null) ? 0 : this.textDocument.hashCode());\n return prime * result + ((this.positions== null) ? 0 : this.positions.hashCode());\n }\n}\n", "meta": {"content_hash": "1fc19c21c3959ccaf37336d3bc383346", "timestamp": "", "source": "github", "line_count": 116, "max_line_length": 124, "avg_line_length": 27.396551724137932, "alnum_prop": 0.6913152926368785, "repo_name": "smarr/SOMns-vscode", "id": "ca95621514251184457f1372dec09436068539dd", "size": "3598", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "server/org.eclipse.lsp4j-gen/org/eclipse/lsp4j/SelectionRangeParams.java", "mode": "33188", "license": "mit", "language": [{"name": "ANTLR", "bytes": "12820"}, {"name": "Java", "bytes": "1782848"}, {"name": "Shell", "bytes": "182"}, {"name": "Slash", "bytes": "241"}, {"name": "TypeScript", "bytes": "26804"}, {"name": "Vim Script", "bytes": "3730"}, {"name": "Xtend", "bytes": "197426"}]}} {"text": "package com.github.mortido.extra.behavioral.nullObject;\n\npublic class NullUser extends User {\n @Override\n public String getName() {\n return \"User doesn't exist\";\n }\n\n @Override\n public int getId() {\n return -1;\n }\n}\n", "meta": {"content_hash": "13680e8fb0ec1fd4f5088b9f8c0ac55a", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 55, "avg_line_length": 19.076923076923077, "alnum_prop": 0.625, "repo_name": "mortido/DesignPatterns-Java", "id": "56cf92ed5e528869f70fcc4d8bd985ba8f3b2836", "size": "248", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": 
"src/com/github/mortido/extra/behavioral/nullObject/NullUser.java", "mode": "33188", "license": "mit", "language": [{"name": "Java", "bytes": "72591"}]}} {"text": "import { Arr, Optional } from '@ephox/katamari';\nimport { SugarElement } from '@ephox/sugar';\n\nimport * as Structs from '../api/Structs';\nimport * as GridRow from '../model/GridRow';\nimport { CellElement, CompElm } from '../util/TableTypes';\n\ntype Subst = () => SugarElement;\n\n// substitution: () -> item\nconst merge = (grid: Structs.RowCells[], bounds: Structs.Bounds, comparator: CompElm, substitution: Subst): Structs.RowCells[] => {\n const rows = GridRow.extractGridDetails(grid).rows;\n // Mutating. Do we care about the efficiency gain?\n if (rows.length === 0) {\n return grid;\n }\n for (let i = bounds.startRow; i <= bounds.finishRow; i++) {\n for (let j = bounds.startCol; j <= bounds.finishCol; j++) {\n // We can probably simplify this again now that we aren't reusing merge.\n const row = rows[i];\n const isLocked = GridRow.getCell(row, j).isLocked;\n GridRow.mutateCell(row, j, Structs.elementnew(substitution(), false, isLocked));\n }\n }\n return grid;\n};\n\n// substitution: () -> item\nconst unmerge = (grid: Structs.RowCells[], target: SugarElement, comparator: CompElm, substitution: Subst): Structs.RowCells[] => {\n const rows = GridRow.extractGridDetails(grid).rows;\n // Mutating. 
Do we care about the efficiency gain?\n let first = true;\n // tslint:disable-next-line:prefer-for-of\n for (let i = 0; i < rows.length; i++) {\n for (let j = 0; j < GridRow.cellLength(rows[0]); j++) {\n const row = rows[i];\n const currentCell = GridRow.getCell(row, j);\n const currentCellElm = currentCell.element;\n const isToReplace = comparator(currentCellElm, target);\n\n if (isToReplace && !first) {\n GridRow.mutateCell(row, j, Structs.elementnew(substitution(), true, currentCell.isLocked));\n } else if (isToReplace) {\n first = false;\n }\n }\n }\n return grid;\n};\n\nconst uniqueCells = (row: Structs.ElementNew[], comparator: CompElm): Structs.ElementNew[] => {\n return Arr.foldl(row, (rest, cell) => {\n return Arr.exists(rest, (currentCell) => {\n return comparator(currentCell.element, cell.element);\n }) ? rest : rest.concat([ cell ]);\n }, [] as Structs.ElementNew[]);\n};\n\nconst splitCols = (grid: Structs.RowCells[], index: number, comparator: CompElm, substitution: Subst): Structs.RowCells[] => {\n // We don't need to split rows if we're inserting at the first or last row of the old table\n if (index > 0 && index < grid[0].cells.length) {\n Arr.each(grid, (row) => {\n const prevCell = row.cells[index - 1];\n let offset = 0;\n const substitute = substitution();\n\n while (row.cells.length > index + offset && comparator(prevCell.element, row.cells[index + offset].element)) {\n GridRow.mutateCell(row, index + offset, Structs.elementnew(substitute, true, row.cells[index + offset].isLocked));\n offset++;\n }\n });\n }\n\n return grid;\n};\n\nconst splitRows = (grid: Structs.RowCells[], index: number, comparator: CompElm, substitution: Subst): Structs.RowCells[] => {\n // We don't need to split rows if we're inserting at the first or last row of the old table\n const rows = GridRow.extractGridDetails(grid).rows;\n if (index > 0 && index < rows.length) {\n const rowPrevCells = rows[index - 1].cells;\n const cells = uniqueCells(rowPrevCells, comparator);\n 
Arr.each(cells, (cell) => {\n // only make a sub when we have to\n let replacement = Optional.none>();\n for (let i = index; i < rows.length; i++) {\n for (let j = 0; j < GridRow.cellLength(rows[0]); j++) {\n const row = rows[i];\n const current = GridRow.getCell(row, j);\n const isToReplace = comparator(current.element, cell.element);\n\n if (isToReplace) {\n if (replacement.isNone()) {\n replacement = Optional.some(substitution());\n }\n replacement.each((sub) => {\n GridRow.mutateCell(row, j, Structs.elementnew(sub, true, current.isLocked));\n });\n }\n }\n }\n });\n }\n\n return grid;\n};\n\nexport {\n merge,\n unmerge,\n splitCols,\n splitRows\n};\n", "meta": {"content_hash": "ca65adc511ae0aedbdd6e75bf700d63b", "timestamp": "", "source": "github", "line_count": 113, "max_line_length": 144, "avg_line_length": 37.13274336283186, "alnum_prop": 0.6348903717826502, "repo_name": "tinymce/tinymce", "id": "98e0abc2909ea0a3267c1a8d7d87f92f13e3584d", "size": "4196", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "modules/snooker/src/main/ts/ephox/snooker/operate/MergingOperations.ts", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "9733"}, {"name": "HTML", "bytes": "183264"}, {"name": "JavaScript", "bytes": "117530"}, {"name": "Less", "bytes": "182379"}, {"name": "TypeScript", "bytes": "11764279"}]}} {"text": "\n\n", "meta": {"content_hash": "6b3dadea3ba647025bc678e04d6487e0", "timestamp": "", "source": "github", "line_count": 2, "max_line_length": 54, "avg_line_length": 47.0, "alnum_prop": 0.648936170212766, "repo_name": "RBMHTechnology/ttt", "id": "af62bb6ae91ead9584202716b1b01ca7703b5c03", "size": "94", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "ttt-ttv/src/main/resources/com/skynav/ttv/app/config.xml", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "CSS", "bytes": "22996"}, {"name": "HTML", "bytes": "2348"}, {"name": "Java", "bytes": "2833329"}, {"name": "Shell", "bytes": 
"2808"}, {"name": "XSLT", "bytes": "5232"}]}} {"text": "from __future__ import print_function\nfrom __future__ import unicode_literals\nfrom __future__ import division\n\nimport json\n\nfrom django.conf import settings\nfrom django.contrib.gis.geos import Polygon\nfrom django.core.urlresolvers import reverse\nfrom django.db import transaction\nfrom django.db.models import Q\nfrom django.http import HttpResponseRedirect, HttpResponseForbidden, Http404\nfrom django.shortcuts import get_object_or_404\nfrom django.utils.timezone import now\n\nfrom libs.data import merge\nfrom libs.formatters import humanize_bytes\nfrom libs.pdf_maps import create_event_map_pdf\nfrom libs.sql import get_group_tree_count\n\nfrom apps.core.helpers import (user_is_group_admin,\n user_is_eligible_to_become_trusted_mapper)\nfrom apps.core.decorators import group_request\nfrom apps.core.models import Group\n\nfrom apps.mail.views import notify_group_mapping_approved\n\nfrom apps.users.models import Follow, TrustedMapper\nfrom apps.users.forms import GroupSettingsForm\n\nfrom apps.survey.models import Territory, Survey, Blockface\nfrom apps.survey.layer_context import (get_context_for_territory_layer,\n get_context_for_territory_admin_layer)\n\nfrom apps.event.models import Event, EventRegistration\nfrom apps.event.event_list import EventList\n\nGROUP_EVENTS_ID = 'group-events'\nGROUP_EDIT_EVENTS_TAB_ID = 'events'\n\n\ndef group_list_page(request):\n # TODO: pagination\n groups = Group.objects.filter(is_active=True).order_by('name')\n group_ids = Follow.objects.filter(user_id=request.user.id) \\\n .values_list('group_id', flat=True)\n user_is_following = [group.id in group_ids for group in groups]\n\n group_infos = zip(groups, user_is_following)\n return {\n 'groups': group_infos,\n 'groups_count': len(group_infos),\n }\n\n\n@group_request\ndef _group_events(request):\n qs = Event.objects.filter(group=request.group, is_private=False)\n user_can_edit_group = user_is_group_admin(request.user,\n 
request.group)\n extra_context = {'user_can_edit_group': user_can_edit_group,\n 'group_slug': request.group.slug}\n return qs, extra_context\n\n\ngroup_detail_events = EventList(\n _group_events,\n name=\"group_detail_events\",\n template_path='groups/partials/detail_event_list.html')\n\n\ngroup_edit_events = EventList(\n _group_events,\n name=\"group_edit_events\",\n template_path='groups/partials/edit_event_list.html')\n\n\ndef group_detail(request):\n user = request.user\n group = request.group\n\n if not user_is_group_admin(user, group) and not request.group.is_active:\n raise Http404('Must be a group admin to view an inactive group')\n\n event_list = (group_detail_events\n .configure(chunk_size=2,\n active_filter=EventList.Filters.CURRENT,\n filterset_name=EventList.chronoFilters)\n .as_context(request, group_slug=group.slug))\n user_is_following = Follow.objects.filter(user_id=request.user.id,\n group=group).exists()\n\n show_mapper_request = user_is_eligible_to_become_trusted_mapper(user,\n group)\n\n follow_count = Follow.objects.filter(group=group).count()\n tree_count = get_group_tree_count(group)\n\n group_blocks = Territory.objects \\\n .filter(group=group) \\\n .values_list('blockface_id', flat=True)\n\n group_blocks_count = group_blocks.count()\n\n if group_blocks_count > 0:\n completed_blocks = Survey.objects \\\n .filter(blockface_id__in=group_blocks) \\\n .distinct('blockface')\n block_percent = \"{:.1%}\".format(\n float(completed_blocks.count()) / float(group_blocks.count()))\n else:\n block_percent = \"0.0%\"\n\n events_held = Event.objects.filter(group=group, ends_at__lt=now())\n num_events_held = events_held.count()\n\n num_event_attendees = EventRegistration.objects \\\n .filter(event__in=events_held) \\\n .filter(did_attend=True) \\\n .count()\n\n return {\n 'group': group,\n 'event_list': event_list,\n 'user_is_following': user_is_following,\n 'edit_url': reverse('group_edit', kwargs={'group_slug': group.slug}),\n 'show_mapper_request': 
show_mapper_request,\n 'counts': {\n 'tree': tree_count,\n 'block': block_percent,\n 'event': num_events_held,\n 'attendees': num_event_attendees,\n 'follows': follow_count\n },\n 'group_events_id': GROUP_EVENTS_ID,\n 'layer': get_context_for_territory_layer(request, request.group.id),\n 'territory_bounds': _group_territory_bounds(request.group),\n 'render_follow_button_without_count': request.POST.get(\n 'render_follow_button_without_count', False)\n }\n\n\ndef redirect_to_group_detail(request):\n return HttpResponseRedirect(\n reverse('group_detail', kwargs={\n 'group_slug': request.group.slug\n }))\n\n\ndef _group_territory_bounds(group):\n blockfaces = Blockface.objects \\\n .filter(territory__group=group) \\\n .collect()\n\n if blockfaces:\n return list(blockfaces.extent)\n else:\n return None\n\n\ndef edit_group(request, form=None):\n group = request.group\n if not form:\n form = GroupSettingsForm(instance=request.group, label_suffix='')\n event_list = (group_edit_events\n .configure(chunk_size=2,\n active_filter=EventList.Filters.CURRENT,\n filterset_name=EventList.chronoFilters)\n .as_context(request, group_slug=group.slug))\n pending_mappers = TrustedMapper.objects.filter(group=request.group,\n is_approved__isnull=True)\n all_mappers = TrustedMapper.objects.filter(group=request.group,\n is_approved__isnull=False)\n return {\n 'group': group,\n 'event_list': event_list,\n 'form': form,\n 'group_slug': group.slug,\n 'max_image_size': humanize_bytes(\n settings.MAX_GROUP_IMAGE_SIZE_IN_BYTES, 0),\n 'pending_mappers': pending_mappers,\n 'all_mappers': all_mappers,\n 'group_edit_events_tab_id': GROUP_EDIT_EVENTS_TAB_ID,\n }\n\n\ndef update_group_settings(request):\n form = GroupSettingsForm(request.POST, request.FILES,\n instance=request.group)\n if form.is_valid():\n form.save()\n return HttpResponseRedirect(request.group.get_absolute_url())\n else:\n return edit_group(request, form=form)\n\n\ndef follow_group(request):\n 
Follow.objects.get_or_create(user_id=request.user.id, group=request.group)\n return group_detail(request)\n\n\ndef unfollow_group(request):\n Follow.objects.filter(user_id=request.user.id, group=request.group) \\\n .delete()\n return group_detail(request)\n\n\ndef start_group_map_print_job(request):\n # TODO: implement\n pass\n\n\ndef give_user_mapping_priveleges(request, username):\n mapper_context = _grant_mapping_access(request.group, username,\n is_approved=True)\n mail_context = notify_group_mapping_approved(request, request.group,\n username)\n return merge(mapper_context, mail_context)\n\n\ndef remove_user_mapping_priveleges(request, username):\n return _grant_mapping_access(request.group, username, is_approved=False)\n\n\ndef _grant_mapping_access(group, username, is_approved):\n mapper, created = TrustedMapper.objects.update_or_create(\n group=group,\n user__username=username,\n defaults={'is_approved': is_approved})\n return {\n 'mapper': mapper\n }\n\n\ndef request_mapper_status(request):\n user, group = request.user, request.group\n if not user_is_eligible_to_become_trusted_mapper(user, group):\n return HttpResponseForbidden()\n mapper, created = TrustedMapper.objects.update_or_create(\n group=group, user=user)\n return {\n 'success': True\n }\n\n\ndef group_unmapped_territory_geojson(request, group_id):\n # Get unmapped blockfaces\n blockfaces = Blockface.objects.filter(is_available=True)\n\n my_territory_q = Q(territory__group_id=group_id)\n\n if request.body:\n # Get potentially selectable blockfaces in polygon\n # (those in my territory or unclaimed)\n point_list = json.loads(request.body)\n point_list.append(point_list[0]) # Close the polygon\n polygon = Polygon((point_list))\n\n no_reservations_q = \\\n Q(blockfacereservation__isnull=True) \\\n | Q(blockfacereservation__canceled_at__isnull=False) \\\n | Q(blockfacereservation__expires_at__lt=now())\n nobodys_territory_q = Q(territory__group_id=None)\n unclaimed_q = no_reservations_q & 
nobodys_territory_q\n\n blockfaces = blockfaces \\\n .filter(geom__within=polygon) \\\n .filter(my_territory_q | unclaimed_q) \\\n .distinct()\n\n # Return just blockface data\n # (skipping expensive queries to make tiler URLs)\n return _make_blockface_data_result(blockfaces)\n\n else:\n # Get all blockfaces in group's territory\n blockfaces = blockfaces.filter(my_territory_q)\n return _make_blockface_and_tiler_urls_result(\n request, blockfaces, group_id)\n\n\ndef group_update_territory(request, group_id):\n group = get_object_or_404(Group, id=group_id)\n\n _update_territory(group, request)\n\n # Recreate PDF maps to show updated group territory\n _update_event_maps(request, group)\n\n result_blockfaces = Blockface.objects.filter(territory__group=group)\n return _make_blockface_and_tiler_urls_result(\n request, result_blockfaces, group_id)\n\n\n@transaction.atomic\ndef _update_territory(group, request):\n new_block_ids = set([int(id) for id in json.loads(request.body)])\n old_block_ids = set(Territory.objects\n .filter(group=group)\n .values_list('blockface_id', flat=True))\n ids_to_add = new_block_ids - old_block_ids\n ids_to_kill = old_block_ids - new_block_ids\n # Make sure no unavailable or already-assigned blocks slipped in\n filtered_ids_to_add = Blockface.objects \\\n .filter(id__in=ids_to_add) \\\n .filter(is_available=True) \\\n .filter(territory=None) \\\n .values_list('id', flat=True)\n new_territory = [Territory(group=group, blockface_id=id)\n for id in filtered_ids_to_add]\n Territory.objects.bulk_create(new_territory)\n Territory.objects \\\n .filter(blockface_id__in=ids_to_kill) \\\n .delete()\n\n\ndef _update_event_maps(request, group):\n events = Event.objects \\\n .filter(group_id=group.id, begins_at__gt=now()) \\\n .select_related('group')\n for event in events:\n create_event_map_pdf(request, event)\n\n\ndef _make_blockface_and_tiler_urls_result(request, blockfaces, group_id):\n result = {\n 'blockDataList': 
_make_blockface_data_result(blockfaces),\n 'tilerUrls': get_context_for_territory_admin_layer(request, group_id)\n }\n return result\n\n\ndef _make_blockface_data_result(blockfaces):\n block_data_list = [{'id': bf.id, 'geojson': bf.geom.json}\n for bf in blockfaces]\n return block_data_list\n", "meta": {"content_hash": "245a3f944cd4eac948a49263f01aee37", "timestamp": "", "source": "github", "line_count": 336, "max_line_length": 78, "avg_line_length": 34.25595238095238, "alnum_prop": 0.635881841876629, "repo_name": "RickMohr/nyc-trees", "id": "0430d00d3c50a470409f54b8f021ae97848c2ee7", "size": "11534", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "src/nyc_trees/apps/users/views/group.py", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "171372"}, {"name": "CartoCSS", "bytes": "878"}, {"name": "HTML", "bytes": "157969"}, {"name": "JavaScript", "bytes": "286316"}, {"name": "Makefile", "bytes": "1524"}, {"name": "PLpgSQL", "bytes": "3210"}, {"name": "Python", "bytes": "404021"}, {"name": "Shell", "bytes": "23399"}]}} {"text": "function configure() {\n # List n-application using most space\n # @param $1|size number of biggest application to list\n # @return void\n function du-app() {\n size=\"${1:-10}\"\n dpkg-query --show --showformat=\"\\${Package;-50}\\t\\${Installed-Size}\\n\" \\\n | sort -k 2 -n \\\n | grep -v deinstall \\\n | awk '{printf \"%.1f MB \\t %s\\n\", $2/(1024), $1}' \\\n | tail -n \"$size\"\n }\n\n # human-readable sizes\n function df() { \n command df \\\n --human-readable \\\n --print-type \\\n --exclude-type squashfs \\\n --exclude-type tmpfs \\\n --exclude-type devtmpfs \\\n \"$@\"\n }\n\n # show sizes in MB\n function free() { command free -m; }\n}\nconfigure\n", "meta": {"content_hash": "73da57189a7a28882b91e1fe9a32a92a", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 80, "avg_line_length": 27.357142857142858, "alnum_prop": 0.49216710182767626, "repo_name": 
"edouard-lopez/dotfiles", "id": "4d9fd2c0bf074292bae2b10a9a53a4d841befd56", "size": "787", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "includes/posix/monitoring.bash", "mode": "33188", "license": "mit", "language": [{"name": "Python", "bytes": "63"}, {"name": "Shell", "bytes": "38660"}, {"name": "Vim Script", "bytes": "5525"}]}} {"text": "\npackage org.ops4j.pax.exam.sample2.model;\n\nimport javax.persistence.Entity;\nimport javax.persistence.GeneratedValue;\nimport javax.persistence.GenerationType;\nimport javax.persistence.Id;\nimport javax.persistence.Lob;\nimport javax.persistence.ManyToOne;\nimport javax.persistence.Table;\n\n@Entity\n@Table(name = \"rating\")\npublic class Rating {\n\n @Id\n @GeneratedValue(strategy = GenerationType.AUTO)\n private int id;\n\n private int stars;\n\n @Lob\n private String comment;\n\n @ManyToOne\n private User user;\n @ManyToOne\n private Movie movie;\n\n /**\n * @return the id\n */\n public int getId() {\n return id;\n }\n\n /**\n * @param id\n * the id to set\n */\n public void setId(int id) {\n this.id = id;\n }\n\n /**\n * @return the stars\n */\n public int getStars() {\n return stars;\n }\n\n /**\n * @param stars\n * the stars to set\n */\n public void setStars(int stars) {\n this.stars = stars;\n }\n\n /**\n * @return the comment\n */\n public String getComment() {\n return comment;\n }\n\n /**\n * @param comment\n * the comment to set\n */\n public void setComment(String comment) {\n this.comment = comment;\n }\n\n /**\n * @return the user\n */\n public User getUser() {\n return user;\n }\n\n /**\n * @param user\n * the user to set\n */\n public void setUser(User user) {\n this.user = user;\n }\n\n /**\n * @return the movie\n */\n public Movie getMovie() {\n return movie;\n }\n\n /**\n * @param movie\n * the movie to set\n */\n public void setMovie(Movie movie) {\n this.movie = movie;\n }\n}\n", "meta": {"content_hash": "4901789fd2534feffdc9e9d6efb0ceb9", "timestamp": "", "source": "github", 
"line_count": 104, "max_line_length": 51, "avg_line_length": 17.009615384615383, "alnum_prop": 0.5313736574335783, "repo_name": "bimargulies/org.ops4j.pax.exam2", "id": "0188ac1c47c0896fbf7d103e1a14246e8d489d25", "size": "2379", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "samples/pax-exam-sample2-model/src/main/java/org/ops4j/pax/exam/sample2/model/Rating.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "1271799"}, {"name": "Shell", "bytes": "198"}]}} {"text": "\n//\n// MODULE: JdbcDebug.cpp\n//\n#include \"org_apache_trafodion_jdbc_t2_JdbcDebug.h\"\n#include \"Debug.h\"\n\n/*\n * Class: org_apache_trafodion_jdbc_t2_JdbcDebug\n * Method: getDebugHandle\n * Signature: (I)I\n */\nJNIEXPORT jlong JNICALL Java_org_apache_trafodion_jdbc_t2_JdbcDebug_getDebugHandle(JNIEnv *jenv, jclass jcls,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t jlong method_name_handle)\n{\n\tjlong handle = 0;\n#if defined(_BENCHMARK)\n\thandle = (jlong) new Benchmark((const char *) method_name_handle);\n#endif /* _BENCHMARK */\n\treturn(handle);\n}\n\n/*\n * Class: org_apache_trafodion_jdbc_t2_JdbcDebug\n * Method: getMethodNameHandle\n * Signature: (Ljava/lang/String;)I\n */\nJNIEXPORT jlong JNICALL Java_org_apache_trafodion_jdbc_t2_JdbcDebug_getMethodNameHandle(JNIEnv *jenv, jclass jcls,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t jstring method_name)\n{\n\tjlong rc = 0;\n#if defined(_DEBUG) || defined(_BENCHMARK)\n\tif (method_name) rc = (jlong) jenv->GetStringUTFChars(method_name,NULL);\n\tif (rc==0) rc = (jlong) \"Unknown\";\n#endif /* _DEBUG || _BENCHMARK */\n\treturn(rc);\n}\n\n/*\n * Class: org_apache_trafodion_jdbc_t2_JdbcDebug\n * Method: methodEntry\n * Signature: (III)V\n */\nJNIEXPORT void JNICALL Java_org_apache_trafodion_jdbc_t2_JdbcDebug_methodEntry(JNIEnv *jenv, jclass jcls,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t jlong debug_handle,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t jint debug_level,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t jlong 
method_name_handle)\n{\n#if defined(_DEBUG)\n\tDebugFunctionEntry((const char *) method_name_handle, debug_level, NULL , NULL, 0);\n#endif /* _DEBUG */\n#if defined(_BENCHMARK)\n\t((Benchmark *) debug_handle)->Entry();\n#endif /* _BENCHMARK */\n}\n\n/*\n * Class: org_apache_trafodion_jdbc_t2_JdbcDebug\n * Method: methodReturn\n * Signature: (ILjava/lang/String;)V\n */\nJNIEXPORT void JNICALL Java_org_apache_trafodion_jdbc_t2_JdbcDebug_methodReturn(JNIEnv *jenv, jclass jcls,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tjlong debug_handle,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tjstring comment)\n{\n#if defined(_DEBUG)\n\tconst char *commentStr;\n\tif (comment) commentStr = jenv->GetStringUTFChars(comment,NULL);\n\telse commentStr = NULL;\n\tDebugFunctionReturn(\"JAVA\", commentStr, false, \"RETURNING\", NULL, 0);\n\tif (commentStr) jenv->ReleaseStringUTFChars(comment,commentStr);\n#endif /* _DEBUG */\n}\n\n/*\n * Class: org_apache_trafodion_jdbc_t2_JdbcDebug\n * Method: methodExit\n * Signature: (I)V\n */\nJNIEXPORT void JNICALL Java_org_apache_trafodion_jdbc_t2_JdbcDebug_methodExit(JNIEnv *jenv, jclass jcls,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t jlong debug_handle)\n{\n#if defined(_DEBUG)\n\tDebugFunctionReturn(\"JAVA\", NULL, true, \"EXITING\", NULL, 0);\n#endif /* _DEBUG */\n\n#if defined(_BENCHMARK)\n\t((Benchmark *) debug_handle)->Exit();\n#endif /* _BENCHMARK */\n}\n\n/*\n * Class: org_apache_trafodion_jdbc_t2_JdbcDebug\n * Method: traceOut\n * Signature: (IILjava/lang/String;)V\n */\nJNIEXPORT void JNICALL Java_org_apache_trafodion_jdbc_t2_JdbcDebug_traceOut(JNIEnv *jenv, jclass jcls,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tjlong debug_handle,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tjint debug_level,\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tjstring comment)\n{\n#if defined(_DEBUG)\n\tconst char *commentStr;\n\tif (comment) commentStr = jenv->GetStringUTFChars(comment,NULL);\n\telse commentStr = NULL;\n\tif (DebugActive(debug_level,NULL,0)) DebugOutput(commentStr , NULL, 0);\n\tif (commentStr) 
jenv->ReleaseStringUTFChars(comment,commentStr);\n#endif /* _DEBUG */\n}\n", "meta": {"content_hash": "1abd718dce3bc752ab0955f9348f9b61", "timestamp": "", "source": "github", "line_count": 109, "max_line_length": 114, "avg_line_length": 29.853211009174313, "alnum_prop": 0.6717885679164106, "repo_name": "mashengchen/incubator-trafodion", "id": "25ae2a001185445481545ef9e80b702621cbfedb", "size": "4264", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "core/conn/jdbc_type2/native/JdbcDebug.cpp", "mode": "33188", "license": "apache-2.0", "language": [{"name": "1C Enterprise", "bytes": "27762"}, {"name": "Awk", "bytes": "20971"}, {"name": "Batchfile", "bytes": "27013"}, {"name": "C", "bytes": "18877152"}, {"name": "C++", "bytes": "68786466"}, {"name": "CSS", "bytes": "99092"}, {"name": "GDB", "bytes": "62692"}, {"name": "Groff", "bytes": "46673"}, {"name": "HTML", "bytes": "4618"}, {"name": "Inno Setup", "bytes": "14579"}, {"name": "Java", "bytes": "12232679"}, {"name": "JavaScript", "bytes": "883279"}, {"name": "LLVM", "bytes": "42952"}, {"name": "Makefile", "bytes": "321639"}, {"name": "Objective-C", "bytes": "637659"}, {"name": "PHP", "bytes": "8438"}, {"name": "PLpgSQL", "bytes": "197622"}, {"name": "Perl", "bytes": "549860"}, {"name": "Protocol Buffer", "bytes": "121282"}, {"name": "Python", "bytes": "330201"}, {"name": "QMake", "bytes": "3622"}, {"name": "Ruby", "bytes": "8053"}, {"name": "SQLPL", "bytes": "60330"}, {"name": "Shell", "bytes": "1999068"}, {"name": "Tcl", "bytes": "2763"}, {"name": "XSLT", "bytes": "6100"}, {"name": "Yacc", "bytes": "1368077"}]}} {"text": "\npackage com.alibaba.dubbo.common.serialize.support.java;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStream;\n\nimport com.alibaba.dubbo.common.URL;\nimport com.alibaba.dubbo.common.serialize.ObjectInput;\nimport com.alibaba.dubbo.common.serialize.ObjectOutput;\nimport com.alibaba.dubbo.common.serialize.Serialization;\n\n/**\n * @author 
ding.lid\n */\npublic class CompactedJavaSerialization implements Serialization {\n\n public byte getContentTypeId() {\n return 4;\n }\n\n public String getContentType() {\n return \"x-application/compactedjava\";\n }\n\n public ObjectOutput serialize(URL url, OutputStream out) throws IOException {\n return new JavaObjectOutput(out, true);\n }\n\n public ObjectInput deserialize(URL url, InputStream is) throws IOException {\n return new JavaObjectInput(is, true);\n }\n\n}", "meta": {"content_hash": "51165a6abb524dd739da671e283beb68", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 81, "avg_line_length": 25.61764705882353, "alnum_prop": 0.7416762342135477, "repo_name": "kaven12/dubboo", "id": "f0a0f0a249f804c6984b2925e343fb0702f0600c", "size": "1480", "binary": false, "copies": "20", "ref": "refs/heads/master", "path": "dubbo-common/src/main/java/com/alibaba/dubbo/common/serialize/support/java/CompactedJavaSerialization.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "3343"}, {"name": "CSS", "bytes": "18582"}, {"name": "Java", "bytes": "5237817"}, {"name": "JavaScript", "bytes": "63151"}, {"name": "Lex", "bytes": "2077"}, {"name": "Shell", "bytes": "7011"}, {"name": "Thrift", "bytes": "668"}]}} {"text": "package com.jxd.android.bookinventtory.bean;\n\nimport io.realm.RealmModel;\nimport io.realm.RealmObject;\nimport io.realm.annotations.RealmClass;\n\n/**\n * \u56fe\u4e66\u76d8\u70b9\n * Created by Administrator on 2017/10/17.\n */\n@RealmClass\npublic class ShelfBookScanBean implements RealmModel {\n\n private String title;\n private String barcode;\n private String shelfno;\n private String updatetime;\n private String uid;\n private String callno;\n private int inshelf;\n private String status;\n private String machine_mac;\n\n /**\n * \u76d8\u70b9\u72b6\u6001\n */\n private String scanStatus;\n\n public String getTitle() {\n return title;\n }\n\n public void setTitle(String title) {\n 
this.title = title;\n }\n\n public String getBarcode() {\n return barcode;\n }\n\n public void setBarcode(String barcode) {\n this.barcode = barcode;\n }\n\n public String getShelfno() {\n return shelfno;\n }\n\n public void setShelfno(String shelfno) {\n this.shelfno = shelfno;\n }\n\n public String getUpdatetime() {\n return updatetime;\n }\n\n public void setUpdatetime(String updatetime) {\n this.updatetime = updatetime;\n }\n\n public String getUid() {\n return uid;\n }\n\n public void setUid(String uid) {\n this.uid = uid;\n }\n\n public String getCallno() {\n return callno;\n }\n\n public void setCallno(String callno) {\n this.callno = callno;\n }\n\n public int getInshelf() {\n return inshelf;\n }\n\n public void setInshelf(int inshelf) {\n this.inshelf = inshelf;\n }\n\n public String getMachine_mac() {\n return machine_mac;\n }\n\n public void setMachine_mac(String machine_mac) {\n this.machine_mac = machine_mac;\n }\n\n public String getStatus() {\n return status;\n }\n\n public void setStatus(String status) {\n this.status = status;\n }\n\n public String getScanStatus() {\n return scanStatus;\n }\n\n public void setScanStatus(String scanStatus) {\n this.scanStatus = scanStatus;\n }\n\n public void transfor(BookBean bookBean){\n this.setBarcode( bookBean.getBarcode() );\n this.setCallno(bookBean.getCallno());\n this.setInshelf(bookBean.getInshelf());\n this.setMachine_mac(bookBean.getMachine_mac());\n this.setTitle(bookBean.getTitle());\n this.setUid(bookBean.getUid());\n this.setShelfno(bookBean.getShelfno());\n this.setStatus( bookBean.getStatus() );\n this.setUpdatetime(bookBean.getUpdatetime());\n }\n\n @Override\n public int hashCode() {\n return super.hashCode();\n }\n\n @Override\n public boolean equals(Object obj) {\n return super.equals(obj);\n }\n}\n", "meta": {"content_hash": "1cc8e2853ac762829180de45eeec8132", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 55, "avg_line_length": 21.0, "alnum_prop": 
0.6201465201465202, "repo_name": "jxdong1013/bookinventory", "id": "577e0a27b8901851282783433ef072283019b34b", "size": "2746", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/android/bookinventory/app/src/main/java/com/jxd/android/bookinventtory/bean/ShelfBookScanBean.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ASP", "bytes": "98"}, {"name": "C#", "bytes": "199875"}, {"name": "CSS", "bytes": "21629"}, {"name": "HTML", "bytes": "6682"}, {"name": "Java", "bytes": "261073"}, {"name": "JavaScript", "bytes": "438316"}, {"name": "PowerShell", "bytes": "112665"}]}} {"text": "\n\n#include \"../../inc/MarlinConfig.h\"\n\n#if HAS_M206_COMMAND\n\n#include \"../gcode.h\"\n#include \"../../module/motion.h\"\n#include \"../../lcd/marlinui.h\"\n#include \"../../libs/buzzer.h\"\n#include \"../../MarlinCore.h\"\n\nvoid M206_report() {\n SERIAL_ECHOLNPAIR_P(\n LIST_N(DOUBLE(LINEAR_AXES),\n PSTR(\"M206 X\"), home_offset.x,\n SP_Y_STR, home_offset.y,\n SP_Z_STR, home_offset.z,\n SP_I_STR, home_offset.i,\n SP_J_STR, home_offset.j,\n SP_K_STR, home_offset.k,\n )\n );\n}\n\n/**\n * M206: Set Additional Homing Offset (X Y Z). 
SCARA aliases T=X, P=Y\n *\n * *** @thinkyhead: I recommend deprecating M206 for SCARA in favor of M665.\n * *** M206 for SCARA will remain enabled in 1.1.x for compatibility.\n * *** In the 2.0 release, it will simply be disabled by default.\n */\nvoid GcodeSuite::M206() {\n LOOP_LINEAR_AXES(i)\n if (parser.seen(AXIS_CHAR(i)))\n set_home_offset((AxisEnum)i, parser.value_linear_units());\n\n #if ENABLED(MORGAN_SCARA)\n if (parser.seen('T')) set_home_offset(A_AXIS, parser.value_float()); // Theta\n if (parser.seen('P')) set_home_offset(B_AXIS, parser.value_float()); // Psi\n #endif\n\n if (!parser.seen(LINEAR_AXIS_GANG(\"X\", \"Y\", \"Z\", \"I\", \"J\", \"K\")))\n M206_report();\n else\n report_current_position();\n}\n\n/**\n * M428: Set home_offset based on the distance between the\n * current_position and the nearest \"reference point.\"\n * If an axis is past center its endstop position\n * is the reference-point. Otherwise it uses 0. This allows\n * the Z offset to be set near the bed when using a max endstop.\n *\n * M428 can't be used more than 2cm away from 0 or an endstop.\n *\n * Use M206 to set these values directly.\n */\nvoid GcodeSuite::M428() {\n if (homing_needed_error()) return;\n\n xyz_float_t diff;\n LOOP_LINEAR_AXES(i) {\n diff[i] = base_home_pos((AxisEnum)i) - current_position[i];\n if (!WITHIN(diff[i], -20, 20) && home_dir((AxisEnum)i) > 0)\n diff[i] = -current_position[i];\n if (!WITHIN(diff[i], -20, 20)) {\n SERIAL_ERROR_MSG(STR_ERR_M428_TOO_FAR);\n LCD_ALERTMESSAGEPGM_P(PSTR(\"Err: Too far!\"));\n BUZZ(200, 40);\n return;\n }\n }\n\n LOOP_LINEAR_AXES(i) set_home_offset((AxisEnum)i, diff[i]);\n report_current_position();\n LCD_MESSAGEPGM(MSG_HOME_OFFSETS_APPLIED);\n BUZZ(100, 659);\n BUZZ(100, 698);\n}\n\n#endif // HAS_M206_COMMAND\n", "meta": {"content_hash": "d5f7b1a89aaf43eebda85cc3e0a7bc6e", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 82, "avg_line_length": 28.771084337349397, "alnum_prop": 0.6134840871021775, 
"repo_name": "limtbk/3dprinting", "id": "51f3e7c14c049e582236c468f1a1c40acf5e4c5a", "size": "3249", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Firmware/src/Marlin/src/gcode/geometry/M206_M428.cpp", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "16427042"}, {"name": "C++", "bytes": "1508812"}, {"name": "Makefile", "bytes": "58317"}, {"name": "Objective-C", "bytes": "195319"}, {"name": "Processing", "bytes": "407203"}, {"name": "Python", "bytes": "11892"}, {"name": "Scilab", "bytes": "10211"}]}} {"text": "/*\n\n A helper function to diagram all routes in a `fluid.express` instance.\n\n */\n\n\"use strict\";\nvar fluid = fluid || require(\"infusion\");\n\nfluid.registerNamespace(\"fluid.test.express\");\n\n/**\n *\n * Diagram all routes within an express instance.\n *\n * @param {Object} expressComponent - A `fluid.express` component.\n * @return {Object} A JSON Object representing all routes within a `fluid.express` instance.\n *\n */\nfluid.test.express.diagramAllRoutes = function (expressComponent) {\n return fluid.test.express.diagramOneLevel(expressComponent, expressComponent.router._router);\n};\n\n/**\n *\n * Diagram the routes for a single component. 
To preserve the routing order of the stack, each level's children\n * are represented in a `children` Array.\n *\n * @param {Object} component - A `fluid.express.middleware` component.\n * @param {Object} router - The router instance within the component (if there is one).\n * @return {Object} A JSON Object representing the routes from this level down as well as the method and path for this level.\n */\nfluid.test.express.diagramOneLevel = function (component, router) {\n var thisLevel = fluid.filterKeys(component.options, [\"method\", \"path\"]);\n thisLevel.typeName = component.typeName;\n\n if (router) {\n thisLevel.children = fluid.transform(router.stack, function (layer) {\n // This is a `fluid.express.router` instance\n if (layer.handle && layer.handle.that) {\n return fluid.test.express.diagramOneLevel(layer.handle.that, layer.handle.that.router);\n }\n // This is a `fluid.express.middleware` instance\n else if (layer.route) {\n var wrapper = fluid.filterKeys(layer.route, [\"path\", \"methods\"]);\n wrapper.children = fluid.transform(layer.route.stack, function (middlewareLayer) {\n return fluid.test.express.diagramOneLevel(middlewareLayer.handle.that, middlewareLayer.handle.that.router);\n });\n return wrapper;\n }\n // This is something outside of our scope (i.e. 
native middleware).\n else {\n return \"Native middleware '\" + (layer.name || \"unknown\") + \"'\";\n }\n });\n }\n\n return thisLevel;\n};\n", "meta": {"content_hash": "1e7085b56a339027cd32730d523f680f", "timestamp": "", "source": "github", "line_count": 59, "max_line_length": 127, "avg_line_length": 38.389830508474574, "alnum_prop": 0.6512141280353201, "repo_name": "GPII/gpii-express", "id": "34a60008e12253be09206b56a5d1c3b76b4d7c29", "size": "2265", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "tests/js/lib/diagramAllRoutes.js", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "HTML", "bytes": "214"}, {"name": "JavaScript", "bytes": "208501"}]}} {"text": "package com.lru.memory.disk.cache;\n\n/**\n *\n * @author sathayeg\n */\npublic interface DirLocate {\n public String getPathToFile(String key) throws Exception;\n public boolean isDiskPersistent();\n}\n", "meta": {"content_hash": "3f8be3bb0d892f5fdc57eeeaa392bc41", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 61, "avg_line_length": 20.0, "alnum_prop": 0.72, "repo_name": "gaurangsathaye/JavaSimpleCache", "id": "00f1f658cb5c38b6e518eccef7f979c9316fdd38", "size": "200", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/main/java/com/lru/memory/disk/cache/DirLocate.java", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "\n\n\n\t\n\n\n\u041a\u043b\u0430\u0441\u0438\u0444\u0438\u043a\u0430\u0446\u0438\u0458\u0430\n\n\n\n\n

\n\n\n
\n\n \n\n
\n \n \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n

\u0412\u0438\u0434\u043e\u0432\u0438\u0442\u0435 \u043d\u0430 \u0440\u0430\u043a \u0441\u0435 \u043a\u043b\u0430\u0441\u0438\u0444\u0438\u0446\u0438\u0440\u0430\u0430\u0442 \u0441\u043f\u043e\u0440\u0435\u0434 \u0432\u0438\u0434\u043e\u0442 \u043d\u0430 \u043a\u043b\u0435\u0442\u043a\u0438 \u043a\u043e\u0438 \u043d\u0430\u043b\u0438\u043a\u0443\u0432\u0430\u0430\u0442 \u043d\u0430 \u0442\u0443\u043c\u043e\u0440, \u0438 \u0441\u043f\u043e\u0440\u0435\u0434 \u0442\u043e\u0430, \n\t\u043d\u0430 \u0442\u043a\u0438\u0432\u043e\u0442\u043e \u0437\u0430 \u043a\u043e\u0435 \u0441\u0435 \u043f\u0440\u0435\u0442\u043f\u043e\u0441\u0442\u0430\u0432\u0443\u0432\u0430 \u0434\u0435\u043a\u0430 \u0435 \u0437\u0430\u0447\u0435\u0442\u043e\u043a \u043d\u0430 \u0442\u0443\u043c\u043e\u0440\u043e\u0442. \u041f\u043e\u0434\u043e\u043b\u0443 \u0435 \u0445\u0438\u0441\u0442\u043e\u043b\u043e\u0433\u0438\u0458\u0430\u0442\u0430 \u0438 \u043c\u0435\u0441\u0442\u043e\u043f\u043e\u043b\u043e\u0436\u0431\u0430\u0442\u0430 \u043d\u0430 \u0432\u0438\u0434\u043e\u0432\u0438\u0442\u0435 \u0440\u0430\u043a. \u041f\u0440\u0438\u043c\u0435\u0440\u0438 \u0437\u0430 \u043d\u0435\u043a\u043e\u043b\u043a\u0443 \n\t\u043e\u043f\u0448\u0442\u0438 \u043a\u0430\u0442\u0435\u0433\u043e\u0440\u0438\u0438 \u0441\u0435: \u041a\u0430\u0440\u0446\u0438\u043d\u043e\u043c: \u041c\u0430\u043b\u0438\u0433\u043d\u0438 \u0442\u0443\u043c\u043e\u0440\u0438 \u043a\u043e\u0438 \u043f\u043e\u0442\u0435\u043a\u043d\u0443\u0432\u0430\u0430\u0442 \u043e\u0434 \u0435\u043f\u0438\u0442\u0435\u043b\u043d\u0438 \u043a\u043b\u0435\u0442\u043a\u0438. 
\u0412\u043e \u043e\u0432\u0430\u0430 \u0433\u0440\u0443\u043f\u0430 \u043f\u0440\u0438\u043f\u0430\u0453\u0430\u0430\u0442 \u043d\u0430\u0458\u0447\u0435\u0441\u0442\u0438\u0442\u0435 \u0432\u0438\u0434\u043e\u0432\u0438 \u043d\u0430 \u0440\u0430\u043a, \n\t\u0432\u043a\u043b\u0443\u0447\u0443\u0432\u0430\u0458\u045c\u0438 \u0433\u0438 \u0440\u0430\u043a\u043e\u0442 \u043d\u0430 \u0434\u043e\u0458\u043a\u0430\u0442\u0430, \u043f\u0440\u043e\u0441\u0442\u0430\u0442\u0430\u0442\u0430, \u0431\u0435\u043b\u0438\u0442\u0435 \u0434\u0440\u043e\u0431\u043e\u0432\u0438 \u0438 \u0434\u0435\u0431\u0435\u043b\u043e\u0442\u043e \u0446\u0440\u0435\u0432\u043e. \u0421\u0430\u0440\u043a\u043e\u043c: \u041c\u0430\u043b\u0438\u0433\u043d\u0438 \u0442\u0443\u043c\u043e\u0440\u0438 \u043a\u043e\u0438 \u043f\u043e\u0442\u0435\u043a\u043d\u0443\u0432\u0430\u0430\u0442 \u043e\u0434 \u0441\u0432\u0440\u0437\u043d\u0438\u0442\u0435 \u0442\u043a\u0438\u0432\u0430,\n\t \u0438\u043b\u0438 \u043c\u0435\u0437\u0435\u043d\u0445\u0438\u043c\u0430\u043b\u043d\u0438\u0442\u0435 \u043a\u043b\u0435\u0442\u043a\u0438. \u041b\u0438\u043c\u0444\u043e\u043c \u0438 \u043b\u0435\u0443\u043a\u0435\u043c\u0438\u0458\u0430: \u041c\u0430\u043b\u0438\u0433\u043d\u043e\u043c\u0438 \u043a\u043e\u0438 \u043f\u043e\u0442\u0435\u043a\u043d\u0443\u0432\u0430\u0430\u0442 \u043e\u0434 \u0445\u0435\u043c\u0430\u0442\u043e\u043f\u043e\u0435\u0442\u0441\u043a\u0438\u0442\u0435 (\u043a\u0440\u0432\u043e\u0442\u0432\u043e\u0440\u043d\u0438\u0442\u0435) \u043a\u043b\u0435\u0442\u043a\u0438 \u0411\u0430\u043a\u0442\u0435\u0440\u0438\u0441\u043a\u0438 \u0440\u0430\u043a: \n\t \u0422\u0443\u043c\u043e\u0440\u0438 \u043a\u043e\u0438 \u043f\u043e\u0442\u0435\u043a\u043d\u0443\u0432\u0430\u0430\u0442 \u043e\u0434 \u0442\u043e\u0442\u0438\u043f\u043e\u0442\u0435\u043d\u0442\u043d\u0438\u0442\u0435 \u043a\u043b\u0435\u0442\u043a\u0438. 
\u041a\u0430\u0458 \u0432\u043e\u0437\u0440\u0430\u0441\u043d\u0438\u0442\u0435 \u043d\u0430\u0458\u0447\u0435\u0441\u0442\u043e \u043c\u043e\u0436\u0435 \u0434\u0430 \u0441\u0435 \u043d\u0430\u0458\u0434\u0435 \u0432\u043e \u0442\u0435\u0441\u0442\u0438\u0441\u0438\u0442\u0435 \u0438 \u0458\u0430\u0458\u0447\u043d\u0438\u0446\u0438\u0442\u0435, \u0434\u043e\u0434\u0435\u043a\u0430 \u043a\u0430\u0458 \u0444\u0435\u0442\u0443\u0441\u0438\u0442\u0435, \n\t \u0431\u0435\u0431\u0438\u045a\u0430\u0442\u0430 \u0438 \u043c\u0430\u043b\u0438\u0442\u0435 \u0434\u0435\u0446\u0430 \u043d\u0430\u0458\u0447\u0435\u0441\u0442\u043e \u0441\u0435 \u043d\u0430\u043e\u0453\u0430\u0430\u0442 \u0432\u043e \u0441\u0440\u0435\u0434\u0438\u0448\u043d\u0438\u043e\u0442 \u0434\u0435\u043b \u043d\u0430 \u0442\u0435\u043b\u043e\u0442\u043e, \u043d\u0430 \u0432\u0440\u0432\u043e\u0442 \u043d\u0430 \u043e\u043f\u0430\u0448\u043d\u0430\u0442\u0430 \u043a\u043e\u0441\u043a\u0430, \u0430 \u043a\u0430\u0458 \u043a\u043e\u045a\u0438\u0442\u0435 \u0432\u043e \u043e\u0441\u043d\u043e\u0432\u0430\u0442\u0430 \u043d\u0430 \u0447\u0435\u0440\u0435\u043f\u043e\u0442. \n\t \u0411\u043b\u0430\u0441\u0442\u0435\u043d \u0442\u0443\u043c\u043e\u0440 \u0438\u043b\u0438 \u0431\u043b\u0430\u0441\u0442\u043e\u043c: \u0422\u0443\u043c\u043e\u0440 (\u043e\u0431\u0438\u0447\u043d\u043e \u043c\u0430\u043b\u0438\u0433\u043d\u0435\u043d) \u043a\u043e\u0458 \u043d\u0430\u043b\u0438\u043a\u0443\u0432\u0430 \u043d\u0430 \u043d\u0435\u0437\u0440\u0435\u043b\u043e \u0438\u043b\u0438 \u0435\u043c\u0431\u0440\u0438\u043e\u043d\u0441\u043a\u043e \u0442\u043a\u0438\u0432\u043e. \u041e\u0432\u0438\u0435 \u0442\u0443\u043c\u043e\u0440\u0438 \u0441\u0435 \u043d\u0430\u0458\u0447\u0435\u0441\u0442\u0438 \u043a\u0430\u0458 \u0434\u0435\u0446\u0430\u0442\u0430. 
\n \u0442\u0443\u043c\u043e\u0440\u0438 (\u0432\u0438\u0434\u043e\u0432\u0438 \u0440\u0430\u043a) \u043e\u0431\u0438\u0447\u043d\u043e \u0441\u0435 \u0438\u043c\u0435\u043d\u0443\u0432\u0430\u0430\u0442 \u043a\u043e\u0440\u0438\u0441\u0442\u0435\u0458\u045c\u0438 \u2013\u043a\u0430\u0440\u0446\u0438\u043d\u043e\u043c, -\u0441\u0430\u0440\u043a\u043e\u043c \u0438\u043b\u0438 \u2013\u0431\u043b\u0430\u0441\u0442\u043e\u043c \u043a\u0430\u043a\u043e \u0441\u0443\u0444\u0438\u043a\u0441, \u0438 \u043b\u0430\u0442\u0438\u043d\u0441\u043a\u0438\u043e\u0442 \u0438\u043b\u0438 \u0433\u0440\u0447\u043a\u0438\u043e\u0442 \u0437\u0431\u043e\u0440 \u0437\u0430 \u043e\u0440\u0433\u0430\u043d\u043e\u0442\n \u043e\u0434 \u043a\u043e\u0458 \u043f\u043e\u0442\u0435\u043a\u043d\u0443\u0432\u0430\u0430\u0442 \u043a\u0430\u043a\u043e \u043a\u043e\u0440\u0435\u043d \u043d\u0430 \u0438\u043c\u0435\u0442\u043e. \u041d\u0430 \u043f\u0440\u0438\u043c\u0435\u0440, \u0440\u0430\u043a\u043e\u0442 \u043d\u0430 \u0446\u0440\u043d\u0438\u043e\u0442 \u0434\u0440\u043e\u0431 \u0441\u0435 \u043d\u0430\u0440\u0435\u043a\u0443\u0432\u0430 \u0445\u0435\u043f\u0430\u0442\u043e\u043a\u0430\u0440\u0446\u0438\u043d\u043e\u043c, \u0440\u0430\u043a\u043e\u0442 \u043d\u0430 \u043c\u0430\u0441\u043d\u0438\u0442\u0435 \u043a\u043b\u0435\u0442\u043a\u0438- \u043b\u0438\u043f\u043e\u0441\u0430\u0440\u043a\u043e\u043c\n \u0438\u0442\u043d. \u0411\u0435\u043d\u0438\u0433\u043d\u0438\u0442\u0435 \u0442\u0443\u043c\u043e\u0440\u0438 (\u043a\u043e\u0438 \u043d\u0435 \u0441\u0435 \u0440\u0430\u043a) \u0441\u0435 \u0438\u043c\u0435\u043d\u0443\u0432\u0430\u0430\u0442 \u043a\u043e\u0440\u0438\u0441\u0442\u0435\u0458\u045c\u0438 \u2013\u043e\u043c \u043a\u0430\u043a\u043e \u0441\u0443\u0444\u0438\u043a\u0441, \u0438 \u0438\u043c\u0435\u0442\u043e \u043d\u0430 \u043e\u0440\u0433\u0430\u043d\u043e\u0442 \u043a\u0430\u043a\u043e \u043a\u043e\u0440\u0435\u043d. 
\u041d\u0430 \u043f\u0440\u0438\u043c\u0435\u0440, \u0431\u0435\u043d\u0438\u0433\u043d\u0435\u043d \u0442\u0443\u043c\u043e\u0440 \n \u043d\u0430 \u043c\u0430\u0437\u043d\u0430\u0442\u0430 \u043c\u0443\u0441\u043a\u0443\u043b\u0430\u0442\u0443\u0440\u0430 \u043d\u0430 \u043c\u0430\u0442\u043a\u0430\u0442\u0430 \u0441\u0435 \u043d\u0430\u0440\u0435\u043a\u0443\u0432\u0430 \u043b\u0435\u0438\u043e\u043c\u0438\u043e\u043c (\u043f\u043e\u0447\u0435\u0441\u0442 \u0438\u0437\u0440\u0430\u0437 \u0437\u0430 \u043e\u0432\u043e\u0458 \u0432\u0438\u0434 \u0442\u0443\u043c\u043e\u0440 \u0435 \u0444\u0438\u0431\u0440\u043e\u0438\u0434). \u0417\u0430 \u0436\u0430\u043b, \n\t\u0438 \u043d\u0435\u043a\u043e\u0438 \u0432\u0438\u0434\u043e\u0432\u0438 \u0440\u0430\u043a \u0433\u043e \u043a\u043e\u0440\u0438\u0441\u0442\u0430\u0442 \u0441\u0443\u0444\u0438\u043a\u0441\u043e\u0442 \u2013\u043e\u043c, \u043a\u0430\u043a\u043e \u0448\u0442\u043e \u0435 \u0441\u043b\u0443\u0447\u0430\u0458\u043e\u0442 \u0441\u043e \u043c\u0435\u043b\u0430\u043d\u043e\u043c \u0438 \u0441\u0435\u043c\u0438\u043d\u043e\u043c\n\t\t

\n\n\n\t \n\n
\n\n\n\n", "meta": {"content_hash": "5fe8f5d153cd27a362025ba442a3f87d", "timestamp": "", "source": "github", "line_count": 92, "max_line_length": 140, "avg_line_length": 45.84782608695652, "alnum_prop": 0.7159791370317686, "repo_name": "majaradichevich/majaradichevich.github.io", "id": "7ed025a1c9e8db3faad7a92693291f6f197a9621", "size": "5642", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "klasifikacija.html", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "6042"}, {"name": "HTML", "bytes": "185244"}, {"name": "JavaScript", "bytes": "11231"}]}} {"text": "

Installing XFramework Generator

\n\n

You don't need to download the source code from the repo, create all the necessary files for the web app, writing two thousands line of code just to create a Hello world! app. XFramework Generator can make everything for you.

\n\n

XF Generator has a number of dependencies such as:

\n\n\n\n

To install first two of them on Mac OS X or Windows computers you just need to download a package from nodejs.org/download/. For other platforms see the readme.

\n\n

After installing node.js and npm go to terminal and install Yeoman writing npm install -g yo (with sudo if necessary).

\n\n

Almost there! After these steps you need to install XF Generator with npm install -g generator-xf.

", "meta": {"content_hash": "7ec5a698f2b1bcd09b1e097b90fbd031", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 257, "avg_line_length": 60.705882352941174, "alnum_prop": 0.7199612403100775, "repo_name": "epam/xframework", "id": "4127893e601400216714eb907402287b68e38251", "size": "1032", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/data/installing_xframework_generator.html", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "CSS", "bytes": "275950"}, {"name": "JavaScript", "bytes": "730289"}]}} {"text": "namespace Azure.ResourceManager.EdgeOrder.Models\n{\n /// Represents subscription registered features. \n public partial class CustomerSubscriptionRegisteredFeatures\n {\n /// Initializes a new instance of CustomerSubscriptionRegisteredFeatures. \n public CustomerSubscriptionRegisteredFeatures()\n {\n }\n\n /// Name of subscription registered feature. \n public string Name { get; set; }\n /// State of subscription registered feature. \n public string State { get; set; }\n }\n}\n", "meta": {"content_hash": "ac471d3f33987dd6c7e35e349847293a", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 102, "avg_line_length": 38.4375, "alnum_prop": 0.6813008130081301, "repo_name": "Azure/azure-sdk-for-net", "id": "1888fb1a8203597c021c5a97aa4c2106984a672a", "size": "753", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "sdk/edgeorder/Azure.ResourceManager.EdgeOrder/src/Generated/Models/CustomerSubscriptionRegisteredFeatures.cs", "mode": "33188", "license": "mit", "language": []}} {"text": "\n\n\n\n\n\nUses of Interface org.wildfly.swarm.config.messaging.activemq.server.BindingsDirectoryPathConsumer (BOM: * : All 2.3.1.Final-SNAPSHOT API)\n\n\n\n\n\n\n\n\n
\n\n\n\n\n\n\n\n
Thorntail API, 2.3.1.Final-SNAPSHOT
\n
\n
\n
    \n
  • Prev
  • \n
  • Next
  • \n
\n\n\n
\n\n
\n\n\n
\n\n
\n

Uses of Interface
org.wildfly.swarm.config.messaging.activemq.server.BindingsDirectoryPathConsumer

\n
\n
\n\n
\n\n
\n\n\n\n\n\n\n\n
Thorntail API, 2.3.1.Final-SNAPSHOT
\n
\n
\n
    \n
  • Prev
  • \n
  • Next
  • \n
\n\n\n
\n\n
\n\n\n
\n\n

Copyright © 2019 JBoss by Red Hat. All rights reserved.

\n\n\n", "meta": {"content_hash": "9d4a959a17bfed5d67d358e525d7f8e4", "timestamp": "", "source": "github", "line_count": 206, "max_line_length": 793, "avg_line_length": 64.10194174757281, "alnum_prop": 0.6843619840969329, "repo_name": "wildfly-swarm/wildfly-swarm-javadocs", "id": "1e14ac35a7c49488433c20a72a70ed6f7111d9be", "size": "13205", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "2.3.1.Final-SNAPSHOT/apidocs/org/wildfly/swarm/config/messaging/activemq/server/class-use/BindingsDirectoryPathConsumer.html", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "\n\nimport ReactSharedInternals from 'shared/ReactSharedInternals';\nimport type {Transition} from './ReactFiberTracingMarkerComponent.new';\n\nconst {ReactCurrentBatchConfig} = ReactSharedInternals;\n\nexport const NoTransition = null;\n\nexport function requestCurrentTransition(): Transition | null {\n return ReactCurrentBatchConfig.transition;\n}\n", "meta": {"content_hash": "dace65b4e7262d44f7cf8c5ddcea517a", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 71, "avg_line_length": 28.5, "alnum_prop": 0.8216374269005848, "repo_name": "camsong/react", "id": "d63b99a1862ef22435389d30a9298476688a7c25", "size": "542", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "packages/react-reconciler/src/ReactFiberTransition.js", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "5225"}, {"name": "C++", "bytes": "44278"}, {"name": "CSS", "bytes": "64729"}, {"name": "CoffeeScript", "bytes": "16826"}, {"name": "HTML", "bytes": "119964"}, {"name": "JavaScript", "bytes": "6161428"}, {"name": "Makefile", "bytes": "189"}, {"name": "Python", "bytes": "259"}, {"name": "Shell", "bytes": "2306"}, {"name": "TypeScript", "bytes": "20868"}]}} {"text": "\n\n/*!\n * Copyright (c) 2019 by Contributors\n * \\file moments-inl.h\n * \\brief Moments operator\n * \\author Hao Jin\n*/\n\n#ifndef MXNET_OPERATOR_NN_MOMENTS_INL_H_\n#define 
MXNET_OPERATOR_NN_MOMENTS_INL_H_\n\n#include \n#include \"../tensor/broadcast_reduce_op.h\"\n\nnamespace mxnet {\nnamespace op {\n\nstruct MomentsParam : public dmlc::Parameter {\n dmlc::optional axes;\n bool keepdims;\n DMLC_DECLARE_PARAMETER(MomentsParam) {\n DMLC_DECLARE_FIELD(axes).set_default(dmlc::optional())\n .describe(\"Array of ints. Axes along which to compute mean and variance.\");\n DMLC_DECLARE_FIELD(keepdims).set_default(false)\n .describe(\"produce moments with the same dimensionality as the input.\");\n }\n};\n\ninline bool MomentsShape(const nnvm::NodeAttrs& attrs,\n mxnet::ShapeVector* in_attrs,\n mxnet::ShapeVector* out_attrs) {\n const MomentsParam& param = nnvm::get(attrs.parsed);\n CHECK_EQ(in_attrs->size(), 1U);\n CHECK_EQ(out_attrs->size(), 2U);\n\n mxnet::TShape out_shape =\n ReduceAxesShapeImpl((*in_attrs)[0], param.axes, param.keepdims, false);\n if (!param.axes.has_value() || param.axes.value().ndim() == 0) {\n LOG(FATAL) << \"Empty axes is not supported, if you would like to do global moments, \"\n << \"please pass all axes to axes argument\";\n }\n SHAPE_ASSIGN_CHECK(*out_attrs, 0, out_shape);\n SHAPE_ASSIGN_CHECK(*out_attrs, 1, out_shape);\n return true;\n}\n\ninline bool MomentsType(const nnvm::NodeAttrs& attrs,\n std::vector* in_attrs,\n std::vector* out_attrs) {\n CHECK_EQ(in_attrs->size(), 1U);\n CHECK_EQ(out_attrs->size(), 2U);\n\n TYPE_ASSIGN_CHECK(*out_attrs, 0, in_attrs->at(0));\n TYPE_ASSIGN_CHECK(*out_attrs, 1, in_attrs->at(0));\n TYPE_ASSIGN_CHECK(*in_attrs, 0, out_attrs->at(0));\n TYPE_ASSIGN_CHECK(*in_attrs, 0, out_attrs->at(1));\n return out_attrs->at(0) != -1 && out_attrs->at(1) != -1;\n}\n\nstruct VarBroadcastKernel {\n template\n MSHADOW_XINLINE static void Map(int i,\n DType *out,\n const DType *data,\n const DType *mean,\n mshadow::Shape<6> data_shape,\n mshadow::Shape<6> mean_shape) {\n size_t data_idx = i;\n size_t mean_idx = i;\n size_t data_stride = 1;\n size_t mean_stride = 1;\n for (int axis = 5; axis >= 0; 
--axis) {\n size_t axis_idx = data_idx % data_shape[axis];\n mean_idx -= axis_idx * data_stride;\n if (mean_shape[axis] != 1) {\n mean_idx += axis_idx * mean_stride;\n }\n data_idx /= data_shape[axis];\n data_stride *= data_shape[axis];\n mean_stride *= mean_shape[axis];\n }\n DType res = (data[i] - mean[mean_idx]);\n out[i] = res * res;\n }\n};\n\ntemplate\ninline void MomentsForwardImpl(const OpContext& ctx,\n const std::vector& inputs,\n const std::vector& req,\n const std::vector& outputs,\n const dmlc::optional& axes,\n const bool keepdims) {\n using namespace mshadow;\n using namespace mshadow_op;\n using namespace mxnet_op;\n\n Stream *s = ctx.get_stream();\n\n const TBlob& data = inputs[0];\n const TBlob& mean = outputs[0];\n const TBlob& var = outputs[1];\n\n mxnet::TShape small;\n if (keepdims) {\n small = outputs[0].shape_;\n } else {\n small = ReduceAxesShapeImpl(inputs[0].shape_, axes, true, false);\n }\n\n ReduceAxesComputeImpl(ctx, {data}, {req[0]}, {mean}, small);\n MSHADOW_TYPE_SWITCH(data.type_flag_, DType, {\n Shape<6> data_shape, mean_shape;\n for (int i = 0; i < 6; ++i) {\n data_shape[i] = (i < data.shape_.ndim()) ? data.shape_[i] : 1;\n mean_shape[i] = (i < small.ndim()) ? 
small[i] : 1;\n }\n Tensor temp_data =\n ctx.requested[0].get_space_typed(Shape1(data.shape_.Size()), s);;\n Kernel::Launch(s, data.shape_.Size(), temp_data.dptr_,\n data.dptr(), mean.dptr(), data_shape, mean_shape);\n ReduceAxesComputeImpl(\n ctx, {TBlob(temp_data).reshape(data.shape_)}, {kWriteTo}, {var}, small);\n });\n}\n\ntemplate\ninline void MomentsForward(const nnvm::NodeAttrs& attrs,\n const OpContext& ctx,\n const std::vector& inputs,\n const std::vector& req,\n const std::vector& outputs) {\n using namespace mshadow;\n using namespace mshadow_op;\n using namespace mxnet_op;\n\n CHECK_EQ(inputs.size(), 1U);\n CHECK_EQ(outputs.size(), 2U);\n\n const MomentsParam& param = nnvm::get(attrs.parsed);\n\n MomentsForwardImpl(ctx, inputs, req, outputs, param.axes, param.keepdims);\n}\n\ntemplate\nstruct VarBackwardKernel {\n template\n MSHADOW_XINLINE static void Map(int i,\n DType *igrad,\n const DType *ograd,\n const DType *data,\n const DType *mean,\n mshadow::Shape<6> data_shape,\n mshadow::Shape<6> mean_shape,\n const float N,\n const float ddof = 0.0f) {\n size_t data_idx = i;\n size_t mean_idx = i;\n size_t data_stride = 1;\n size_t mean_stride = 1;\n for (int axis = 5; axis >= 0; --axis) {\n size_t axis_idx = data_idx % data_shape[axis];\n mean_idx -= axis_idx * data_stride;\n if (mean_shape[axis] != 1) {\n mean_idx += axis_idx * mean_stride;\n }\n data_idx /= data_shape[axis];\n data_stride *= data_shape[axis];\n mean_stride *= mean_shape[axis];\n }\n KERNEL_ASSIGN(igrad[i], req, ograd[mean_idx] * (data[i] - mean[mean_idx]) * 2 / (N - ddof));\n }\n};\n\ntemplate\ninline void MomentsBackwardImpl(const nnvm::NodeAttrs& attrs,\n const OpContext& ctx,\n const std::vector& inputs,\n const std::vector& req,\n const std::vector& outputs,\n const dmlc::optional& axes) {\n using namespace mshadow;\n using namespace mshadow::expr;\n using namespace mshadow_op;\n using namespace mxnet_op;\n\n Stream *s = ctx.get_stream();\n\n const TBlob& mean_grad = inputs[0];\n 
const TBlob& var_grad = inputs[1];\n const TBlob& data = inputs[2];\n const TBlob& mean = inputs[3];\n const TBlob& var = inputs[4];\n const TBlob& data_grad = outputs[0];\n\n mxnet::TShape small = ReduceAxesShapeImpl(data.shape_, axes, true, false);\n BroadcastComputeImpl(attrs, ctx, {mean_grad}, req, outputs, small);\n MSHADOW_TYPE_SWITCH(outputs[0].type_flag_, DType, {\n Tensor igrad = outputs[0].FlatTo1D(s);\n igrad /= scalar(outputs[0].Size()/inputs[0].Size());\n });\n\n Shape<6> data_shape, var_shape;\n float N = data_grad.Size() / var.Size();\n for (int i = 0; i < 6; ++i) {\n data_shape[i] = (i < data.shape_.ndim()) ? data.shape_[i] : 1;\n var_shape[i] = (i < small.ndim()) ? small[i] : 1;\n }\n MSHADOW_TYPE_SWITCH(data_grad.type_flag_, DType, {\n Kernel, xpu>::Launch(\n s, data_grad.shape_.Size(), data_grad.dptr(), var_grad.dptr(),\n data.dptr(), mean.dptr(), data_shape, var_shape, N);\n });\n}\n\ntemplate\ninline void MomentsBackward(const nnvm::NodeAttrs& attrs,\n const OpContext& ctx,\n const std::vector& inputs,\n const std::vector& req,\n const std::vector& outputs) {\n using namespace mshadow;\n using namespace mshadow_op;\n using namespace mxnet_op;\n\n CHECK_EQ(inputs.size(), 5U);\n CHECK_EQ(outputs.size(), 1U);\n\n const MomentsParam& param = nnvm::get(attrs.parsed);\n\n MomentsBackwardImpl(attrs, ctx, inputs, req, outputs, param.axes);\n}\n\n} // namespace op\n} // namespace mxnet\n#endif // MXNET_OPERATOR_NN_MOMENTS_INL_H_\n", "meta": {"content_hash": "1379c25c73b10bf4cbf94d1e3e5177af", "timestamp": "", "source": "github", "line_count": 237, "max_line_length": 96, "avg_line_length": 36.08438818565401, "alnum_prop": 0.5748362956033677, "repo_name": "reminisce/mxnet", "id": "6a9bdc54b9059e922e80bdd01e6bde06bf84326b", "size": "9359", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "src/operator/nn/moments-inl.h", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ANTLR", "bytes": "1731"}, {"name": "Batchfile", 
"bytes": "13130"}, {"name": "C", "bytes": "215572"}, {"name": "C++", "bytes": "7680259"}, {"name": "CMake", "bytes": "99958"}, {"name": "Clojure", "bytes": "622688"}, {"name": "Cuda", "bytes": "970884"}, {"name": "Dockerfile", "bytes": "85151"}, {"name": "Groovy", "bytes": "122800"}, {"name": "HTML", "bytes": "40277"}, {"name": "Java", "bytes": "205196"}, {"name": "Julia", "bytes": "436326"}, {"name": "Jupyter Notebook", "bytes": "3660387"}, {"name": "MATLAB", "bytes": "34903"}, {"name": "Makefile", "bytes": "201597"}, {"name": "Perl", "bytes": "1550163"}, {"name": "Perl 6", "bytes": "7280"}, {"name": "PowerShell", "bytes": "13786"}, {"name": "Python", "bytes": "7842403"}, {"name": "R", "bytes": "357807"}, {"name": "Scala", "bytes": "1305036"}, {"name": "Shell", "bytes": "427407"}, {"name": "Smalltalk", "bytes": "3497"}]}} {"text": "class DeviceSettingListener < AsakusaSatellite::Hook::Listener\n render_on :account_setting_item, :partial => \"device_setting\"\nend\n\n", "meta": {"content_hash": "46ad3aa3b2c9fc525d06733f4de789d0", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 63, "avg_line_length": 33.0, "alnum_prop": 0.7803030303030303, "repo_name": "mallowlabs/AsakusaSatellite", "id": "fc863576a1861c4131cf2c1c604d62a735622fb0", "size": "132", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "plugins/as_device_setting/lib/device_setting_listener.rb", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "4295"}, {"name": "JavaScript", "bytes": "70132"}, {"name": "Ruby", "bytes": "176805"}, {"name": "Shell", "bytes": "1621"}]}} {"text": "require_relative '../../spec_helper'\n\nVALID_UUID_REGEX = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i\nUUID_STRING1 = 'de305d54-75b4-431b-adb2-eb6b9e546014'\nUUID_STRING2 = 'de305d54-75b4-431b-adb2-eb6b9e546020'\nUUID_PACKED_STRING1 = 
\"\\xde\\x30\\x5d\\x54\\x75\\xb4\\x43\\x1b\\xad\\xb2\\xeb\\x6b\\x9e\\x54\\x60\\x14\"\nUUID_PACKED_STRING2 = \"\\xde\\x30\\x5d\\x54\\x75\\xb4\\x43\\x1b\\xad\\xb2\\xeb\\x6b\\x9e\\x54\\x60\\x20\"\n\nRSpec.describe Lobster::Uuid do\n\n describe '::DEFAULT' do\n subject { Lobster::Uuid::DEFAULT }\n\n it 'is all zeroes' do\n expect(subject).to eq(\"\\x0\" * 16)\n end\n\n it 'is frozen' do\n is_expected.to be_frozen\n end\n end\n \n describe '#new' do\n context 'with a valid value' do\n subject(:uuid) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n\n describe 'the value' do\n subject { uuid.value }\n\n it 'is stored' do\n is_expected.to eq UUID_PACKED_STRING1\n end\n end\n end\n\n context 'with an invalid value' do\n it 'rejects a malformed string' do\n expect { Lobster::Uuid.new('foobar') }.to raise_error(ArgumentError)\n end\n end\n\n end\n\n context '.generate' do\n subject(:uuid) { Lobster::Uuid.generate }\n\n describe 'the value' do\n subject { uuid.value }\n\n # The chances of a randomly generated empty UUID is virtually impossible.\n # Besides, the UUID standards don't allow generated UUID to be all zeroes.\n it 'is random' do\n is_expected.not_to eq(\"\\x0\" * 16)\n end\n end\n\n describe 'the string' do\n subject { uuid.to_s }\n\n it 'is valid' do\n is_expected.to match VALID_UUID_REGEX\n end\n end\n end\n\n describe '.parse' do\n\n context 'with a valid UUID' do\n subject { Lobster::Uuid.parse(UUID_STRING1) }\n\n it 'parses correctly' do\n expect(subject.to_s).to eq(UUID_STRING1)\n end\n end\n\n context 'with a malformed string' do\n it 'returns nil' do\n expect(Lobster::Uuid.parse('foobar')).to be_nil\n end\n end\n\n context 'with an invalid argument' do\n it 'rejects a non-string' do\n expect { Lobster::Uuid.parse(5) }.to raise_error(ArgumentError)\n end\n end\n end\n\n describe '#value' do\n subject(:uuid) { Lobster::Uuid.new(UUID_PACKED_STRING1).value }\n\n it 'is a string' do\n is_expected.to be_a(String)\n end\n\n it 'is frozen' do\n is_expected.to 
be_frozen\n end\n\n describe '.length' do\n subject { uuid.length }\n\n it 'is 16 bytes' do\n is_expected.to be 16\n end\n end\n end\n\n describe '#eql?' do\n subject(:uuid) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n subject { uuid.eql?(other) }\n\n context 'with two identical UUIDs' do\n let(:other) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n\n it 'returns true' do\n is_expected.to eq true\n end\n end\n\n context 'with two different UUIDs' do\n let(:other) { Lobster::Uuid.new(UUID_PACKED_STRING2) }\n\n it 'returns false' do\n is_expected.to eq false\n end\n end\n\n context 'without a UUID' do\n let(:other) { 'foobar' }\n\n it 'returns false' do\n is_expected.to eq false\n end\n end\n end\n\n describe '#==' do\n subject { first == second }\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n\n context 'with equal UUIDs' do\n let(:second) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n it 'is true' do\n is_expected.to be true\n end\n end\n\n context 'with the same instance' do\n let(:second) { first }\n it 'is true' do\n is_expected.to be true\n end\n end\n\n context 'with unequal UUIDs' do\n let(:second) { Lobster::Uuid.new(UUID_PACKED_STRING2) }\n it 'is false' do\n is_expected.to be false\n end\n end\n\n context 'with nil' do\n let(:second) { nil }\n it 'is false' do\n is_expected.to be false\n end\n end\n\n context 'with a malformed string' do\n let(:second) { 'foobar' }\n it 'is false' do\n is_expected.to be false\n end\n end\n\n context 'with a non-string' do\n let(:second) { 5 }\n it 'is false' do\n is_expected.to be false\n end\n end\n\n context 'with identical value' do\n let(:second) { first.value }\n it 'is true' do\n is_expected.to be true\n end\n end\n\n context 'with identical string' do\n let(:second) { first.to_s }\n it 'is true' do\n is_expected.to be true\n end\n end\n end\n\n describe '#<=>' do\n subject { first <=> second }\n\n context 'with an equal UUID' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n let(:second) { 
Lobster::Uuid.new(UUID_PACKED_STRING1) }\n it 'is 0' do\n is_expected.to eq 0\n end\n end\n\n context 'with the same instance' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n let(:second) { first }\n it 'is 0' do\n is_expected.to eq 0\n end\n end\n\n context 'with a lesser UUID' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING2) }\n let(:second) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n it 'is 1' do\n is_expected.to eq 1\n end\n end\n\n context 'with a greater UUID' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n let(:second) { Lobster::Uuid.new(UUID_PACKED_STRING2) }\n it 'is -1' do\n is_expected.to eq(-1)\n end\n end\n\n context 'with nil' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n let(:second) { nil }\n it 'is nil' do\n is_expected.to be_nil\n end\n end\n\n context 'with a number' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n let(:second) { 500 }\n it 'is nil' do\n is_expected.to be_nil\n end\n end\n\n context 'with an identical value' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n let(:second) { UUID_PACKED_STRING1 }\n it 'is 0' do\n is_expected.to eq 0\n end\n end\n\n context 'with a lesser value' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING2) }\n let(:second) { UUID_PACKED_STRING1 }\n it 'is 1' do\n is_expected.to eq 1\n end\n end\n\n context 'with a greater value' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n let(:second) { UUID_PACKED_STRING2 }\n it 'is -1' do\n is_expected.to eq(-1)\n end\n end\n\n context 'with an equal string' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n let(:second) { UUID_STRING1 }\n it 'is 0' do\n is_expected.to eq 0\n end\n end\n\n context 'with a lesser string' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING2) }\n let(:second) { UUID_STRING1 }\n it 'is 1' do\n is_expected.to eq 1\n end\n end\n\n context 'with a greater string' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING1) }\n 
let(:second) { UUID_STRING2 }\n it 'is -1' do\n is_expected.to eq(-1)\n end\n end\n\n context 'with an invalid string' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING2) }\n let(:second) { 'foobar' }\n it 'is nil' do\n is_expected.to be_nil\n end\n end\n end\n\n describe '#hash' do\n subject { Lobster::Uuid.new(UUID_PACKED_STRING1).hash }\n\n it 'is a Fixnum' do\n is_expected.to be_a(Fixnum)\n end\n\n context 'with identical UUIDs' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING1).hash }\n let(:second) { Lobster::Uuid.new(UUID_PACKED_STRING1).hash }\n\n it 'returns identical values' do\n expect(first).to eq(second)\n end\n end\n\n context 'with different UUIDs' do\n let(:first) { Lobster::Uuid.new(UUID_PACKED_STRING1).hash }\n let(:second) { Lobster::Uuid.new(UUID_PACKED_STRING2).hash }\n\n it 'returns different values' do\n expect(first).to_not eq(second)\n end\n end\n end\n\n describe '#to_s' do\n subject { Lobster::Uuid.new(UUID_PACKED_STRING1).to_s }\n\n it 'is a String' do\n is_expected.to be_a String\n end\n\n it 'is a valid UUID string' do\n is_expected.to match(VALID_UUID_REGEX)\n end\n\n it 'equals the initial value' do\n is_expected.to eq(UUID_STRING1)\n end\n\n context 'with bytes less than 16' do\n subject { Lobster::Uuid.new(\"\\x05\\x30\\x5d\\x54\\x75\\x02\\x43\\x1b\\xad\\xb2\\xeb\\x6b\\x9e\\x54\\x60\\x00\").to_s }\n\n it 'pads with zeroes' do\n is_expected.to eq('05305d54-7502-431b-adb2-eb6b9e546000')\n end\n end\n end\n\nend\n", "meta": {"content_hash": "1642c878d4fcdc5ebd2a9e8b8905586d", "timestamp": "", "source": "github", "line_count": 357, "max_line_length": 108, "avg_line_length": 23.946778711484594, "alnum_prop": 0.5853316177330682, "repo_name": "gachapon/lobster-common", "id": "ed5b0f0b607bf7c015be4f064dfa501f5129bfd2", "size": "8549", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "spec/lib/lobster/uuid_spec.rb", "mode": "33188", "license": "mit", "language": [{"name": "Ruby", "bytes": "44314"}]}} {"text": 
"package com.asksunny.schema.parser;\r\n\r\nimport java.util.HashMap;\r\nimport java.util.Map;\r\nimport java.util.Set;\r\n\r\npublic class KeywordDictionary extends HashMap {\r\n\r\n\t/**\r\n\t * \r\n\t */\r\n\tprivate static final long serialVersionUID = 1L;\r\n\r\n\tpublic KeywordDictionary() {\r\n\t\tthis.put(\"CREATE\", Keyword.CREATE);\r\n\t\tthis.put(\"TABLE\", Keyword.TABLE);\r\n\t\tthis.put(\"NOT\", Keyword.NOT);\r\n\t\tthis.put(\"NULL\", Keyword.NULL);\r\n\t\tthis.put(\"PRIMARY\", Keyword.PRIMARY);\r\n\t\tthis.put(\"KEY\", Keyword.KEY);\r\n\t\tthis.put(\"VARCHAR\", Keyword.VARCHAR);\r\n\t\tthis.put(\"NUMBER\", Keyword.NUMBER);\r\n\t\tthis.put(\"INT\", Keyword.INT);\r\n\t\tthis.put(\"BIGINT\", Keyword.BIGINT);\r\n\t\tthis.put(\"INTEGER\", Keyword.INTEGER);\r\n\t\tthis.put(\"DOUBLE\", Keyword.DOUBLE);\r\n\t\tthis.put(\"VARCHAR2\", Keyword.VARCHAR);\r\n\t\tthis.put(\"BINARY\", Keyword.BINARY);\r\n\t\tthis.put(\"LONG\", Keyword.LONG);\r\n\t\tthis.put(\"DATE\", Keyword.DATE);\r\n\t\tthis.put(\"TIME\", Keyword.TIME);\r\n\t\tthis.put(\"TIMESTAMP\", Keyword.TIMESTAMP);\r\n\t\tthis.put(\"ALTER\", Keyword.ALTER);\r\n\t\tthis.put(\"ADD\", Keyword.ADD);\r\n\t\tthis.put(\"CHAR\", Keyword.CHAR);\r\n\t\tthis.put(\"CONSTRAINT\", Keyword.CONSTRAINT);\r\n\t\tthis.put(\"FOREIGN\", Keyword.FOREIGN);\r\n\t\tthis.put(\"REFERENCES\", Keyword.REFERENCES);\r\n\t\tthis.put(\"UNIQUE\", Keyword.UNIQUE);\r\n\t\tthis.put(\"INDEX\", Keyword.INDEX);\r\n\t\tthis.put(\"BYTE\", Keyword.BYTE);\r\n\t\tthis.put(\"NOPARALLELCREATE\", Keyword.CREATE);\r\n\t\tthis.put(\"NOPARALLEL\", Keyword.NOPARALLEL);\r\n\t\tthis.put(\"PARALLEL\", Keyword.PARALLEL);\r\n\t\tthis.put(\"*\", Keyword.ASTERISK);\r\n\t\tthis.put(\"IMAGE\", Keyword.IMAGE);\r\n\t\tthis.put(\"TEXT\", Keyword.TEXT);\r\n\t\tthis.put(\"IDENTITY\", Keyword.IDENTITY);\r\n\t\tthis.put(\"AUTOINCREMENT\", Keyword.IDENTITY);\r\n\t\tthis.put(\"AUTO_INCREMENT\", Keyword.IDENTITY);\r\n\t\tthis.put(\"DEFAULT\", 
Keyword.DEFAULT);\r\n\t\tthis.put(\"COLUMN_FORMAT\", Keyword.FORMAT);\r\n\t\tthis.put(\"FORMAT\", Keyword.FORMAT);\t\t\r\n\t}\r\n\t\r\n\t\r\n\t\r\n\r\n\t@Override\r\n\tpublic Keyword get(Object key) {\r\n\t\t\r\n\t\treturn super.get(key.toString().toUpperCase());\r\n\t}\r\n\r\n\t@Override\r\n\tpublic Keyword put(String key, Keyword value) {\r\n\t\treturn super.put(key.toUpperCase(), value);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic void putAll(Map m) {\r\n\t\tSet keys = m.keySet();\r\n\t\tfor (String string : keys) {\r\n\t\t\tsuper.put(string.toUpperCase(), m.get(string));\r\n\t\t}\r\n\t}\r\n\r\n\t@Override\r\n\tpublic boolean containsKey(Object key) {\r\n\t\t\r\n\t\treturn super.containsKey(key.toString().toUpperCase());\r\n\t}\r\n\r\n}\r\n", "meta": {"content_hash": "17e0844e355090632f0e6aef8dd4ab5e", "timestamp": "", "source": "github", "line_count": 84, "max_line_length": 65, "avg_line_length": 28.75, "alnum_prop": 0.6650103519668737, "repo_name": "devsunny/jscaffold", "id": "0e5042958a41441b8acf16b22b7e71f333e3239b", "size": "2415", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/asksunny/schema/parser/KeywordDictionary.java", "mode": "33188", "license": "mit", "language": [{"name": "FreeMarker", "bytes": "37206"}, {"name": "Java", "bytes": "312644"}]}} {"text": "\n#include \"arm_math.h\"\n\n/** \n * @ingroup groupMath \n */\n\n/** \n * @defgroup offset Vector Offset \n * \n * Adds a constant offset to each element of a vector. \n * \n *
        \n *     pDst[n] = pSrc[n] + offset,   0 <= n < blockSize.        \n * 
\n * \n * There are separate functions for floating-point, Q7, Q15, and Q31 data types. \n */\n\n/** \n * @addtogroup offset \n * @{ \n */\n\n/** \n * @brief Adds a constant offset to a floating-point vector. \n * @param[in] *pSrc points to the input vector \n * @param[in] offset is the offset to be added \n * @param[out] *pDst points to the output vector \n * @param[in] blockSize number of samples in the vector \n * @return none. \n */\n\n\nvoid arm_offset_f32(\n float32_t * pSrc,\n float32_t offset,\n float32_t * pDst,\n uint32_t blockSize)\n{\n uint32_t blkCnt; /* loop counter */\n\n#ifndef ARM_MATH_CM0\n\n/* Run the below code for Cortex-M4 and Cortex-M3 */\n float32_t in1, in2, in3, in4;\n\n /*loop Unrolling */\n blkCnt = blockSize >> 2u;\n\n /* First part of the processing with loop unrolling. Compute 4 outputs at a time. \n ** a second loop below computes the remaining 1 to 3 samples. */\n while(blkCnt > 0u)\n {\n /* C = A + offset */\n /* Add offset and then store the results in the destination buffer. */\n /* read samples from source */\n in1 = *pSrc;\n in2 = *(pSrc + 1);\n\n /* add offset to input */\n in1 = in1 + offset;\n\n /* read samples from source */\n in3 = *(pSrc + 2);\n\n /* add offset to input */\n in2 = in2 + offset;\n\n /* read samples from source */\n in4 = *(pSrc + 3);\n\n /* add offset to input */\n in3 = in3 + offset;\n\n /* store result to destination */\n *pDst = in1;\n\n /* add offset to input */\n in4 = in4 + offset;\n\n /* store result to destination */\n *(pDst + 1) = in2;\n\n /* store result to destination */\n *(pDst + 2) = in3;\n\n /* store result to destination */\n *(pDst + 3) = in4;\n\n /* update pointers to process next samples */\n pSrc += 4u;\n pDst += 4u;\n\n /* Decrement the loop counter */\n blkCnt--;\n }\n\n /* If the blockSize is not a multiple of 4, compute any remaining output samples here. \n ** No loop unrolling is used. 
*/\n blkCnt = blockSize % 0x4u;\n\n#else\n\n /* Run the below code for Cortex-M0 */\n\n /* Initialize blkCnt with number of samples */\n blkCnt = blockSize;\n\n#endif /* #ifndef ARM_MATH_CM0 */\n\n while(blkCnt > 0u)\n {\n /* C = A + offset */\n /* Add offset and then store the result in the destination buffer. */\n *pDst++ = (*pSrc++) + offset;\n\n /* Decrement the loop counter */\n blkCnt--;\n }\n}\n\n/** \n * @} end of offset group \n */\n", "meta": {"content_hash": "f2b978d8b36fff3a0eae28c44484593c", "timestamp": "", "source": "github", "line_count": 125, "max_line_length": 96, "avg_line_length": 22.592, "alnum_prop": 0.5439093484419264, "repo_name": "jys0305/highspeedlogger", "id": "d7ca663bbddb46767e61f4c926942e9b5892f6e8", "size": "3945", "binary": false, "copies": "56", "ref": "refs/heads/master", "path": "sw/libs/STM32F4xx_DSP_StdPeriph_Lib_V1.1.0/Libraries/CMSIS/DSP_Lib/Source/BasicMathFunctions/arm_offset_f32.c", "mode": "33188", "license": "mit", "language": [{"name": "Assembly", "bytes": "280928"}, {"name": "C", "bytes": "21114329"}, {"name": "C++", "bytes": "100991"}, {"name": "HTML", "bytes": "455966"}, {"name": "LSL", "bytes": "47664"}, {"name": "Makefile", "bytes": "4331"}, {"name": "Objective-C", "bytes": "1718"}]}} {"text": "\n\npackage javassist.bytecode.stackmap;\n\nimport javassist.bytecode.*;\nimport java.util.HashMap;\nimport java.util.ArrayList;\n\n/**\n * A basic block is a sequence of bytecode that does not contain jump/branch\n * instructions except at the last bytecode.\n * Since Java7 or later does not allow JSR, this class throws an exception when\n * it finds JSR.\n */\npublic class BasicBlock {\n static class JsrBytecode extends BadBytecode {\n JsrBytecode() { super(\"JSR\"); }\n }\n\n protected int position, length;\n protected int incoming; // the number of incoming branches.\n protected BasicBlock[] exit; // null if the block is a leaf.\n protected boolean stop; // true if the block ends with an unconditional jump. 
\n protected Catch toCatch;\n\n protected BasicBlock(int pos) {\n position = pos;\n length = 0;\n incoming = 0;\n }\n\n public static BasicBlock find(BasicBlock[] blocks, int pos)\n throws BadBytecode\n {\n for (int i = 0; i < blocks.length; i++) {\n int iPos = blocks[i].position;\n if (iPos <= pos && pos < iPos + blocks[i].length)\n return blocks[i];\n }\n\n throw new BadBytecode(\"no basic block at \" + pos);\n }\n\n public static class Catch {\n public Catch next;\n public BasicBlock body;\n public int typeIndex;\n Catch(BasicBlock b, int i, Catch c) {\n body = b;\n typeIndex = i;\n next = c;\n }\n }\n\n public String toString() {\n StringBuffer sbuf = new StringBuffer();\n String cname = this.getClass().getName();\n int i = cname.lastIndexOf('.');\n sbuf.append(i < 0 ? cname : cname.substring(i + 1));\n sbuf.append(\"[\");\n toString2(sbuf);\n sbuf.append(\"]\");\n return sbuf.toString();\n }\n\n protected void toString2(StringBuffer sbuf) {\n sbuf.append(\"pos=\").append(position).append(\", len=\")\n .append(length).append(\", in=\").append(incoming)\n .append(\", exit{\");\n if (exit != null) {\n for (int i = 0; i < exit.length; i++)\n sbuf.append(exit[i].position).append(\",\");\n }\n\n sbuf.append(\"}, {\");\n Catch th = toCatch;\n while (th != null) {\n sbuf.append(\"(\").append(th.body.position).append(\", \")\n .append(th.typeIndex).append(\"), \");\n th = th.next;\n }\n\n sbuf.append(\"}\");\n }\n\n static class Mark implements Comparable {\n int position;\n BasicBlock block;\n BasicBlock[] jump;\n boolean alwaysJmp; // true if an unconditional branch.\n int size; // 0 unless the mark indicates RETURN etc. 
\n Catch catcher;\n\n Mark(int p) {\n position = p;\n block = null;\n jump = null;\n alwaysJmp = false;\n size = 0;\n catcher = null;\n }\n\n public int compareTo(Object obj) {\n if (obj instanceof Mark) {\n int pos = ((Mark)obj).position;\n return position - pos;\n }\n\n return -1;\n }\n\n void setJump(BasicBlock[] bb, int s, boolean always) {\n jump = bb;\n size = s;\n alwaysJmp = always;\n }\n }\n\n public static class Maker {\n /* Override these two methods if a subclass of BasicBlock must be\n * instantiated.\n */\n protected BasicBlock makeBlock(int pos) {\n return new BasicBlock(pos);\n }\n\n protected BasicBlock[] makeArray(int size) {\n return new BasicBlock[size];\n }\n\n private BasicBlock[] makeArray(BasicBlock b) {\n BasicBlock[] array = makeArray(1);\n array[0] = b;\n return array;\n }\n\n private BasicBlock[] makeArray(BasicBlock b1, BasicBlock b2) {\n BasicBlock[] array = makeArray(2);\n array[0] = b1;\n array[1] = b2;\n return array;\n }\n\n public BasicBlock[] make(MethodInfo minfo) throws BadBytecode {\n CodeAttribute ca = minfo.getCodeAttribute();\n if (ca == null)\n return null;\n\n CodeIterator ci = ca.iterator();\n return make(ci, 0, ci.getCodeLength(), ca.getExceptionTable());\n }\n\n public BasicBlock[] make(CodeIterator ci, int begin, int end,\n ExceptionTable et)\n throws BadBytecode\n {\n HashMap marks = makeMarks(ci, begin, end, et);\n BasicBlock[] bb = makeBlocks(marks);\n addCatchers(bb, et);\n return bb;\n }\n\n /* Branch target\n */\n private Mark makeMark(HashMap table, int pos) {\n return makeMark0(table, pos, true, true);\n }\n\n /* Branch instruction.\n * size > 0\n */\n private Mark makeMark(HashMap table, int pos, BasicBlock[] jump,\n int size, boolean always) {\n Mark m = makeMark0(table, pos, false, false);\n m.setJump(jump, size, always);\n return m;\n }\n\n private Mark makeMark0(HashMap table, int pos,\n boolean isBlockBegin, boolean isTarget) {\n Integer p = new Integer(pos);\n Mark m = (Mark)table.get(p);\n if (m == 
null) {\n m = new Mark(pos);\n table.put(p, m);\n }\n\n if (isBlockBegin) {\n if (m.block == null)\n m.block = makeBlock(pos);\n\n if (isTarget)\n m.block.incoming++;\n }\n\n return m;\n }\n\n private HashMap makeMarks(CodeIterator ci, int begin, int end,\n ExceptionTable et)\n throws BadBytecode\n {\n ci.begin();\n ci.move(begin);\n HashMap marks = new HashMap();\n while (ci.hasNext()) {\n int index = ci.next();\n if (index >= end)\n break;\n\n int op = ci.byteAt(index);\n if ((Opcode.IFEQ <= op && op <= Opcode.IF_ACMPNE)\n || op == Opcode.IFNULL || op == Opcode.IFNONNULL) {\n Mark to = makeMark(marks, index + ci.s16bitAt(index + 1));\n Mark next = makeMark(marks, index + 3);\n makeMark(marks, index, makeArray(to.block, next.block), 3, false);\n }\n else if (Opcode.GOTO <= op && op <= Opcode.LOOKUPSWITCH)\n switch (op) {\n case Opcode.GOTO :\n makeGoto(marks, index, index + ci.s16bitAt(index + 1), 3);\n break;\n case Opcode.JSR :\n makeJsr(marks, index, index + ci.s16bitAt(index + 1), 3);\n break;\n case Opcode.RET :\n makeMark(marks, index, null, 2, true);\n break;\n case Opcode.TABLESWITCH : {\n int pos = (index & ~3) + 4;\n int low = ci.s32bitAt(pos + 4);\n int high = ci.s32bitAt(pos + 8);\n int ncases = high - low + 1;\n BasicBlock[] to = makeArray(ncases + 1);\n to[0] = makeMark(marks, index + ci.s32bitAt(pos)).block; // default branch target\n int p = pos + 12;\n int n = p + ncases * 4;\n int k = 1;\n while (p < n) {\n to[k++] = makeMark(marks, index + ci.s32bitAt(p)).block;\n p += 4;\n }\n makeMark(marks, index, to, n - index, true);\n break; }\n case Opcode.LOOKUPSWITCH : {\n int pos = (index & ~3) + 4;\n int ncases = ci.s32bitAt(pos + 4);\n BasicBlock[] to = makeArray(ncases + 1);\n to[0] = makeMark(marks, index + ci.s32bitAt(pos)).block; // default branch target\n int p = pos + 8 + 4;\n int n = p + ncases * 8 - 4;\n int k = 1;\n while (p < n) {\n to[k++] = makeMark(marks, index + ci.s32bitAt(p)).block;\n p += 8;\n }\n makeMark(marks, index, to, n - 
index, true);\n break; }\n }\n else if ((Opcode.IRETURN <= op && op <= Opcode.RETURN) || op == Opcode.ATHROW)\n makeMark(marks, index, null, 1, true);\n else if (op == Opcode.GOTO_W)\n makeGoto(marks, index, index + ci.s32bitAt(index + 1), 5);\n else if (op == Opcode.JSR_W)\n makeJsr(marks, index, index + ci.s32bitAt(index + 1), 5);\n else if (op == Opcode.WIDE && ci.byteAt(index + 1) == Opcode.RET)\n makeMark(marks, index, null, 4, true);\n }\n\n if (et != null) {\n int i = et.size();\n while (--i >= 0) {\n makeMark0(marks, et.startPc(i), true, false);\n makeMark(marks, et.handlerPc(i));\n }\n }\n\n return marks;\n }\n\n private void makeGoto(HashMap marks, int pos, int target, int size) {\n Mark to = makeMark(marks, target);\n BasicBlock[] jumps = makeArray(to.block);\n makeMark(marks, pos, jumps, size, true);\n }\n\n /*\n * We could ignore JSR since Java 7 or later does not allow it.\n * See The JVM Spec. Sec. 4.10.2.5.\n */\n protected void makeJsr(HashMap marks, int pos, int target, int size) throws BadBytecode {\n /*\n Mark to = makeMark(marks, target);\n Mark next = makeMark(marks, pos + size);\n BasicBlock[] jumps = makeArray(to.block, next.block);\n makeMark(marks, pos, jumps, size, false);\n */\n throw new JsrBytecode();\n }\n\n private BasicBlock[] makeBlocks(HashMap markTable) {\n Mark[] marks = (Mark[])markTable.values()\n .toArray(new Mark[markTable.size()]);\n java.util.Arrays.sort(marks);\n ArrayList blocks = new ArrayList();\n int i = 0;\n BasicBlock prev;\n if (marks.length > 0 && marks[0].position == 0 && marks[0].block != null)\n prev = getBBlock(marks[i++]);\n else\n prev = makeBlock(0);\n\n blocks.add(prev);\n while (i < marks.length) {\n Mark m = marks[i++];\n BasicBlock bb = getBBlock(m);\n if (bb == null) {\n // the mark indicates a branch instruction\n if (prev.length > 0) {\n // the previous mark already has exits.\n prev = makeBlock(prev.position + prev.length);\n blocks.add(prev);\n }\n\n prev.length = m.position + m.size - 
prev.position;\n prev.exit = m.jump;\n prev.stop = m.alwaysJmp;\n }\n else {\n // the mark indicates a branch target\n if (prev.length == 0) {\n prev.length = m.position - prev.position;\n bb.incoming++;\n prev.exit = makeArray(bb);\n }\n else {\n // the previous mark already has exits.\n int prevPos = prev.position;\n if (prevPos + prev.length < m.position) {\n prev = makeBlock(prevPos + prev.length);\n prev.length = m.position - prevPos;\n // the incoming flow from dead code is not counted\n // bb.incoming++;\n prev.exit = makeArray(bb);\n }\n }\n\n blocks.add(bb);\n prev = bb;\n }\n }\n\n return (BasicBlock[])blocks.toArray(makeArray(blocks.size()));\n }\n\n private static BasicBlock getBBlock(Mark m) {\n BasicBlock b = m.block;\n if (b != null && m.size > 0) {\n b.exit = m.jump;\n b.length = m.size;\n b.stop = m.alwaysJmp;\n }\n\n return b;\n }\n\n private void addCatchers(BasicBlock[] blocks, ExceptionTable et)\n throws BadBytecode\n {\n if (et == null)\n return;\n\n int i = et.size();\n while (--i >= 0) {\n BasicBlock handler = find(blocks, et.handlerPc(i));\n int start = et.startPc(i);\n int end = et.endPc(i);\n int type = et.catchType(i);\n handler.incoming--;\n for (int k = 0; k < blocks.length; k++) {\n BasicBlock bb = blocks[k];\n int iPos = bb.position;\n if (start <= iPos && iPos < end) {\n bb.toCatch = new Catch(handler, type, bb.toCatch);\n handler.incoming++;\n }\n }\n }\n }\n }\n}\n", "meta": {"content_hash": "f1a0cceb83de5d2bb6046c9fcb64b93d", "timestamp": "", "source": "github", "line_count": 391, "max_line_length": 107, "avg_line_length": 35.18414322250639, "alnum_prop": 0.4474085919895326, "repo_name": "oneliang/third-party-lib", "id": "6213e22a64b2b627e9be31e387205d826fb1866d", "size": "14461", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "javassist/javassist/bytecode/stackmap/BasicBlock.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "HTML", "bytes": "31084"}, {"name": "Java", "bytes": 
"14382753"}]}} {"text": "/* System calls. */\n#define SEND\t\t 1\t/* function code for sending messages */\n#define RECEIVE\t\t 2\t/* function code for receiving messages */\n#define BOTH\t\t 3\t/* function code for SEND + RECEIVE */\n#define ANY (NR_PROCS+100)\t/* receive(ANY, buf) accepts from any source */\n\n/* Task numbers, function codes and reply codes. */\n\n#define TTY -NR_TASKS\t/* terminal I/O class */\n#\tdefine TTY_READ\t 3\t/* fcn code for reading from tty */\n#\tdefine TTY_WRITE 4\t/* fcn code for writing to tty */\n#\tdefine TTY_IOCTL 5\t/* fcn code for ioctl */\n#\tdefine TTY_SETPGRP 6\t/* fcn code for setpgrp */\n#\tdefine TTY_OPEN 7\t/* fcn code for opening tty */\n#\tdefine TTY_CLOSE 8\t/* fcn code for closing tty */\n#\tdefine SUSPEND\t -998\t/* used in interrupts when tty has no data */\n\n#ifdef AM_KERNEL\n#define\tAMOEBA\n#endif\n\n#ifdef AMOEBA\n\n/* There are AM_NTASK copies of the amoeba kernel task.\n * If you change AM_NTASKS be sure to adjust kernel/table.c and fs/table.c\n */\n#define AM_NTASKS\t 4\t/* number of kernel tasks of this class */\n\n#define\tAMINT_CLASS\t (TTY+1)\t/* Amoeba event handler */\n#define AMOEBA_CLASS \t (AMINT_CLASS+AM_NTASKS) /* transaction handlers */\n#\tdefine ETHER_ARRIV 1\t/* fcn code for packet arrival */\n#\tdefine AM_TRANS 2\t/* amoeba transaction */\t\t\n#\tdefine AM_GETREQ 3\t/* amoeba getrequest */\n#\tdefine AM_PUTREP 4\t/* amoeba putrep */\n#\tdefine AM_REVIVE 6\t/* used by kernel task to revive luser task */\n#\tdefine AM_TIMEOUT 8\t/* used to talk to clock task */\n#\tdefine AM_PUTSIG 9\t/* when the luser hits the DEL ! */\n#\tdefine AM_TASK_DIED 10 /* sent if task died during a transaction */\n\n#else\t/* if AMOEBA not defined */\n\n#define\tAMOEBA_CLASS\tTTY\n\n#endif /* AMOEBA */\n\n/*\n * New class definitions should go here and should be defined relative\n * to AMOEBA_CLASS (ie. 
as AMOEBA_CLASS+n, for the nth task added).\n */\n\n#define IDLE (AMOEBA_CLASS+1)\t/* task to run when there's nothing to run */\n\n#define PRINTER -7\t\t/* printer I/O class */\n/* The printer uses the same commands as TTY. */\n\n#define WINCHESTER -6\t/* winchester (hard) disk class */\n#define FLOPPY -5\t/* floppy disk class */\n#\tdefine DISK_READ 3\t/* fcn code to DISK (must equal TTY_READ) */\n#\tdefine DISK_WRITE 4\t/* fcn code to DISK (must equal TTY_WRITE) */\n#\tdefine DISK_IOCTL 5\t/* fcn code for setting up RAM disk */\n#\tdefine SCATTERED_IO 6\t/* fcn code for multiple reads/writes */\n#\tdefine OPTIONAL_IO 16\t/* modifier to DISK_* codes within vector */\n\n#define MEM -4\t/* /dev/ram, /dev/(k)mem and /dev/null class */\n#\tdefine RAM_DEV 0\t/* minor device for /dev/ram */\n#\tdefine MEM_DEV 1\t/* minor device for /dev/mem */\n#\tdefine KMEM_DEV 2\t/* minor device for /dev/kmem */\n#\tdefine NULL_DEV 3\t/* minor device for /dev/null */\n#if (CHIP == INTEL)\n#\tdefine PORT_DEV 4\t/* minor device for /dev/port */\n#endif\n\n#define CLOCK -3\t/* clock class */\n#\tdefine SET_ALARM 1\t/* fcn code to CLOCK, set up alarm */\n#\tdefine GET_TIME\t 3\t/* fcn code to CLOCK, get real time */\n#\tdefine SET_TIME\t 4\t/* fcn code to CLOCK, set real time */\n#\tdefine REAL_TIME 1\t/* reply from CLOCK: here is real time */\n\n#define SYSTASK -2\t/* internal functions */\n#\tdefine SYS_XIT 1\t/* fcn code for sys_xit(parent, proc) */\n#\tdefine SYS_GETSP 2\t/* fcn code for sys_sp(proc, &new_sp) */\n#\tdefine SYS_SIG 3\t/* fcn code for sys_sig(proc, sig) */\n#\tdefine SYS_FORK 4\t/* fcn code for sys_fork(parent, child) */\n#\tdefine SYS_NEWMAP 5\t/* fcn code for sys_newmap(procno, map_ptr) */\n#\tdefine SYS_COPY 6\t/* fcn code for sys_copy(ptr) */\n#\tdefine SYS_EXEC 7\t/* fcn code for sys_exec(procno, new_sp) */\n#\tdefine SYS_TIMES 8\t/* fcn code for sys_times(procno, bufptr) */\n#\tdefine SYS_ABORT 9\t/* fcn code for sys_abort() */\n#\tdefine SYS_FRESH 10\t/* fcn 
code for sys_fresh() (Atari only) */\n#\tdefine SYS_KILL 11\t/* fcn code for sys_kill(proc, sig) */\n#\tdefine SYS_GBOOT 12\t/* fcn code for sys_gboot(procno, bootptr) */\n#\tdefine SYS_UMAP 13\t/* fcn code for sys_umap(procno, etc) */\n#\tdefine SYS_MEM 14\t/* fcn code for sys_mem() */\n#\tdefine SYS_TRACE 15\t/* fcn code for sys_trace(req,pid,addr,data) */\n\n#define HARDWARE -1\t/* used as source on interrupt generated msgs*/\n\n/* Names of message fields for messages to CLOCK task. */\n#define DELTA_TICKS m6_l1\t/* alarm interval in clock ticks */\n#define FUNC_TO_CALL m6_f1\t/* pointer to function to call */\n#define NEW_TIME m6_l1\t/* value to set clock to (SET_TIME) */\n#define CLOCK_PROC_NR m6_i1\t/* which proc (or task) wants the alarm? */\n#define SECONDS_LEFT m6_l1\t/* how many seconds were remaining */\n\n/* Names of message fields used for messages to block and character tasks. */\n#define DEVICE m2_i1\t/* major-minor device */\n#define PROC_NR m2_i2\t/* which (proc) wants I/O? */\n#define COUNT m2_i3\t/* how many bytes to transfer */\n#define POSITION m2_l1\t/* file offset */\n#define ADDRESS m2_p1\t/* core buffer address */\n\n/* Names of message fields for messages to TTY task. */\n#define TTY_LINE m2_i1\t/* message parameter: terminal line */\n#define TTY_REQUEST m2_i3\t/* message parameter: ioctl request code */\n#define TTY_SPEK m2_l1\t/* message parameter: ioctl speed, erasing */\n#define TTY_FLAGS m2_l2\t/* message parameter: ioctl tty mode */\n#define TTY_PGRP m2_i3 /* message parameter: process group */\n\n/* Names of messages fields used in reply messages from tasks. */\n#define REP_PROC_NR m2_i1\t/* # of proc on whose behalf I/O was done */\n#define REP_STATUS m2_i2\t/* bytes transferred or error number */\n\n/* Names of fields for copy message to SYSTASK. 
*/\n#define SRC_SPACE m5_c1\t/* T or D space (stack is also D) */\n#define SRC_PROC_NR m5_i1\t/* process to copy from */\n#define SRC_BUFFER m5_l1\t/* virtual address where data come from */\n#define DST_SPACE m5_c2\t/* T or D space (stack is also D) */\n#define DST_PROC_NR m5_i2\t/* process to copy to */\n#define DST_BUFFER m5_l2\t/* virtual address where data go to */\n#define COPY_BYTES m5_l3\t/* number of bytes to copy */\n\n/* Field names for accounting, SYSTASK and miscellaneous. */\n#define USER_TIME m4_l1\t/* user time consumed by process */\n#define SYSTEM_TIME m4_l2\t/* system time consumed by process */\n#define CHILD_UTIME m4_l3\t/* user time consumed by process' children */\n#define CHILD_STIME m4_l4\t/* sys time consumed by process' children */\n\n#define PROC1 m1_i1\t/* indicates a process */\n#define PROC2 m1_i2\t/* indicates a process */\n#define PID m1_i3\t/* process id passed from MM to kernel */\n#define STACK_PTR m1_p1\t/* used for stack ptr in sys_exec, sys_getsp */\n#define PR m6_i1\t/* process number for sys_sig */\n#define SIGNUM m6_i2\t/* signal number for sys_sig */\n#define FUNC m6_f1\t/* function pointer for sys_sig */\n#define MEM_PTR m1_p1\t/* tells where memory map is for sys_newmap */\n#define CANCEL 0 /* general request to force a task to cancel */\n#define SIG_MAP m1_i2\t/* used by kernel for passing signal bit map */\n\n#ifdef AMOEBA\n\n/* Names of message fields for amoeba tasks */\n#define\tAM_OP\t\tm2_i1\t/* one of the above operators */\n#define\tAM_PROC_NR\tm2_i2\t/* process # of proc doing operation */\n#define\tAM_COUNT\tm2_i3\t/* size of buffer for operation */\n#define\tAM_ADDRESS\tm2_p1\t/* address of buffer for operation */\n\n/* For communication between MM and AMOEBA_CLASS kernel tasks */\n#define\tAM_STATUS\tm2_i3\t/* same use as REP_STATUS but for amoeba */\n#define\tAM_FREE_IT\tm2_l1\t/* 1=not a getreq, 0=is a getreq */\n\n/* Special for passing a physical address from the ethernet driver 
*/\n#define\tAM_PADDR\tm2_l1\t/* to the transaction layer */\n\n#endif /* AMOEBA */\n\n#define HARD_INT 2\t/* fcn code for all hardware interrupts */\n", "meta": {"content_hash": "f9df1ef6dac46637021009afd24c97de", "timestamp": "", "source": "github", "line_count": 166, "max_line_length": 79, "avg_line_length": 47.506024096385545, "alnum_prop": 0.6505199086989601, "repo_name": "macminix/MacMinix", "id": "2096f8ba765448daed32f1f58c71692b6eaa5fdd", "size": "7886", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "include/minix/com.h", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Assembly", "bytes": "360566"}, {"name": "C", "bytes": "3820041"}, {"name": "C++", "bytes": "18889"}, {"name": "IDL", "bytes": "54"}, {"name": "Smalltalk", "bytes": "3572"}]}} {"text": "\n\n \n \n \n \n \n \n\n", "meta": {"content_hash": "0c5c532741b17d9555e504f006cb4d6f", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 69, "avg_line_length": 37.77777777777778, "alnum_prop": 0.6176470588235294, "repo_name": "coderwjq/ZhaZhaShop", "id": "18d9057e931dc1eec93e181d35fb48d50721ab45", "size": "340", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "shop/src/main/res/drawable/bg_tv_indicator.xml", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "942134"}]}} {"text": "\ufeff#region Mr. Advice MVVM\n// // Mr. Advice MVVM\n// // A simple MVVM package using Mr. 
Advice aspect weaver\n// // https://github.com/ArxOne/MrAdvice.MVVM\n// // Released under MIT license http://opensource.org/licenses/mit-license.php\n#endregion\n\nnamespace ArxOne.MrAdvice.MVVM.Properties\n{\n /// \n /// Marks a simple auto property to be bound to a DependencyProperty\n /// Note: use , this class will be deprecated soon\n /// \n public class DependencyProperty : Dependency\n {\n }\n}\n", "meta": {"content_hash": "e6f968f6bcaa02164d63a014a40698b9", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 79, "avg_line_length": 31.764705882352942, "alnum_prop": 0.6888888888888889, "repo_name": "ArxOne/MrAdvice.MVVM", "id": "44988b339a9dae3106bd24bb29f525eac790a6ff", "size": "542", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "MrAdvice.MVVM/MVVM/Properties/DependencyProperty.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "174074"}]}} {"text": "package org.ringojs.util;\n\nimport org.mozilla.javascript.Context;\nimport org.mozilla.javascript.Scriptable;\nimport org.mozilla.javascript.ContextFactory;\nimport org.mozilla.javascript.debug.DebugFrame;\nimport org.mozilla.javascript.debug.DebuggableScript;\nimport org.mozilla.javascript.debug.Debugger;\n\nimport java.util.logging.Level;\nimport java.util.logging.Logger;\n\n/**\n * A base class for Debuggers and Profilers implemented in Javascript.\n * This allows to exclude the debugger/profiler module and all modules\n * it uses to be excluded from debugging/profiling.\n */\npublic abstract class DebuggerBase implements Debugger {\n\n String debuggerScript;\n int debuggerScriptDepth = 0;\n final Logger log = Logger.getLogger(DebuggerBase.class.getName());\n\n public abstract DebuggerBase createDebugger();\n\n public abstract Object createContextData();\n\n public abstract void handleCompilationDone(Context cx, DebuggableScript fnOrScript, String source);\n\n public abstract DebugFrame getScriptFrame(Context cx, DebuggableScript 
fnOrScript);\n\n public void attach() {\n attach(createContextData());\n }\n\n public void setDebuggerScript(String path) {\n debuggerScript = path;\n }\n\n public void install() {\n ContextFactory factory = Context.getCurrentContext().getFactory();\n factory.addListener(new ContextFactory.Listener() {\n public void contextCreated(Context cx) {\n DebuggerBase debugger = createDebugger();\n if (debugger != null) {\n debugger.attach(createContextData());\n }\n }\n public void contextReleased(Context cx) {\n }\n });\n }\n\n public void attach(Object contextData) {\n Context cx = Context.getCurrentContext();\n cx.setDebugger(this, contextData);\n cx.setOptimizationLevel(-1);\n cx.setGeneratingDebug(true);\n }\n\n public void detach() {\n Context cx = Context.getCurrentContext();\n cx.setDebugger(null, null);\n }\n\n public Object getContextData() {\n return Context.getCurrentContext().getDebuggerContextData();\n }\n\n public synchronized void suspend() {\n try {\n wait();\n } catch (InterruptedException ir) {\n Thread.currentThread().interrupt();\n }\n }\n\n public synchronized void resume() {\n notify();\n }\n\n public DebugFrame getFrame(Context cx, DebuggableScript fnOrScript) {\n String path = fnOrScript.getSourceName();\n if (log.isLoggable(Level.FINE)) {\n log.fine(\"Getting Frame for \" + path +\n \", debugger script depth is \" + debuggerScriptDepth);\n }\n if (debuggerScriptDepth > 0 || path.equals(debuggerScript)) {\n return new DebuggerScriptFrame();\n } else {\n return getScriptFrame(cx, fnOrScript);\n }\n }\n\n /**\n * Get a string representation for the given script\n * @param script a function or script\n * @return the file and/or function name of the script\n */\n static String getScriptName(DebuggableScript script) {\n if (script.isFunction()) {\n return script.getSourceName() + \": \" + script.getFunctionName();\n } else {\n return script.getSourceName();\n }\n }\n\n class DebuggerScriptFrame implements DebugFrame {\n\n public void onEnter(Context cx, 
Scriptable activation, Scriptable thisObj, Object[] args) {\n log.fine(\"Entering debugger script frame\");\n debuggerScriptDepth ++;\n }\n\n public void onExit(Context cx, boolean byThrow, Object resultOrException) {\n log.fine(\"Exiting debugger script frame\");\n debuggerScriptDepth --;\n }\n\n public void onLineChange(Context cx, int lineNumber) {}\n\n public void onExceptionThrown(Context cx, Throwable ex) {}\n\n public void onDebuggerStatement(Context cx) {}\n }\n\n}\n", "meta": {"content_hash": "7bc72a3b9cca751d9857aa4ea38a39f2", "timestamp": "", "source": "github", "line_count": 127, "max_line_length": 103, "avg_line_length": 31.275590551181104, "alnum_prop": 0.6467774420946626, "repo_name": "ringo/ringojs", "id": "28b86371965cce8c9e11aa1af139908c3c679f4d", "size": "3972", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "src/org/ringojs/util/DebuggerBase.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "141"}, {"name": "CSS", "bytes": "613"}, {"name": "Dockerfile", "bytes": "1007"}, {"name": "HTML", "bytes": "2026"}, {"name": "Java", "bytes": "363438"}, {"name": "JavaScript", "bytes": "913850"}, {"name": "Shell", "bytes": "2989"}]}} {"text": "\n\n \n \n \n \n Facecat\n\n \n \n\n \n \n \n \n \n \n \n
\n\n\n
\n
\n
\n \n Facecat\n
\n
\n \n
\n
\n
\n\n\n\n % if 'error' in locals():\n
An error happend while processing your request.
\n % end\n
\n

Upload a picture It's more fun if it's the picture of someone.

\n\n
\n \n
\n
\n
\n \n \n Browse… \n \n \n \n
\n
\n
\n\n
\n
\n \n
\n
\n\n
\n\n
\n\n\n
\n\n \n \n \n\n\n \n\n\n", "meta": {"content_hash": "0ff5431891cb2864bd51ef0adfd0e451", "timestamp": "", "source": "github", "line_count": 110, "max_line_length": 129, "avg_line_length": 36.68181818181818, "alnum_prop": 0.5241635687732342, "repo_name": "NicolasLM/python-runabove", "id": "c289353c9893e03aa4228b58f31f19256ca86a72", "size": "4035", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "examples/facecat/views/index.html", "mode": "33188", "license": "mit", "language": []}} {"text": "\ufeffnamespace AH.ModuleController.UI.DIET.Forms\n{\n partial class frmPatientListDietAssign\n {\n /// \n /// Required designer variable.\n /// \n private System.ComponentModel.IContainer components = null;\n\n /// \n /// Clean up any resources being used.\n /// \n /// true if managed resources should be disposed; otherwise, false.\n protected override void Dispose(bool disposing)\n {\n if (disposing && (components != null))\n {\n components.Dispose();\n }\n base.Dispose(disposing);\n }\n\n #region Windows Form Designer generated code\n\n /// \n /// Required method for Designer support - do not modify\n /// the contents of this method with the code editor.\n /// \n private void InitializeComponent()\n {\n System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle1 = new System.Windows.Forms.DataGridViewCellStyle();\n System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle2 = new System.Windows.Forms.DataGridViewCellStyle();\n System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle3 = new System.Windows.Forms.DataGridViewCellStyle();\n System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle4 = new System.Windows.Forms.DataGridViewCellStyle();\n System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle5 = new System.Windows.Forms.DataGridViewCellStyle();\n System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle6 = new System.Windows.Forms.DataGridViewCellStyle();\n this.groupBox4 = new 
System.Windows.Forms.GroupBox();\n this.btnShow = new System.Windows.Forms.Button();\n this.smartLabel4 = new AtiqsControlLibrary.SmartLabel();\n this.cboReceivingNurseStation = new AtiqsControlLibrary.SmartComboBox();\n this.dgvDietList = new AtiqsControlLibrary.SmartDataGridView();\n this.dgvPatientList = new AtiqsControlLibrary.SmartDataGridView();\n this.txtAdmissionID = new System.Windows.Forms.TextBox();\n this.txtDeitID = new System.Windows.Forms.TextBox();\n this.pnlMain.SuspendLayout();\n this.pnlTop.SuspendLayout();\n this.groupBox4.SuspendLayout();\n ((System.ComponentModel.ISupportInitialize)(this.dgvDietList)).BeginInit();\n ((System.ComponentModel.ISupportInitialize)(this.dgvPatientList)).BeginInit();\n this.SuspendLayout();\n // \n // btnTopClose\n // \n this.btnTopClose.Location = new System.Drawing.Point(1128, 12);\n // \n // frmLabel\n // \n this.frmLabel.Location = new System.Drawing.Point(393, 9);\n this.frmLabel.Size = new System.Drawing.Size(406, 33);\n this.frmLabel.Text = \"Assign Diet For Admitted Patient \";\n // \n // pnlMain\n // \n this.pnlMain.Controls.Add(this.dgvDietList);\n this.pnlMain.Controls.Add(this.groupBox4);\n this.pnlMain.Controls.Add(this.dgvPatientList);\n this.pnlMain.Location = new System.Drawing.Point(0, 58);\n this.pnlMain.Size = new System.Drawing.Size(1260, 606);\n // \n // pnlTop\n // \n this.pnlTop.Controls.Add(this.txtDeitID);\n this.pnlTop.Controls.Add(this.txtAdmissionID);\n this.pnlTop.Size = new System.Drawing.Size(1260, 58);\n this.pnlTop.Controls.SetChildIndex(this.frmLabel, 0);\n this.pnlTop.Controls.SetChildIndex(this.btnTopClose, 0);\n this.pnlTop.Controls.SetChildIndex(this.txtAdmissionID, 0);\n this.pnlTop.Controls.SetChildIndex(this.txtDeitID, 0);\n // \n // btnEdit\n // \n this.btnEdit.Location = new System.Drawing.Point(110, 673);\n this.btnEdit.Size = new System.Drawing.Size(19, 27);\n this.btnEdit.Visible = false;\n // \n // btnSave\n // \n this.btnSave.Location = new System.Drawing.Point(135, 
681);\n this.btnSave.Size = new System.Drawing.Size(33, 17);\n this.btnSave.Visible = false;\n // \n // btnDelete\n // \n this.btnDelete.Location = new System.Drawing.Point(50, 673);\n this.btnDelete.Size = new System.Drawing.Size(21, 27);\n this.btnDelete.Visible = false;\n // \n // btnNew\n // \n this.btnNew.Location = new System.Drawing.Point(1026, 670);\n this.btnNew.Click += new System.EventHandler(this.btnNew_Click);\n // \n // btnClose\n // \n this.btnClose.Location = new System.Drawing.Point(1140, 670);\n this.btnClose.TabIndex = 3;\n // \n // btnPrint\n // \n this.btnPrint.Location = new System.Drawing.Point(77, 673);\n this.btnPrint.Size = new System.Drawing.Size(27, 27);\n this.btnPrint.Visible = false;\n // \n // groupBox1\n // \n this.groupBox1.Location = new System.Drawing.Point(0, 709);\n this.groupBox1.Size = new System.Drawing.Size(1260, 25);\n // \n // groupBox4\n // \n this.groupBox4.BackColor = System.Drawing.Color.PaleTurquoise;\n this.groupBox4.Controls.Add(this.btnShow);\n this.groupBox4.Controls.Add(this.smartLabel4);\n this.groupBox4.Controls.Add(this.cboReceivingNurseStation);\n this.groupBox4.Font = new System.Drawing.Font(\"Microsoft Sans Serif\", 12F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));\n this.groupBox4.ForeColor = System.Drawing.Color.DarkBlue;\n this.groupBox4.Location = new System.Drawing.Point(3, 2);\n this.groupBox4.Name = \"groupBox4\";\n this.groupBox4.Size = new System.Drawing.Size(1256, 104);\n this.groupBox4.TabIndex = 5;\n this.groupBox4.TabStop = false;\n this.groupBox4.MouseHover += new System.EventHandler(this.groupBox4_MouseHover);\n // \n // btnShow\n // \n this.btnShow.BackColor = System.Drawing.Color.DarkSalmon;\n this.btnShow.Location = new System.Drawing.Point(540, 40);\n this.btnShow.Name = \"btnShow\";\n this.btnShow.Size = new System.Drawing.Size(238, 30);\n this.btnShow.TabIndex = 158;\n this.btnShow.Text = \"Click Patient List to Add Diet\";\n 
this.btnShow.UseVisualStyleBackColor = false;\n this.btnShow.Click += new System.EventHandler(this.btnShow_Click);\n // \n // smartLabel4\n // \n this.smartLabel4.AutoSize = true;\n this.smartLabel4.BackColor = System.Drawing.Color.PaleTurquoise;\n this.smartLabel4.Font = new System.Drawing.Font(\"Times New Roman\", 9.75F, System.Drawing.FontStyle.Bold);\n this.smartLabel4.Location = new System.Drawing.Point(42, 46);\n this.smartLabel4.Name = \"smartLabel4\";\n this.smartLabel4.Size = new System.Drawing.Size(120, 15);\n this.smartLabel4.TabIndex = 157;\n this.smartLabel4.Text = \"Select Nurse Station\";\n // \n // cboReceivingNurseStation\n // \n this.cboReceivingNurseStation.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;\n this.cboReceivingNurseStation.Font = new System.Drawing.Font(\"Microsoft Sans Serif\", 11F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));\n this.cboReceivingNurseStation.ForeColor = System.Drawing.Color.Blue;\n this.cboReceivingNurseStation.FormattingEnabled = true;\n this.cboReceivingNurseStation.Items.AddRange(new object[] {\n \"1\",\n \"2\"});\n this.cboReceivingNurseStation.Location = new System.Drawing.Point(168, 43);\n this.cboReceivingNurseStation.Name = \"cboReceivingNurseStation\";\n this.cboReceivingNurseStation.Size = new System.Drawing.Size(356, 26);\n this.cboReceivingNurseStation.TabIndex = 156;\n // \n // dgvDietList\n // \n this.dgvDietList.AllowUserToAddRows = false;\n this.dgvDietList.AllowUserToDeleteRows = false;\n this.dgvDietList.AllowUserToOrderColumns = true;\n this.dgvDietList.AllowUserToResizeColumns = false;\n this.dgvDietList.AllowUserToResizeRows = false;\n this.dgvDietList.BackgroundColor = System.Drawing.Color.White;\n this.dgvDietList.ColumnHeadersBorderStyle = System.Windows.Forms.DataGridViewHeaderBorderStyle.Single;\n dataGridViewCellStyle1.Alignment = System.Windows.Forms.DataGridViewContentAlignment.MiddleLeft;\n dataGridViewCellStyle1.BackColor = 
System.Drawing.Color.LightGreen;\n dataGridViewCellStyle1.Font = new System.Drawing.Font(\"Tahoma\", 8F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));\n dataGridViewCellStyle1.ForeColor = System.Drawing.Color.Black;\n dataGridViewCellStyle1.SelectionForeColor = System.Drawing.SystemColors.HighlightText;\n this.dgvDietList.ColumnHeadersDefaultCellStyle = dataGridViewCellStyle1;\n this.dgvDietList.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize;\n dataGridViewCellStyle2.Alignment = System.Windows.Forms.DataGridViewContentAlignment.MiddleLeft;\n dataGridViewCellStyle2.BackColor = System.Drawing.Color.White;\n dataGridViewCellStyle2.Font = new System.Drawing.Font(\"Tahoma\", 8F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));\n dataGridViewCellStyle2.ForeColor = System.Drawing.Color.Black;\n dataGridViewCellStyle2.SelectionBackColor = System.Drawing.Color.Lavender;\n dataGridViewCellStyle2.SelectionForeColor = System.Drawing.Color.Crimson;\n dataGridViewCellStyle2.WrapMode = System.Windows.Forms.DataGridViewTriState.False;\n this.dgvDietList.DefaultCellStyle = dataGridViewCellStyle2;\n this.dgvDietList.Location = new System.Drawing.Point(865, 75);\n this.dgvDietList.MultiSelect = false;\n this.dgvDietList.Name = \"dgvDietList\";\n this.dgvDietList.RowHeadersVisible = false;\n dataGridViewCellStyle3.BackColor = System.Drawing.Color.White;\n this.dgvDietList.RowsDefaultCellStyle = dataGridViewCellStyle3;\n this.dgvDietList.SelectionMode = System.Windows.Forms.DataGridViewSelectionMode.FullRowSelect;\n this.dgvDietList.Size = new System.Drawing.Size(272, 395);\n this.dgvDietList.TabIndex = 159;\n this.dgvDietList.CellMouseDoubleClick += new System.Windows.Forms.DataGridViewCellMouseEventHandler(this.dgvDietList_CellMouseDoubleClick);\n // \n // dgvPatientList\n // \n this.dgvPatientList.AllowUserToAddRows = false;\n 
this.dgvPatientList.AllowUserToDeleteRows = false;\n this.dgvPatientList.AllowUserToOrderColumns = true;\n this.dgvPatientList.AllowUserToResizeColumns = false;\n this.dgvPatientList.AllowUserToResizeRows = false;\n this.dgvPatientList.BackgroundColor = System.Drawing.Color.White;\n this.dgvPatientList.ColumnHeadersBorderStyle = System.Windows.Forms.DataGridViewHeaderBorderStyle.Single;\n dataGridViewCellStyle4.Alignment = System.Windows.Forms.DataGridViewContentAlignment.MiddleLeft;\n dataGridViewCellStyle4.BackColor = System.Drawing.Color.LightGreen;\n dataGridViewCellStyle4.Font = new System.Drawing.Font(\"Tahoma\", 8F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));\n dataGridViewCellStyle4.ForeColor = System.Drawing.Color.Black;\n dataGridViewCellStyle4.SelectionForeColor = System.Drawing.SystemColors.HighlightText;\n this.dgvPatientList.ColumnHeadersDefaultCellStyle = dataGridViewCellStyle4;\n this.dgvPatientList.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize;\n dataGridViewCellStyle5.Alignment = System.Windows.Forms.DataGridViewContentAlignment.MiddleLeft;\n dataGridViewCellStyle5.BackColor = System.Drawing.Color.White;\n dataGridViewCellStyle5.Font = new System.Drawing.Font(\"Tahoma\", 8F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0)));\n dataGridViewCellStyle5.ForeColor = System.Drawing.Color.Black;\n dataGridViewCellStyle5.SelectionBackColor = System.Drawing.Color.Lavender;\n dataGridViewCellStyle5.SelectionForeColor = System.Drawing.Color.Crimson;\n dataGridViewCellStyle5.WrapMode = System.Windows.Forms.DataGridViewTriState.False;\n this.dgvPatientList.DefaultCellStyle = dataGridViewCellStyle5;\n this.dgvPatientList.Location = new System.Drawing.Point(2, 106);\n this.dgvPatientList.MultiSelect = false;\n this.dgvPatientList.Name = \"dgvPatientList\";\n this.dgvPatientList.RowHeadersVisible = false;\n dataGridViewCellStyle6.BackColor = 
System.Drawing.Color.White;\n this.dgvPatientList.RowsDefaultCellStyle = dataGridViewCellStyle6;\n this.dgvPatientList.SelectionMode = System.Windows.Forms.DataGridViewSelectionMode.FullRowSelect;\n this.dgvPatientList.Size = new System.Drawing.Size(1253, 497);\n this.dgvPatientList.TabIndex = 6;\n this.dgvPatientList.CellClick += new System.Windows.Forms.DataGridViewCellEventHandler(this.dgvPatientList_CellClick);\n // \n // txtAdmissionID\n // \n this.txtAdmissionID.Location = new System.Drawing.Point(916, 9);\n this.txtAdmissionID.Name = \"txtAdmissionID\";\n this.txtAdmissionID.Size = new System.Drawing.Size(100, 20);\n this.txtAdmissionID.TabIndex = 161;\n this.txtAdmissionID.Visible = false;\n // \n // txtDeitID\n // \n this.txtDeitID.Location = new System.Drawing.Point(1022, 7);\n this.txtDeitID.Name = \"txtDeitID\";\n this.txtDeitID.Size = new System.Drawing.Size(100, 20);\n this.txtDeitID.TabIndex = 162;\n this.txtDeitID.Visible = false;\n // \n // frmPatientListDietAssign\n // \n this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);\n this.ClientSize = new System.Drawing.Size(1260, 734);\n this.isEnterTabAllow = true;\n this.Name = \"frmPatientListDietAssign\";\n this.Load += new System.EventHandler(this.frmPatientList_Load);\n this.pnlMain.ResumeLayout(false);\n this.pnlTop.ResumeLayout(false);\n this.pnlTop.PerformLayout();\n this.groupBox4.ResumeLayout(false);\n this.groupBox4.PerformLayout();\n ((System.ComponentModel.ISupportInitialize)(this.dgvDietList)).EndInit();\n ((System.ComponentModel.ISupportInitialize)(this.dgvPatientList)).EndInit();\n this.ResumeLayout(false);\n\n }\n\n #endregion\n\n private System.Windows.Forms.GroupBox groupBox4;\n private AtiqsControlLibrary.SmartDataGridView dgvPatientList;\n private AtiqsControlLibrary.SmartLabel smartLabel4;\n private AtiqsControlLibrary.SmartComboBox cboReceivingNurseStation;\n private System.Windows.Forms.Button btnShow;\n public System.Windows.Forms.TextBox txtAdmissionID;\n public 
System.Windows.Forms.TextBox txtDeitID;\n private AtiqsControlLibrary.SmartDataGridView dgvDietList;\n\n }\n}\n", "meta": {"content_hash": "7577a20e44e72cb7e2da2aed635be20c", "timestamp": "", "source": "github", "line_count": 286, "max_line_length": 184, "avg_line_length": 56.45454545454545, "alnum_prop": 0.6495726495726496, "repo_name": "atiq-shumon/DotNetProjects", "id": "63424a60f594df942497663d5c4bfb6bbaeb0f9a", "size": "16148", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Hospital_ERP_VS13-WCF_WF/AH.ModuleController/UI/DIET/Forms/frmPatientListDietAssign.Designer.cs", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ASP", "bytes": "1059021"}, {"name": "C#", "bytes": "39389238"}, {"name": "CSS", "bytes": "683218"}, {"name": "HTML", "bytes": "44772"}, {"name": "JavaScript", "bytes": "1343054"}, {"name": "PLpgSQL", "bytes": "340074"}, {"name": "Pascal", "bytes": "81971"}, {"name": "PowerShell", "bytes": "175142"}, {"name": "Puppet", "bytes": "2111"}, {"name": "Smalltalk", "bytes": "9"}, {"name": "XSLT", "bytes": "12347"}]}} {"text": "\npackage org.medipi.clinical.dao;\n\nimport java.util.List;\nimport org.medipi.clinical.entities.PatientGroup;\nimport org.springframework.stereotype.Repository;\n\n/**\n * Implementation of Data Access Object for Patient\n * @author rick@robinsonhq.com\n */\n@Repository\npublic class PatientGroupDAOImpl extends GenericDAOImpl implements PatientGroupDAO {\n @Override\n public List getAllGroups() {\n return this.getEntityManager().createNamedQuery(\"PatientGroup.findAll\", PatientGroup.class)\n .getResultList();\n }\n}", "meta": {"content_hash": "3bf94388278a7d76abcb340256e75471", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 99, "avg_line_length": 29.63157894736842, "alnum_prop": 0.7602131438721137, "repo_name": "Samuel789/MediPi", "id": "33522f5a6540bd8fb6f89dd443207ac049bb383a", "size": "1167", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": 
"Clinician/MediPiClinical/src/main/java/org/medipi/clinical/dao/PatientGroupDAOImpl.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "10012"}, {"name": "CSS", "bytes": "847678"}, {"name": "HTML", "bytes": "4238145"}, {"name": "Java", "bytes": "1942198"}, {"name": "JavaScript", "bytes": "2308166"}, {"name": "PHP", "bytes": "1684"}, {"name": "Python", "bytes": "66091"}, {"name": "Ruby", "bytes": "1183"}, {"name": "Shell", "bytes": "17053"}]}} {"text": "package enums;\n\npublic enum GameStatusType {\n\tGAME_WAITING,\n\tGAME_READY,\n\tGAME_PAUSE,\n\tGAME_RUNNING,\n\tGAME_END\n}\n", "meta": {"content_hash": "f493082fa8f2d360462e63e43976d837", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 28, "avg_line_length": 12.555555555555555, "alnum_prop": 0.7345132743362832, "repo_name": "ptutak/Warcaby", "id": "b49884d68c81fb001a43a5b878b7d5aac2c17e36", "size": "701", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/enums/GameStatusType.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "103900"}]}} {"text": "package com.taotao.model;\n\nimport java.util.Date;\n\n/**\n * TITLE:\u5e7f\u544a\u7684model\n * USER:TAOTK\n * DATA:2016/9/14\n * TIME:10:48\n * COMPANY:www.51pjia.com\n */\npublic class PjPoster {\n\n private Integer id; //int(11) NOT NULL,\n private Integer category_id;// int(11) DEFAULT NULL,\n private String category_name; //varchar(32) DEFAULT NULL,\n private Integer poster_sort; //int(11) DEFAULT NULL,\n private String poster_url; //varchar(50) DEFAULT NULL,\n private String poster_size; //varchar(32) DEFAULT NULL,\n private Date create_time;//datetime DEFAULT NULL,\n private Integer poster_status; //int(11) DEFAULT NULL,\n private Integer poster_address;\n private String categoryName;\n private String poster_small_url;//\u5c0f\u56fe\u7247\n\n public String getPoster_small_url() {\n return poster_small_url;\n }\n public void 
setPoster_small_url(String poster_small_url) {\n this.poster_small_url = poster_small_url;\n }\n public String getCategoryName() {\n return categoryName;\n }\n\n public void setCategoryName(String categoryName) {\n this.categoryName = categoryName;\n }\n\n public Integer getPoster_address() {\n return poster_address;\n }\n\n public void setPoster_address(Integer poster_address) {\n this.poster_address = poster_address;\n }\n\n public Integer getId() {\n return id;\n }\n\n public void setId(Integer id) {\n this.id = id;\n }\n\n public Integer getCategory_id() {\n return category_id;\n }\n\n public void setCategory_id(Integer category_id) {\n this.category_id = category_id;\n }\n\n public String getCategory_name() {\n return category_name;\n }\n\n public void setCategory_name(String category_name) {\n this.category_name = category_name;\n }\n\n public Integer getPoster_sort() {\n return poster_sort;\n }\n\n public void setPoster_sort(Integer poster_sort) {\n this.poster_sort = poster_sort;\n }\n\n public String getPoster_url() {\n return poster_url;\n }\n\n public void setPoster_url(String poster_url) {\n this.poster_url = poster_url;\n }\n\n public String getPoster_size() {\n return poster_size;\n }\n\n public void setPoster_size(String poster_size) {\n this.poster_size = poster_size;\n }\n\n public Date getCreate_time() {\n return create_time;\n }\n\n public void setCreate_time(Date create_time) {\n this.create_time = create_time;\n }\n\n public Integer getPoster_status() {\n return poster_status;\n }\n\n public void setPoster_status(Integer poster_status) {\n this.poster_status = poster_status;\n }\n\n\n @Override\n public String toString() {\n return \"PjPoster{\" +\n \"id=\" + id +\n \", category_id=\" + category_id +\n \", category_name='\" + category_name + '\\'' +\n \", poster_sort=\" + poster_sort +\n \", poster_url='\" + poster_url + '\\'' +\n \", poster_size='\" + poster_size + '\\'' +\n \", create_time=\" + create_time +\n \", poster_status=\" + 
poster_status +\n \", poster_address=\" + poster_address +\n \", categoryName='\" + categoryName + '\\'' +\n \", poster_small_url='\" + poster_small_url + '\\'' +\n '}';\n }\n}\n", "meta": {"content_hash": "7ab49f595d4875b5c338cb3b1ae66c93", "timestamp": "", "source": "github", "line_count": 129, "max_line_length": 66, "avg_line_length": 25.88372093023256, "alnum_prop": 0.5882000598981731, "repo_name": "17755332301/Tao-", "id": "c90f72f6b45864fc59a103913265bc15df0b29bc", "size": "3351", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "taotao-web/src/main/java/com/taotao/model/PjPoster.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ASP", "bytes": "31784"}, {"name": "C#", "bytes": "16030"}, {"name": "CSS", "bytes": "625671"}, {"name": "HTML", "bytes": "558187"}, {"name": "Java", "bytes": "280551"}, {"name": "JavaScript", "bytes": "4548934"}, {"name": "PHP", "bytes": "32951"}]}} {"text": "'use strict';\nvar $;\nvar _default = {\n\t'emission': '-',\n\t'qte': '-',\n\t'dist': '-'\n};\n\nfunction toggleCentre(type) {\n\tvar selector = ['.from-', ', .to-', ''].join(type + ' circle');\n\t$(selector).attr('r', function (idx, old) {return !old; })\n\t.toggleClass('highlight')\n\t;\n}\n\n/**\n * Toggle visibility of given route type. If 'type' is 'route' then display all types\n * @param {[type]} type of routes to display.\n * @param {[type]} state of input (checked or not)\n */\nfunction toggleRoute(type, state) {\n\tvar routeSet;\n\trouteSet = (type === 'route' ? $('.route')\n\t\t\t\t\t: $('.route.from-' + type + ', .route.to-' + type)\n\t\t\t\t);\n\n\trouteSet\n\t\t.attr('marker-mid', function (i, val) {return state ? 'url(#arw-mid)' : ''; })\n\t\t.attr('marker-end', function (i, val) {return state ? 
'url(#arw-end)' : ''; })\n\t\t.toggleClass('show')\n\t;\n}\n\n/**\n * Sum values from 'selector' items list\n * @param {string} selector selector\n * @return {int} sum\n*/\nfunction sumData(selector) {\n\tvar sum = 0;\n\tvar formatNumber = d3.format(',.0f');\n\n\t$(selector).each(function () {\n\t\tsum += +$(this).text().replace(/(k[gm]| t)/i, '');\n\t});\n\treturn formatNumber(Math.round(sum)).replace(/,/, ' ');\n}\n\n\nfunction updateTotal() {\n\n}\n\n$('input.route').change(function () {\n\tvar type = $(this).attr('id').replace('centre-', '');\n\t$('path.route').toggleClass('show');\n\ttoggleRoute(type, $(this).prop('checked'));\n\t$('#centre-epci, #centre-transfert, #centre-traitement')\n\t\t.attr('disabled', function (idx, old) {\n\t\t\treturn !old;\n\t\t});\n});\n$('input.emission').change(function () {\n\t$('text.emission').toggleClass('show');\n\t$('.emission .value').text(function (idx, old) {\n\t\treturn (old === _default.emission ? sumData('svg text.emission'): _default.emission);\n\t});\n});\n$('input.qte').change(function () {\n\t$('text.qte').toggleClass('show');\n\t$('.qte .value').text(function (idx, old) {\n\t\treturn (old === _default.qte ? sumData('svg text.qte'): _default.qte);\n\t});\n});\n$('input.dist').change(function () {\n\t$('text.dist').toggleClass('show');\n\t$('.dist .value').text(function (idx, old) {\n\t\treturn (old === _default.dist ? sumData('svg text.dist'): _default.dist);\n\t});\n});\n\n$('input').change(function () {\n\tvar type = $(this).attr('id').replace('centre-', '');\n\ttoggleCentre(type);\n\ttoggleRoute(type, $(this).prop('checked'));\n});\n\n\n/**\n * Update counter with data from given entity\n * @param {string} id entity id (used as selector)\n * @return {void} directly update UI\n */\nfunction updateCounter(id) {\n\t$('.emission .value').text(function () {\n\t\treturn sumData('svg text' + '.' + idify(id) + '.emission');\n\t});\n\t$('.qte .value').text(function () {\n\t\treturn sumData('svg text' + '.' 
+ idify(id) + '.qte');\n\t});\n\t$('.dist .value').text(function () {\n\t\treturn sumData('svg text' + '.' + idify(id) + '.dist');\n\t});\n}\n\n/**\n * Highlight entity's area\n * @param {string} eid entity's id\n * @return {void} directly update UI\n */\nfunction highlightEntity(eid) {\n\t// reset style on others elements\n\td3.selectAll('.entity').classed('active', false);\n\n\t// apply style to element(s)\n\td3.select('.entity.' + eid).classed('active', true);\n}\n\n/**\n * Show entity's *label*\n * @param {string} eid entity's id\n * @return {void} directly update UI\n */\nfunction showEntityLabel(eid) {\n\td3.selectAll('.entity-label').classed('show', false);\n\td3.selectAll('.entity-label.' + eid).classed('show', true);\n}\n\n/**\n * Show entity's *routes*\n * @param {string} eid entity's id\n * @return {void} directly update UI\n */\nfunction showEntityRoute(eid) {\n\td3.selectAll('.route')\n\t\t.classed('show', false)\n\t\t.attr('marker-mid', null)\n\t\t.attr('marker-end', null)\n\t;\n\td3.selectAll('.route.' 
+ eid)\n\t\t.classed('show', true)\n\t\t.attr('marker-mid', 'url(#arw-mid)')\n\t\t.attr('marker-end', 'url(#arw-end)')\n\t;\n}", "meta": {"content_hash": "7f49a82e511dfbb1990e3eb35005e8a0", "timestamp": "", "source": "github", "line_count": 145, "max_line_length": 87, "avg_line_length": 25.917241379310344, "alnum_prop": 0.6003193187865886, "repo_name": "edouard-lopez/ECV-IJBA", "id": "b277ac6769c9e9281f4a5287b6adb9dffac04376", "size": "3758", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/scripts/ui.utils.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "30095"}, {"name": "JavaScript", "bytes": "24947"}]}} {"text": "package org.radargun.stages.cache.generators;\n\nimport java.io.Serializable;\nimport java.lang.annotation.Annotation;\nimport java.lang.reflect.Constructor;\nimport java.util.Random;\n\nimport org.radargun.config.DefinitionElement;\nimport org.radargun.config.Init;\nimport org.radargun.config.Property;\nimport org.radargun.logging.Log;\nimport org.radargun.logging.LogFactory;\n\n/**\n * @author Radim Vansa <rvansa@redhat.com>\n */\n@DefinitionElement(name = \"jpa\", doc = \"Instantiates JPA entities. 
The constructor for the entities must match to the generateValue() method.\")\npublic class JpaValueGenerator implements ValueGenerator {\n protected static Log log = LogFactory.getLog(JpaValueGenerator.class);\n\n @Property(name = \"class\", doc = \"Fully qualified name of the value class.\", optional = false)\n private String clazzName;\n\n private Class clazz;\n private Class entityClazz;\n private Constructor ctor;\n\n @Init\n public void init() {\n try {\n entityClazz = (Class) Class.forName(\"javax.persistence.Entity\");\n clazz = Class.forName(clazzName);\n if (!clazz.isAnnotationPresent(entityClazz)) {\n throw new IllegalArgumentException(\"Class \" + clazz.getName() + \" is not an entity - no @Entity present\");\n }\n ctor = clazz.getConstructor(Object.class, int.class, Random.class);\n } catch (Exception e) {\n // trace as this can happen on master node\n log.trace(\"Could not initialize generator \" + this, e);\n }\n }\n\n @Override\n public Object generateValue(Object key, int size, Random random) {\n if (ctor == null) throw new IllegalStateException(\"The generator was not properly initialized\");\n try {\n return ctor.newInstance(key, size, random);\n } catch (Exception e) {\n throw new IllegalStateException(e);\n }\n }\n\n @Override\n public int sizeOf(Object value) {\n if (value instanceof JpaValue) {\n return ((JpaValue) value).size();\n } else {\n throw new IllegalArgumentException();\n }\n }\n\n @Override\n public boolean checkValue(Object value, Object key, int expectedSize) {\n if (clazz == null) throw new IllegalStateException(\"The generator was not properly initialized\");\n return clazz.isInstance(value) && ((JpaValue) value).check(expectedSize);\n }\n\n public static String getRandomString(int size, Random random) {\n StringBuilder sb = new StringBuilder(size);\n for (int i = 0; i < size; ++i) {\n sb.append((char) (random.nextInt(26) + 'A'));\n }\n return sb.toString();\n }\n\n public abstract static class JpaValue implements Serializable 
{\n public int size() {\n throw new UnsupportedOperationException();\n }\n\n public boolean check(int expectedSize) {\n return true;\n }\n }\n\n}\n", "meta": {"content_hash": "c5f03379a0c571b80855feb4f6da4571", "timestamp": "", "source": "github", "line_count": 86, "max_line_length": 143, "avg_line_length": 33.372093023255815, "alnum_prop": 0.6721254355400696, "repo_name": "rmacor/radargun", "id": "f132c47f96782b6dc7cb7cbe91a98983f4bdbf73", "size": "2870", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "extensions/cache/src/main/java/org/radargun/stages/cache/generators/JpaValueGenerator.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "1168"}, {"name": "FreeMarker", "bytes": "31553"}, {"name": "Java", "bytes": "1681217"}, {"name": "JavaScript", "bytes": "1900"}, {"name": "Protocol Buffer", "bytes": "809"}, {"name": "Python", "bytes": "625"}, {"name": "Shell", "bytes": "33354"}]}} {"text": "\n\n\n#include \"mitkGeometry2D.h\"\n#include \n\n\nmitk::Geometry2D::Geometry2D()\n : m_ScaleFactorMMPerUnitX( 1.0 ),\n m_ScaleFactorMMPerUnitY( 1.0 ),\n m_ReferenceGeometry( NULL )\n{\n}\n\nmitk::Geometry2D::Geometry2D(const Geometry2D& other)\n : Geometry3D(other), m_ScaleFactorMMPerUnitX( other.m_ScaleFactorMMPerUnitX),\n m_ScaleFactorMMPerUnitY( other.m_ScaleFactorMMPerUnitY),\n m_ReferenceGeometry( other.m_ReferenceGeometry )\n{\n}\n\n\n\nmitk::Geometry2D::~Geometry2D()\n{\n}\n\n\nvoid\nmitk::Geometry2D::SetIndexToWorldTransform(\n mitk::AffineTransform3D* transform)\n{\n Superclass::SetIndexToWorldTransform(transform);\n\n m_ScaleFactorMMPerUnitX=GetExtentInMM(0)/GetExtent(0);\n m_ScaleFactorMMPerUnitY=GetExtentInMM(1)/GetExtent(1);\n\n assert(m_ScaleFactorMMPerUnitX(m_BoundingBox.GetPointer())->IsInside(pt3d_units);\n}\n\n\nvoid\nmitk::Geometry2D::Map(const mitk::Point2D &pt2d_mm, mitk::Point3D &pt3d_mm) const\n{\n Point3D pt3d_units;\n pt3d_units[0]=pt2d_mm[0]/m_ScaleFactorMMPerUnitX;\n 
pt3d_units[1]=pt2d_mm[1]/m_ScaleFactorMMPerUnitY;\n pt3d_units[2]=0;\n pt3d_mm = GetParametricTransform()->TransformPoint(pt3d_units);\n}\n\n\nvoid\nmitk::Geometry2D::IndexToWorld(\n const mitk::Point2D &/*pt_units*/, mitk::Point2D &/*pt_mm*/) const\n{\n itkExceptionMacro(<< \"No general transform possible (only affine) ==> no general\" \\\n \" IndexToWorld(const mitk::Point2D &pt_mm, mitk::Point2D &pt_units)\" \\\n \" possible. Has to be implemented in sub-class.\");\n}\n\n\nvoid\nmitk::Geometry2D::WorldToIndex(\n const mitk::Point2D &/*pt_mm*/, mitk::Point2D &/*pt_units*/) const\n{\n itkExceptionMacro(<< \"No general back transform possible (only affine) ==> no general\" \\\n \" WorldToIndex(const mitk::Point2D &pt_mm, mitk::Point2D &pt_units)\" \\\n \" possible. Has to be implemented in sub-class.\");\n}\n\n\nvoid\nmitk::Geometry2D::IndexToWorld(const mitk::Point2D &/*atPt2d_units*/,\n const mitk::Vector2D &/*vec_units*/, mitk::Vector2D &/*vec_mm*/) const\n{\n itkExceptionMacro(<< \"No general transform possible (only affine) ==> no general\" \\\n \" IndexToWorld(const mitk::Vector2D &vec_mm, mitk::Vector2D &vec_units)\" \\\n \" possible. Has to be implemented in sub-class.\");\n}\n\n\nvoid\nmitk::Geometry2D::WorldToIndex(const mitk::Point2D &/*atPt2d_mm*/,\n const mitk::Vector2D &/*vec_mm*/, mitk::Vector2D &/*vec_units*/) const\n{\n itkExceptionMacro(<< \"No general back transform possible (only affine) ==> no general\" \\\n \" WorldToIndex(const mitk::Vector2D &vec_mm, mitk::Vector2D &vec_units)\" \\\n \" possible. 
Has to be implemented in sub-class.\");\n}\n\nvoid\nmitk::Geometry2D::SetSizeInUnits(mitk::ScalarType width, mitk::ScalarType height)\n{\n ScalarType bounds[6]={0, width, 0, height, 0, 1};\n ScalarType extent, newextentInMM;\n if(GetExtent(0)>0)\n {\n extent = GetExtent(0);\n if(width>extent)\n newextentInMM = GetExtentInMM(0)/width*extent;\n else\n newextentInMM = GetExtentInMM(0)*extent/width;\n SetExtentInMM(0, newextentInMM);\n }\n if(GetExtent(1)>0)\n {\n extent = GetExtent(1);\n if(width>extent)\n newextentInMM = GetExtentInMM(1)/height*extent;\n else\n newextentInMM = GetExtentInMM(1)*extent/height;\n SetExtentInMM(1, newextentInMM);\n }\n SetBounds(bounds);\n}\n\n\nbool\nmitk::Geometry2D::Project(\n const mitk::Point3D &pt3d_mm, mitk::Point3D &projectedPt3d_mm) const\n{\n assert(m_BoundingBox.IsNotNull());\n\n Point3D pt3d_units;\n BackTransform(pt3d_mm, pt3d_units);\n pt3d_units[2] = 0;\n projectedPt3d_mm = GetParametricTransform()->TransformPoint(pt3d_units);\n return const_cast(m_BoundingBox.GetPointer())->IsInside(pt3d_units);\n}\n\nbool\nmitk::Geometry2D::Project(const mitk::Vector3D &vec3d_mm, mitk::Vector3D &projectedVec3d_mm) const\n{\n assert(m_BoundingBox.IsNotNull());\n\n Vector3D vec3d_units;\n BackTransform(vec3d_mm, vec3d_units);\n vec3d_units[2] = 0;\n projectedVec3d_mm = GetParametricTransform()->TransformVector(vec3d_units);\n return true;\n}\n\nbool\nmitk::Geometry2D::Project(const mitk::Point3D & atPt3d_mm,\n const mitk::Vector3D &vec3d_mm, mitk::Vector3D &projectedVec3d_mm) const\n{\n MITK_WARN << \"Deprecated function! 
Call Project(vec3D,vec3D) instead.\";\n assert(m_BoundingBox.IsNotNull());\n\n Vector3D vec3d_units;\n BackTransform(atPt3d_mm, vec3d_mm, vec3d_units);\n vec3d_units[2] = 0;\n projectedVec3d_mm = GetParametricTransform()->TransformVector(vec3d_units);\n\n Point3D pt3d_units;\n BackTransform(atPt3d_mm, pt3d_units);\n return const_cast(m_BoundingBox.GetPointer())->IsInside(pt3d_units);\n}\n\n\nbool\nmitk::Geometry2D::Map(const mitk::Point3D & atPt3d_mm,\n const mitk::Vector3D &vec3d_mm, mitk::Vector2D &vec2d_mm) const\n{\n Point2D pt2d_mm_start, pt2d_mm_end;\n Point3D pt3d_mm_end;\n bool inside=Map(atPt3d_mm, pt2d_mm_start);\n pt3d_mm_end = atPt3d_mm+vec3d_mm;\n inside&=Map(pt3d_mm_end, pt2d_mm_end);\n vec2d_mm=pt2d_mm_end-pt2d_mm_start;\n return inside;\n}\n\n\nvoid\nmitk::Geometry2D::Map(const mitk::Point2D &/*atPt2d_mm*/,\n const mitk::Vector2D &/*vec2d_mm*/, mitk::Vector3D &/*vec3d_mm*/) const\n{\n //@todo implement parallel to the other Map method!\n assert(false);\n}\n\n\nmitk::ScalarType\nmitk::Geometry2D::SignedDistance(const mitk::Point3D& pt3d_mm) const\n{\n Point3D projectedPoint;\n Project(pt3d_mm, projectedPoint);\n Vector3D direction = pt3d_mm-projectedPoint;\n ScalarType distance = direction.GetNorm();\n\n if(IsAbove(pt3d_mm) == false)\n distance*=-1.0;\n\n return distance;\n}\n\nbool\nmitk::Geometry2D::IsAbove(const mitk::Point3D& pt3d_mm) const\n{\n Point3D pt3d_units;\n Geometry3D::WorldToIndex(pt3d_mm, pt3d_units);\n return (pt3d_units[2] > m_BoundingBox->GetBounds()[4]);\n}\n\nitk::LightObject::Pointer\nmitk::Geometry2D::InternalClone() const\n{\n Self::Pointer newGeometry = new Geometry2D(*this);\n newGeometry->UnRegister();\n return newGeometry.GetPointer();\n}\n\nvoid\nmitk::Geometry2D::PrintSelf(std::ostream& os, itk::Indent indent) const\n{\n Superclass::PrintSelf(os,indent);\n os << indent << \" ScaleFactorMMPerUnitX: \"\n << m_ScaleFactorMMPerUnitX << std::endl;\n os << indent << \" ScaleFactorMMPerUnitY: \"\n << m_ScaleFactorMMPerUnitY << 
std::endl;\n}\n\nvoid\nmitk::Geometry2D::SetReferenceGeometry( mitk::Geometry3D *geometry )\n{\n m_ReferenceGeometry = geometry;\n}\n\nmitk::Geometry3D *\nmitk::Geometry2D::GetReferenceGeometry() const\n{\n return m_ReferenceGeometry;\n}\n\nbool\nmitk::Geometry2D::HasReferenceGeometry() const\n{\n return ( m_ReferenceGeometry != NULL );\n}\n", "meta": {"content_hash": "d76ad4d788fa7a514bf6dab0c34c7577", "timestamp": "", "source": "github", "line_count": 270, "max_line_length": 98, "avg_line_length": 27.02962962962963, "alnum_prop": 0.7158125513839408, "repo_name": "nocnokneo/MITK", "id": "6f23c9171007a8f9826011462cd9b16b7117cf21", "size": "7796", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Core/Code/DataManagement/mitkGeometry2D.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "C", "bytes": "5991982"}, {"name": "C++", "bytes": "29934042"}, {"name": "CSS", "bytes": "52056"}, {"name": "IDL", "bytes": "5583"}, {"name": "Java", "bytes": "350330"}, {"name": "JavaScript", "bytes": "287054"}, {"name": "Objective-C", "bytes": "606620"}, {"name": "Perl", "bytes": "982"}, {"name": "Python", "bytes": "7545"}, {"name": "Shell", "bytes": "5438"}, {"name": "TeX", "bytes": "1204"}, {"name": "XSLT", "bytes": "30684"}]}} {"text": " '',\n );\n \n /**\n * creates a new importer from an importexport definition\n * \n * @param Tinebase_Model_ImportExportDefinition $_definition\n * @param array $_options\n * @return Calendar_Import_Ical\n * \n * @todo move this to abstract when we no longer need to be php 5.2 compatible\n */\n public static function createFromDefinition(Tinebase_Model_ImportExportDefinition $_definition, array $_options = array())\n {\n return new Addressbook_Import_Csv(self::getOptionsArrayFromDefinition($_definition, $_options));\n }\n\n /**\n * constructs a new importer from given config\n * \n * @param array $_options\n */\n public function __construct(array $_options = array())\n {\n parent::__construct($_options);\n 
\n // don't set geodata for imported contacts as this is too much traffic for the nominatim server\n $this->_controller->setGeoDataForContacts(FALSE);\n \n // get container id from default container if not set\n if (empty($this->_options['container_id'])) {\n $defaultContainer = $this->_controller->getDefaultAddressbook();\n $this->_options['container_id'] = $defaultContainer->getId();\n if (Tinebase_Core::isLogLevel(Zend_Log::DEBUG)) Tinebase_Core::getLogger()->debug(__METHOD__ . '::' . __LINE__ . ' Setting default container id: ' . $this->_options['container_id']);\n }\n }\n \n /**\n * add some more values (container id)\n *\n * @return array\n */\n protected function _addData()\n {\n $result['container_id'] = $this->_options['container_id'];\n return $result;\n }\n \n /**\n * do conversions\n * -> sanitize account_id\n *\n * @param array $_data\n * @return array\n */\n protected function _doConversions($_data)\n {\n $result = parent::_doConversions($_data);\n \n // unset account id\n if (isset($result['account_id']) && empty($result['account_id'])) {\n unset($result['account_id']);\n }\n \n if (empty($result['n_family']) && empty($result['org_name'])) {\n if (isset($result['n_fn'])) {\n $result['n_family'] = $result['n_fn'];\n } else {\n $result['n_family'] = 'imported';\n }\n } \n \n return $result;\n }\n}\n", "meta": {"content_hash": "4692399acf338869828c25c6dbb75132", "timestamp": "", "source": "github", "line_count": 92, "max_line_length": 194, "avg_line_length": 29.76086956521739, "alnum_prop": 0.5525931336742148, "repo_name": "jodier/tmpdddf", "id": "b468cdaf95d593eab1dca2ab0396388e72686c02", "size": "3028", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "web/private/tine20/Addressbook/Import/Csv.php", "mode": "33188", "license": "mit", "language": [{"name": "PHP", "bytes": "44010"}, {"name": "Perl", "bytes": "794"}, {"name": "Shell", "bytes": "286"}]}} {"text": "\npackage org.wildfly.swarm.cdi.jaxrsapi;\n\nimport 
org.wildfly.swarm.spi.api.Fraction;\nimport org.wildfly.swarm.spi.api.annotations.DeploymentModule;\n\n/**\n * @author Ken Finnigan\n */\n@DeploymentModule(name = \"org.wildfly.swarm.cdi.jaxrsapi\", slot = \"deployment\", export = true)\npublic class CDIJAXRSFraction implements Fraction {\n}\n", "meta": {"content_hash": "7a285bf3ec4fc8c88860717e8f563c37", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 94, "avg_line_length": 29.0, "alnum_prop": 0.7758620689655172, "repo_name": "kenfinnigan/wildfly-swarm", "id": "c5af1c743f752005e3e18535b7ffaf281e618ee6", "size": "975", "binary": false, "copies": "14", "ref": "refs/heads/master", "path": "fractions/cdi-extensions/cdi-jaxrsapi/src/main/java/org/wildfly/swarm/cdi/jaxrsapi/CDIJAXRSFraction.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "5821"}, {"name": "HTML", "bytes": "7818"}, {"name": "Java", "bytes": "3295457"}, {"name": "JavaScript", "bytes": "13301"}, {"name": "Ruby", "bytes": "5349"}, {"name": "Shell", "bytes": "8275"}, {"name": "XSLT", "bytes": "20396"}]}} {"text": "// REFACTOR IF POSSIBLE\n/**\n * Within Viewport\n *\n * @description Determines whether an element is completely\n * within the browser viewport\n * @author Craig Patik, http://patik.com/\n * @version 0.0.4\n * @date 2014-07-05\n */\n;(function() {\n /**\n * Determines whether an element is within the viewport\n * @param {Object} elem DOM Element (required)\n * @param {Object} options Optional settings\n * @return {Boolean} Whether the element was completely within the viewport\n */\n var withinViewport = function _withinViewport(elem, options) {\n var result = false,\n metadata = {},\n config = {},\n settings, useHtmlElem, isWithin, scrollOffset, elemOffset, arr, i, side;\n\n if (typeof jQuery !== 'undefined' && elem instanceof jQuery) {\n elem = elem.get(0);\n }\n\n if (typeof elem !== 'object' || elem.nodeType !== 1) {\n throw new Error('First argument must be an 
element');\n }\n\n if (elem.getAttribute('data-withinviewport-settings') && window.JSON) {\n metadata = JSON.parse(elem.getAttribute('data-withinviewport-settings'));\n }\n\n // Settings argument may be a simple string (`top`, `right`, etc)\n if (typeof options === 'string') {\n settings = {sides: options};\n }\n else {\n settings = options || {};\n }\n\n // Build configuration from defaults and given settings\n config.container = settings.container || metadata.container || withinViewport.defaults.container || document.body;\n config.sides = settings.sides || metadata.sides || withinViewport.defaults.sides || 'all';\n config.top = settings.top || metadata.top || withinViewport.defaults.top || 0;\n config.right = settings.right || metadata.right || withinViewport.defaults.right || 0;\n config.bottom = settings.bottom || metadata.bottom || withinViewport.defaults.bottom || 0;\n config.left = settings.left || metadata.left || withinViewport.defaults.left || 0;\n\n // Whether we can use the ` element for `scrollTop`\n // Unfortunately at the moment I can't find a way to do this without UA-sniffing\n useHtmlElem = !/Chrome/.test(navigator.userAgent);\n\n // Element testing methods\n isWithin = {\n // Element is below the top edge of the viewport\n top: function _isWithin_top() {\n return elemOffset[1] >= scrollOffset[1] + config.top;\n },\n\n // Element is to the left of the right edge of the viewport\n right: function _isWithin_right() {\n var container = (config.container === document.body) ? window : config.container;\n\n return elemOffset[0] + elem.offsetWidth <= container.innerWidth + scrollOffset[0] - config.right;\n },\n\n // Element is above the bottom edge of the viewport\n bottom: function _isWithin_bottom() {\n var container = (config.container === document.body) ? 
window : config.container;\n\n return elemOffset[1] + elem.offsetHeight <= scrollOffset[1] + container.innerHeight - config.bottom;\n },\n\n // Element is to the right of the left edge of the viewport\n left: function _isWithin_left() {\n return elemOffset[0] >= scrollOffset[0] + config.left;\n },\n\n all: function _isWithin_all() {\n return (isWithin.top() && isWithin.right() && isWithin.bottom() && isWithin.left());\n }\n };\n\n // Current offset values\n scrollOffset = (function _scrollOffset() {\n var x = config.container.scrollLeft,\n y = config.container.scrollTop;\n\n if (y === 0) {\n if (config.container.pageYOffset) {\n y = config.container.pageYOffset;\n }\n else if (window.pageYOffset) {\n y = window.pageYOffset;\n }\n else {\n if (config.container === document.body) {\n if (useHtmlElem) {\n y = (config.container.parentElement) ? config.container.parentElement.scrollTop : 0;\n }\n else {\n y = (config.container.parentElement) ? config.container.parentElement.scrollTop : 0;\n }\n }\n else {\n y = (config.container.parentElement) ? config.container.parentElement.scrollTop : 0;\n }\n }\n }\n\n if (x === 0) {\n if (config.container.pageXOffset) {\n x = config.container.pageXOffset;\n }\n else if (window.pageXOffset) {\n x = window.pageXOffset;\n }\n else {\n if (config.container === document.body) {\n x = (config.container.parentElement) ? config.container.parentElement.scrollLeft : 0;\n }\n else {\n x = (config.container.parentElement) ? 
config.container.parentElement.scrollLeft : 0;\n }\n }\n }\n\n return [x, y];\n }());\n\n elemOffset = (function _elemOffset() {\n var el = elem,\n x = 0,\n y = 0;\n\n if (el.parentNode) {\n x = el.offsetLeft;\n y = el.offsetTop;\n\n el = el.parentNode;\n while (el) {\n if (el == config.container) {\n break;\n }\n\n x += el.offsetLeft;\n y += el.offsetTop;\n\n el = el.parentNode;\n }\n }\n\n return [x, y];\n })();\n\n // Test the element against each side of the viewport that was requested\n arr = config.sides.split(' ');\n i = arr.length;\n while (i--) {\n side = arr[i].toLowerCase();\n if (/top|right|bottom|left|all/.test(side)) {\n if (isWithin[side]()) {\n result = true;\n }\n else {\n result = false;\n // Quit as soon as the first failure is found\n break;\n }\n }\n }\n\n return result;\n }; // end of `withinViewport()`\n\n // Default settings\n withinViewport.prototype.defaults = {\n container: document.body,\n sides: 'all',\n top: 0,\n right: 0,\n bottom: 0,\n left: 0\n };\n\n withinViewport.defaults = withinViewport.prototype.defaults;\n\n // Make function available globally\n window.withinViewport = withinViewport;\n\n /**\n * Optional enhancements and shortcuts\n *\n * @description Uncomment or comment these pieces as they apply to your project and coding preferences\n */\n\n // Shortcut methods for each side of the viewport\n // Ex: withinViewport.top(elem) is the same as withinViewport(elem, 'top')\n withinViewport.prototype.top = function _withinViewport_top(element) {\n return withinViewport(element, 'top');\n };\n\n withinViewport.prototype.right = function _withinViewport_right(element) {\n return withinViewport(element, 'right');\n };\n\n withinViewport.prototype.bottom = function _withinViewport_bottom(element) {\n return withinViewport(element, 'bottom');\n };\n\n withinViewport.prototype.left = function _withinViewport_left(element) {\n return withinViewport(element, 'left');\n };\n}());", "meta": {"content_hash": 
"ebdf763bbe85efd426f5ce70a7d12c49", "timestamp": "", "source": "github", "line_count": 217, "max_line_length": 118, "avg_line_length": 31.387096774193548, "alnum_prop": 0.601673763030392, "repo_name": "clarat-org/clarat", "id": "bd811615e8745b7b69dd1e85a63e09b3efa74330", "size": "6811", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "app/assets/javascripts/vendor/inviewport.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "152114"}, {"name": "CoffeeScript", "bytes": "84489"}, {"name": "HTML", "bytes": "835592"}, {"name": "JavaScript", "bytes": "33443"}, {"name": "Ruby", "bytes": "216461"}, {"name": "Shell", "bytes": "373"}]}} {"text": "\n\npackage com.microsoft.azure.management.network.v2019_08_01;\n\nimport java.util.List;\nimport com.microsoft.azure.management.network.v2019_08_01.implementation.SecurityRuleInner;\nimport com.fasterxml.jackson.annotation.JsonProperty;\n\n/**\n * Subnet and it's custom security rules.\n */\npublic class SubnetAssociation {\n /**\n * Subnet ID.\n */\n @JsonProperty(value = \"id\", access = JsonProperty.Access.WRITE_ONLY)\n private String id;\n\n /**\n * Collection of custom security rules.\n */\n @JsonProperty(value = \"securityRules\")\n private List securityRules;\n\n /**\n * Get subnet ID.\n *\n * @return the id value\n */\n public String id() {\n return this.id;\n }\n\n /**\n * Get collection of custom security rules.\n *\n * @return the securityRules value\n */\n public List securityRules() {\n return this.securityRules;\n }\n\n /**\n * Set collection of custom security rules.\n *\n * @param securityRules the securityRules value to set\n * @return the SubnetAssociation object itself.\n */\n public SubnetAssociation withSecurityRules(List securityRules) {\n this.securityRules = securityRules;\n return this;\n }\n\n}\n", "meta": {"content_hash": "6e2519276d259d6f4c460690ffc2b435", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 91, "avg_line_length": 
23.703703703703702, "alnum_prop": 0.6546875, "repo_name": "navalev/azure-sdk-for-java", "id": "17de1a8a17b3a29564b43b54bddcf28ccd71fc84", "size": "1510", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "sdk/network/mgmt-v2019_08_01/src/main/java/com/microsoft/azure/management/network/v2019_08_01/SubnetAssociation.java", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "7230"}, {"name": "CSS", "bytes": "5411"}, {"name": "Groovy", "bytes": "1570436"}, {"name": "HTML", "bytes": "29221"}, {"name": "Java", "bytes": "250218562"}, {"name": "JavaScript", "bytes": "15605"}, {"name": "PowerShell", "bytes": "30924"}, {"name": "Python", "bytes": "42119"}, {"name": "Shell", "bytes": "1408"}]}} {"text": "require 'active_record'\n\n# establish connection\nActiveRecord::Base.configurations = App.config.database_yml\nActiveRecord::Base.establish_connection App.config.database_settings\n\nif ActiveRecord::VERSION::MAJOR == 2\n\n class RubyAppDumpLogger\n def method_missing(method, *args)\n end\n end\n\n ActiveRecord::Base.logger = RubyAppDumpLogger.new\nend\n", "meta": {"content_hash": "a4201f433fc4e8a8bc513fb025509435", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 68, "avg_line_length": 23.4, "alnum_prop": 0.7806267806267806, "repo_name": "kostya/ruby-app-ar", "id": "c09bd03a50abd449f66f05c1ebd805ba095acc56", "size": "379", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/init-activerecord.rb", "mode": "33188", "license": "mit", "language": [{"name": "Ruby", "bytes": "21278"}]}} {"text": "\r\n\r\n/**\r\n* Create a new `Particle` object. 
Particles are extended Sprites that are emitted by a particle emitter such as Phaser.Particles.Arcade.Emitter.\r\n* \r\n* @class Phaser.Particle\r\n* @constructor\r\n* @extends Phaser.Sprite\r\n* @param {Phaser.Game} game - A reference to the currently running game.\r\n* @param {number} x - The x coordinate (in world space) to position the Particle at.\r\n* @param {number} y - The y coordinate (in world space) to position the Particle at.\r\n* @param {string|Phaser.RenderTexture|Phaser.BitmapData|PIXI.Texture} key - This is the image or texture used by the Particle during rendering. It can be a string which is a reference to the Cache entry, or an instance of a RenderTexture or PIXI.Texture.\r\n* @param {string|number} frame - If this Particle is using part of a sprite sheet or texture atlas you can specify the exact frame to use by giving a string or numeric index.\r\n*/\r\nPhaser.Particle = function (game, x, y, key, frame) {\r\n\r\n Phaser.Sprite.call(this, game, x, y, key, frame);\r\n\r\n /**\r\n * @property {boolean} autoScale - If this Particle automatically scales this is set to true by Particle.setScaleData.\r\n * @protected\r\n */\r\n this.autoScale = false;\r\n\r\n /**\r\n * @property {array} scaleData - A reference to the scaleData array owned by the Emitter that emitted this Particle.\r\n * @protected\r\n */\r\n this.scaleData = null;\r\n\r\n /**\r\n * @property {number} _s - Internal cache var for tracking auto scale.\r\n * @private\r\n */\r\n this._s = 0;\r\n\r\n /**\r\n * @property {boolean} autoAlpha - If this Particle automatically changes alpha this is set to true by Particle.setAlphaData.\r\n * @protected\r\n */\r\n this.autoAlpha = false;\r\n\r\n /**\r\n * @property {array} alphaData - A reference to the alphaData array owned by the Emitter that emitted this Particle.\r\n * @protected\r\n */\r\n this.alphaData = null;\r\n\r\n /**\r\n * @property {number} _a - Internal cache var for tracking auto alpha.\r\n * @private\r\n */\r\n this._a = 
0;\r\n\r\n};\r\n\r\nPhaser.Particle.prototype = Object.create(Phaser.Sprite.prototype);\r\nPhaser.Particle.prototype.constructor = Phaser.Particle;\r\n\r\n/**\r\n* Updates the Particle scale or alpha if autoScale and autoAlpha are set.\r\n*\r\n* @method Phaser.Particle#update\r\n* @memberof Phaser.Particle\r\n*/\r\nPhaser.Particle.prototype.update = function() {\r\n\r\n if (this.autoScale)\r\n {\r\n this._s--;\r\n\r\n if (this._s)\r\n {\r\n this.scale.set(this.scaleData[this._s].x, this.scaleData[this._s].y);\r\n }\r\n else\r\n {\r\n this.autoScale = false;\r\n }\r\n }\r\n\r\n if (this.autoAlpha)\r\n {\r\n this._a--;\r\n\r\n if (this._a)\r\n {\r\n this.alpha = this.alphaData[this._a].v;\r\n }\r\n else\r\n {\r\n this.autoAlpha = false;\r\n }\r\n }\r\n\r\n};\r\n\r\n/**\r\n* Called by the Emitter when this particle is emitted. Left empty for you to over-ride as required.\r\n*\r\n* @method Phaser.Particle#onEmit\r\n* @memberof Phaser.Particle\r\n*/\r\nPhaser.Particle.prototype.onEmit = function() {\r\n};\r\n\r\n/**\r\n* Called by the Emitter if autoAlpha has been enabled. Passes over the alpha ease data and resets the alpha counter.\r\n*\r\n* @method Phaser.Particle#setAlphaData\r\n* @memberof Phaser.Particle\r\n*/\r\nPhaser.Particle.prototype.setAlphaData = function(data) {\r\n\r\n this.alphaData = data;\r\n this._a = data.length - 1;\r\n this.alpha = this.alphaData[this._a].v;\r\n this.autoAlpha = true;\r\n\r\n};\r\n\r\n/**\r\n* Called by the Emitter if autoScale has been enabled. Passes over the scale ease data and resets the scale counter.\r\n*\r\n* @method Phaser.Particle#setScaleData\r\n* @memberof Phaser.Particle\r\n*/\r\nPhaser.Particle.prototype.setScaleData = function(data) {\r\n\r\n this.scaleData = data;\r\n this._s = data.length - 1;\r\n this.scale.set(this.scaleData[this._s].x, this.scaleData[this._s].y);\r\n this.autoScale = true;\r\n\r\n};\r\n\r\n/**\r\n* Resets the Particle. 
This places the Particle at the given x/y world coordinates and then\r\n* sets alive, exists, visible and renderable all to true. Also resets the outOfBounds state and health values.\r\n* If the Particle has a physics body that too is reset.\r\n*\r\n* @method Phaser.Particle#reset\r\n* @memberof Phaser.Particle\r\n* @param {number} x - The x coordinate (in world space) to position the Particle at.\r\n* @param {number} y - The y coordinate (in world space) to position the Particle at.\r\n* @param {number} [health=1] - The health to give the Particle.\r\n* @return {Phaser.Particle} This instance.\r\n*/\r\nPhaser.Particle.prototype.reset = function(x, y, health) {\r\n\r\n Phaser.Component.Reset.prototype.reset.call(this, x, y, health);\r\n\r\n this.alpha = 1;\r\n this.scale.set(1);\r\n\r\n this.autoScale = false;\r\n this.autoAlpha = false;\r\n\r\n return this;\r\n\r\n};\r\n", "meta": {"content_hash": "24edaa06ce46fb9dbca9b7593e4e267c", "timestamp": "", "source": "github", "line_count": 161, "max_line_length": 254, "avg_line_length": 29.93167701863354, "alnum_prop": 0.6480597634363976, "repo_name": "Jufebrown/mech-combat", "id": "1e141255b39b7afd2cdf54a3fdf62c0d83149b56", "size": "5019", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "node_modules/phaser-ce/src/gameobjects/Particle.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "77"}, {"name": "HTML", "bytes": "1479"}, {"name": "JavaScript", "bytes": "34303"}]}} {"text": "/**\n * Array reduceRight\n */\nfunction reduceRight(arr, fn, initVal) {\n // check for args.length since initVal might be \"undefined\" see #gh-57\n var hasInit = arguments.length > 2;\n\n if (arr == null || !arr.length) {\n if (hasInit) {\n return initVal;\n } else {\n throw new Error('reduce of empty array with no initial value');\n }\n }\n\n var i = arr.length, result = initVal, value;\n while (--i >= 0) {\n // we iterate over sparse items since there is no way to make it\n // work properly on 
IE 7-8. see #64\n value = arr[i];\n if (!hasInit) {\n result = value;\n hasInit = true;\n } else {\n result = fn(result, value, i, arr);\n }\n }\n return result;\n}\n\nmodule.exports = reduceRight;\n\n", "meta": {"content_hash": "8593925d98048fd6aa86361d02759e51", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 75, "avg_line_length": 25.375, "alnum_prop": 0.5320197044334976, "repo_name": "ionutbarau/petstore", "id": "7bd3e6b7829862d599317c23473b5b8f37856980", "size": "812", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "petstore-app/src/main/resources/static/node_modules/bower/lib/node_modules/bower-config/node_modules/mout/array/reduceRight.js", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "5006"}, {"name": "CSS", "bytes": "344"}, {"name": "HTML", "bytes": "10564"}, {"name": "Java", "bytes": "40634"}, {"name": "JavaScript", "bytes": "11738"}, {"name": "Roff", "bytes": "52499364"}, {"name": "Shell", "bytes": "7058"}]}} {"text": "require_relative 'spec_helper'\n\nmodule LetsCert\n\n describe IOPlugin do\n\n it '.empty_data always returns the same hash' do\n hsh = IOPlugin.empty_data\n\n expect(hsh.keys.size).to eq(4)\n [:account_key, :key, :cert, :chain].each do |key|\n expect(hsh.keys).to include(key)\n expect(hsh[key]).to be_nil\n end\n end\n\n it '.register registers known subclasses' do\n names = %w(account_key.json key.pem key.der chain.pem fullchain.pem)\n names += %w(cert.pem cert.der)\n\n names.each do |name|\n expect(IOPlugin.registered.keys).to include(name)\n end\n end\n\n it '.register may register new classes' do\n class NewIO < IOPlugin;end\n IOPlugin.register(NewIO, 'newio')\n\n expect(IOPlugin.registered.keys).to include('newio')\n expect(IOPlugin.registered['newio']).to be_a(NewIO)\n end\n\n it '.register raises when plugin name contains a path' do\n class NewIO2 < IOPlugin; end\n expect { IOPlugin.register(NewIO2, 'new/io') }.to raise_error(LetsCert::Error)\n 
end\n\n it '#load raises NotImplementedError' do\n expect { IOPlugin.new('a').load }.to raise_error(NotImplementedError)\n end\n\n it '#save raises NotImplementedError' do\n expect { IOPlugin.new('a').save }.to raise_error(NotImplementedError)\n end\n\n end\n\nend\n", "meta": {"content_hash": "d115b0fbd557578dc93ce84fe7ad578b", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 84, "avg_line_length": 26.979591836734695, "alnum_prop": 0.6527987897125568, "repo_name": "sdaubert/letscert", "id": "0eb2f856b0e2f8ef602c3dfa0b92fc5a862992b8", "size": "1322", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spec/io_plugin_spec.rb", "mode": "33188", "license": "mit", "language": [{"name": "Ruby", "bytes": "103056"}]}} {"text": "extern \"Java\"\n{\n namespace gnu\n {\n namespace java\n {\n namespace net\n {\n namespace protocol\n {\n namespace jar\n {\n class Handler;\n }\n }\n }\n }\n }\n namespace java\n {\n namespace net\n {\n class URL;\n class URLConnection;\n }\n }\n}\n\nclass gnu::java::net::protocol::jar::Handler : public ::java::net::URLStreamHandler\n{\n\npublic:\n Handler();\npublic: // actually protected\n virtual ::java::net::URLConnection * openConnection(::java::net::URL *);\n virtual void parseURL(::java::net::URL *, ::java::lang::String *, jint, jint);\nprivate:\n static ::java::lang::String * flat(::java::lang::String *);\npublic: // actually protected\n virtual ::java::lang::String * toExternalForm(::java::net::URL *);\npublic:\n static ::java::lang::Class class$;\n};\n\n#endif // __gnu_java_net_protocol_jar_Handler__\n", "meta": {"content_hash": "9fa4241a43d907d9ac8bc6d70a93f4f6", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 83, "avg_line_length": 19.955555555555556, "alnum_prop": 0.5734966592427617, "repo_name": "the-linix-project/linix-kernel-source", "id": "b5fb6b66e640689c067a515654c8dea5f97f2b13", "size": "1114", "binary": false, "copies": "160", "ref": "refs/heads/master", 
"path": "gccsrc/gcc-4.7.2/libjava/gnu/java/net/protocol/jar/Handler.h", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "Ada", "bytes": "38139979"}, {"name": "Assembly", "bytes": "3723477"}, {"name": "Awk", "bytes": "83739"}, {"name": "C", "bytes": "103607293"}, {"name": "C#", "bytes": "55726"}, {"name": "C++", "bytes": "38577421"}, {"name": "CLIPS", "bytes": "6933"}, {"name": "CSS", "bytes": "32588"}, {"name": "Emacs Lisp", "bytes": "13451"}, {"name": "FORTRAN", "bytes": "4294984"}, {"name": "GAP", "bytes": "13089"}, {"name": "Go", "bytes": "11277335"}, {"name": "Haskell", "bytes": "2415"}, {"name": "Java", "bytes": "45298678"}, {"name": "JavaScript", "bytes": "6265"}, {"name": "Matlab", "bytes": "56"}, {"name": "OCaml", "bytes": "148372"}, {"name": "Objective-C", "bytes": "995127"}, {"name": "Objective-C++", "bytes": "436045"}, {"name": "PHP", "bytes": "12361"}, {"name": "Pascal", "bytes": "40318"}, {"name": "Perl", "bytes": "358808"}, {"name": "Python", "bytes": "60178"}, {"name": "SAS", "bytes": "1711"}, {"name": "Scilab", "bytes": "258457"}, {"name": "Shell", "bytes": "2610907"}, {"name": "Tcl", "bytes": "17983"}, {"name": "TeX", "bytes": "1455571"}, {"name": "XSLT", "bytes": "156419"}]}} {"text": "namespace content_settings {\n\nclass WebsiteSettingsRegistryTest : public testing::Test {\n protected:\n WebsiteSettingsRegistry* registry() { return ®istry_; }\n\n private:\n WebsiteSettingsRegistry registry_;\n};\n\nTEST_F(WebsiteSettingsRegistryTest, Get) {\n // CONTENT_SETTINGS_TYPE_AUTO_SELECT_CERTIFICATE should be registered.\n const WebsiteSettingsInfo* info =\n registry()->Get(CONTENT_SETTINGS_TYPE_AUTO_SELECT_CERTIFICATE);\n ASSERT_TRUE(info);\n EXPECT_EQ(CONTENT_SETTINGS_TYPE_AUTO_SELECT_CERTIFICATE, info->type());\n EXPECT_EQ(\"auto-select-certificate\", info->name());\n}\n\nTEST_F(WebsiteSettingsRegistryTest, GetByName) {\n // Random string shouldn't be registered.\n EXPECT_FALSE(registry()->GetByName(\"abc\"));\n\n // 
\"auto-select-certificate\" should be registered.\n const WebsiteSettingsInfo* info =\n registry()->GetByName(\"auto-select-certificate\");\n ASSERT_TRUE(info);\n EXPECT_EQ(CONTENT_SETTINGS_TYPE_AUTO_SELECT_CERTIFICATE, info->type());\n EXPECT_EQ(\"auto-select-certificate\", info->name());\n EXPECT_EQ(registry()->Get(CONTENT_SETTINGS_TYPE_AUTO_SELECT_CERTIFICATE),\n info);\n\n // Register a new setting.\n registry()->Register(static_cast(10), \"test\", nullptr,\n WebsiteSettingsInfo::UNSYNCABLE,\n WebsiteSettingsInfo::LOSSY,\n WebsiteSettingsInfo::TOP_LEVEL_ORIGIN_ONLY_SCOPE,\n WebsiteSettingsRegistry::ALL_PLATFORMS,\n WebsiteSettingsInfo::INHERIT_IN_INCOGNITO);\n info = registry()->GetByName(\"test\");\n ASSERT_TRUE(info);\n EXPECT_EQ(10, info->type());\n EXPECT_EQ(\"test\", info->name());\n EXPECT_EQ(registry()->Get(static_cast(10)), info);\n}\n\nTEST_F(WebsiteSettingsRegistryTest, GetPlatformDependent) {\n#if defined(OS_IOS)\n // App banner shouldn't be registered on iOS.\n EXPECT_FALSE(registry()->Get(CONTENT_SETTINGS_TYPE_APP_BANNER));\n#else\n // App banner should be registered on other platforms.\n EXPECT_TRUE(registry()->Get(CONTENT_SETTINGS_TYPE_APP_BANNER));\n#endif\n\n // Auto select certificate is registered on all platforms.\n EXPECT_TRUE(registry()->Get(CONTENT_SETTINGS_TYPE_AUTO_SELECT_CERTIFICATE));\n}\n\nTEST_F(WebsiteSettingsRegistryTest, Properties) {\n // \"auto-select-certificate\" should be registered.\n const WebsiteSettingsInfo* info =\n registry()->Get(CONTENT_SETTINGS_TYPE_AUTO_SELECT_CERTIFICATE);\n ASSERT_TRUE(info);\n EXPECT_EQ(\"profile.content_settings.exceptions.auto_select_certificate\",\n info->pref_name());\n EXPECT_EQ(\"profile.default_content_setting_values.auto_select_certificate\",\n info->default_value_pref_name());\n ASSERT_FALSE(info->initial_default_value());\n EXPECT_EQ(PrefRegistry::NO_REGISTRATION_FLAGS,\n info->GetPrefRegistrationFlags());\n\n // Register a new setting.\n registry()->Register(static_cast(10), 
\"test\",\n base::WrapUnique(new base::FundamentalValue(999)),\n WebsiteSettingsInfo::SYNCABLE,\n WebsiteSettingsInfo::LOSSY,\n WebsiteSettingsInfo::TOP_LEVEL_ORIGIN_ONLY_SCOPE,\n WebsiteSettingsRegistry::ALL_PLATFORMS,\n WebsiteSettingsInfo::INHERIT_IN_INCOGNITO);\n info = registry()->Get(static_cast(10));\n ASSERT_TRUE(info);\n EXPECT_EQ(\"profile.content_settings.exceptions.test\", info->pref_name());\n EXPECT_EQ(\"profile.default_content_setting_values.test\",\n info->default_value_pref_name());\n int setting;\n ASSERT_TRUE(info->initial_default_value()->GetAsInteger(&setting));\n EXPECT_EQ(999, setting);\n#if defined(OS_IOS)\n EXPECT_EQ(PrefRegistry::LOSSY_PREF, info->GetPrefRegistrationFlags());\n#else\n EXPECT_EQ(PrefRegistry::LOSSY_PREF |\n user_prefs::PrefRegistrySyncable::SYNCABLE_PREF,\n info->GetPrefRegistrationFlags());\n#endif\n EXPECT_EQ(WebsiteSettingsInfo::TOP_LEVEL_ORIGIN_ONLY_SCOPE,\n info->scoping_type());\n EXPECT_EQ(WebsiteSettingsInfo::INHERIT_IN_INCOGNITO,\n info->incognito_behavior());\n}\n\nTEST_F(WebsiteSettingsRegistryTest, Iteration) {\n registry()->Register(static_cast(10), \"test\",\n base::WrapUnique(new base::FundamentalValue(999)),\n WebsiteSettingsInfo::SYNCABLE,\n WebsiteSettingsInfo::LOSSY,\n WebsiteSettingsInfo::TOP_LEVEL_ORIGIN_ONLY_SCOPE,\n WebsiteSettingsRegistry::ALL_PLATFORMS,\n WebsiteSettingsInfo::INHERIT_IN_INCOGNITO);\n\n bool found = false;\n for (const WebsiteSettingsInfo* info : *registry()) {\n EXPECT_EQ(registry()->Get(info->type()), info);\n if (info->type() == 10) {\n EXPECT_FALSE(found);\n found = true;\n }\n }\n\n EXPECT_TRUE(found);\n}\n\n} // namespace content_settings\n", "meta": {"content_hash": "79a080a21e9aacd0ed464a6b6425897c", "timestamp": "", "source": "github", "line_count": 123, "max_line_length": 78, "avg_line_length": 39.89430894308943, "alnum_prop": 0.6723048705930303, "repo_name": "danakj/chromium", "id": "5e3630c0a84f4fdb7104f1a6dedfd1e6ffe9d708", "size": "5614", "binary": false, "copies": "1", 
"ref": "refs/heads/master", "path": "components/content_settings/core/browser/website_settings_registry_unittest.cc", "mode": "33188", "license": "bsd-3-clause", "language": []}} {"text": "import * as React from \"react\";\nimport ReactModal from \"react-modal\";\n\nimport { Config } from \"../../../data/Config\";\n\nimport { OnMobile, OnMobileTablet, OnDesktop, OnTablet } from \"../../../helpers/Breakpoints\";\nimport { smoothScrollTo } from \"../../../helpers/smoothScrollTo\";\nimport { formatNumber } from \"../../../helpers/formatNumber\";\nimport { translate } from \"../../../helpers/translate\";\n\nimport { TransformAnimation } from \"../../Animations/Interactive/TransformAnimation\";\nimport { getCorners, getLabel, CloseButton, SubmitButton } from \"../../Buttons\";\nimport { SocialLinks } from \"../../Layout/Partials\";\nimport { PartnershipPageState } from \"./PartnershipPageState\";\nimport { MapIcon } from \"../../Icons/MapIcon\";\nimport { ContactForm } from \"./ContactForm\";\nimport { BasePage } from \"../BasePage\";\n\nexport class PartnershipPage extends BasePage<{}, PartnershipPageState> {\n public state: PartnershipPageState = {\n isModalOpen: false,\n };\n\n public shouldComponentUpdate(nextProps: undefined, nextState: PartnershipPageState, nextContext: any): boolean {\n return super.shouldComponentUpdate(nextProps, nextState, nextContext)\n || this.state.isModalOpen !== nextState.isModalOpen;\n }\n\n public render(): JSX.Element {\n const modalProps = {\n className: {\n base: \"modal\",\n afterOpen: \"\",\n beforeClose: \"\",\n },\n overlayClassName: {\n base: \"modal-overlay\",\n afterOpen: \"modal-opened\",\n beforeClose: \"modal-close\",\n },\n closeTimeoutMS: 500\n };\n\n const transformAnimationProps = {\n transformedComponent: ,\n staticComponent: getCorners(),\n initialComponent: getLabel(),\n className: \"btn btn_transform\",\n onEvent() {\n const { className, duration } = this as any;\n smoothScrollTo(\n 
document.getElementsByClassName(className)[ 0 ] as HTMLElement,\n -105,\n \"top\",\n duration,\n 0\n );\n },\n duration: 1000,\n event: \"onClick\"\n };\n\n return (\n
\n
\n

{translate(\"contactPage.title\")}

\n
\n \n \n \n
\n
\n
\n \n \n {formatNumber(Config.phone, \"+xxx xx xxx-xx-xx\")}\n \n {Config.mail}\n

\n {translate(\"contactPage.support.title\")}\n {translate(\"contactPage.support.subTitle\")}\n 24\n /7\n \n

\n

\n {translate(\"contactPage.location.title\")}\n {translate(Config.location.country)} \n /\n  {translate(Config.location.city)}\n

\n
\n \n

\n {translate(\"contactPage.support.title\")}\n {translate(\"contactPage.support.subTitle\")}\n 24/7\n

\n \n \n

\n {translate(\"contactPage.location.title\")}\n {translate(Config.location.country)} / {translate(Config.location.city)}\n

\n
\n \n

\n {translate(\"contactPage.support.title\")}\n {translate(\"contactPage.support.subTitle\")}\n 24/7\n

\n

\n {translate(\"contactPage.location.title\")}\n {translate(Config.location.country)} / {translate(Config.location.city)}\n

\n \n \n \n
\n
\n
\n \n \n
\n \n \n \n \n

{translate(\"contactPage.form.title\")}

\n
\n
\n \n
\n \n
\n
\n \n
\n
\n
\n
\n
\n
\n \n {Config.location.country} / {Config.location.city}\n \n \n {Config.location.coordinates.lat}° \n {Config.location.coordinates.lng}°\n \n
\n
\n
\n
\n
\n );\n }\n\n protected handleCloseModal = () => {\n this.setState({ isModalOpen: false });\n };\n\n protected handleOpenModal = () => {\n this.setState({ isModalOpen: true });\n };\n}\n", "meta": {"content_hash": "c80d081dd615ce08644f050956bfc6e3", "timestamp": "", "source": "github", "line_count": 184, "max_line_length": 118, "avg_line_length": 51.56521739130435, "alnum_prop": 0.4267495784148398, "repo_name": "wearesho-team/wearesho-site", "id": "fd2e5a37cc3f947a11a44e2e702332375fc1dbcf", "size": "9488", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/components/Pages/PartnershipPage/PartnershipPage.tsx", "mode": "33188", "license": "mit", "language": [{"name": "Dockerfile", "bytes": "354"}, {"name": "Groovy", "bytes": "778"}, {"name": "HTML", "bytes": "3074"}, {"name": "JavaScript", "bytes": "10809"}, {"name": "SCSS", "bytes": "128541"}, {"name": "TypeScript", "bytes": "331858"}]}} {"text": "'use strict';\n\n/**\n * @ngdoc directive\n * @name izzyposWebApp.directive:adminPosHeader\n * @description\n * # adminPosHeader\n */\nangular.module('sbAdminApp')\n\t.directive('siteheader',function(){\n\t\treturn {\n templateUrl:'scripts/directives/header/siteheader.html',\n restrict: 'E',\n replace: true\n \t}\n\t}).directive('dashboardheader',function(){\n\t\treturn {\n templateUrl:'scripts/directives/header/dashboard_header.html',\n restrict: 'E',\n replace: true\n \t}\n\t}).directive('cpdashboardheader',function(){\n return {\n templateUrl:'scripts/directives/header/cp_dashboard_header.html',\n restrict: 'E',\n replace: true\n }\n }).directive('vendordashboardheader',function(){\n return {\n templateUrl:'scripts/directives/header/vendor_dashboard_header.html',\n restrict: 'E',\n replace: true\n }\n }).directive('admindashboardheader',function(){\n return {\n templateUrl:'scripts/directives/header/admin_dashboard_header.html',\n restrict: 'E',\n replace: true\n }\n });\n\n\n\n\n\n", "meta": {"content_hash": "2dc593900bc99f05643f2e599332ceae", 
"timestamp": "", "source": "github", "line_count": 45, "max_line_length": 81, "avg_line_length": 25.42222222222222, "alnum_prop": 0.5961538461538461, "repo_name": "kodebees/testing-sandbox", "id": "922b7a66ec6d6c65857d24e550d04489b4c7e45d", "size": "1144", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/scripts/directives/header/header.js", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ApacheConf", "bytes": "24139"}, {"name": "CSS", "bytes": "16010"}, {"name": "HTML", "bytes": "275958"}, {"name": "JavaScript", "bytes": "62897"}]}} {"text": "const express = require('express');\nconst router = express.Router();\n\n// const Note = require('../models/note.model'); // need this for below router.params\nconst note = require('../controllers/note.ctrl');\n\n// router.params('id', (req, res, next, id) => { // if an id parameter is present, this grabs it so we dont need\n// Note.findById(id, (err, note) => { // multiple \".findById()\" methods in the controller\n// if(err) throw err;\n// req.note = note;\n// next();\n// });\n// });\n\nrouter.get('/notes', note.index);\nrouter.get('/notes/new', note.newNote);\nrouter.get('/notes/:id', note.show);\nrouter.get('/notes/:id/edit', note.edit);\nrouter.put('/notes/:id', note.update);\nrouter.delete('/notes/:id', note.destroy);\nrouter.post('/notes', note.create);\n\nmodule.exports = router;\n", "meta": {"content_hash": "8c26a0a30648e8179e27b55b7bcd7451", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 112, "avg_line_length": 34.78260869565217, "alnum_prop": 0.63375, "repo_name": "ldougher06/c11_evernode", "id": "56216058d1e93026de87b3dd5e466ccfb5da8af7", "size": "800", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "routes/note.route.js", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "989"}, {"name": "JavaScript", "bytes": "4648"}]}} {"text": "\n\npackage 
org.springframework.boot.actuate.autoconfigure.metrics.test;\n\nimport java.util.Collections;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.concurrent.CyclicBarrier;\n\nimport javax.servlet.DispatcherType;\n\nimport io.micrometer.core.instrument.MeterRegistry;\nimport io.micrometer.core.instrument.MockClock;\nimport io.micrometer.core.instrument.binder.MeterBinder;\nimport io.micrometer.core.instrument.binder.jvm.JvmMemoryMetrics;\nimport io.micrometer.core.instrument.binder.logging.LogbackMetrics;\nimport io.micrometer.core.instrument.simple.SimpleConfig;\nimport io.micrometer.core.instrument.simple.SimpleMeterRegistry;\nimport org.junit.jupiter.api.Test;\n\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.boot.actuate.autoconfigure.metrics.JvmMetricsAutoConfiguration;\nimport org.springframework.boot.actuate.autoconfigure.metrics.LogbackMetricsAutoConfiguration;\nimport org.springframework.boot.actuate.autoconfigure.metrics.MetricsAutoConfiguration;\nimport org.springframework.boot.actuate.autoconfigure.metrics.SystemMetricsAutoConfiguration;\nimport org.springframework.boot.actuate.autoconfigure.metrics.amqp.RabbitMetricsAutoConfiguration;\nimport org.springframework.boot.actuate.autoconfigure.metrics.cache.CacheMetricsAutoConfiguration;\nimport org.springframework.boot.actuate.autoconfigure.metrics.jdbc.DataSourcePoolMetricsAutoConfiguration;\nimport org.springframework.boot.actuate.autoconfigure.metrics.orm.jpa.HibernateMetricsAutoConfiguration;\nimport org.springframework.boot.actuate.autoconfigure.metrics.web.client.HttpClientMetricsAutoConfiguration;\nimport org.springframework.boot.actuate.autoconfigure.metrics.web.reactive.WebFluxMetricsAutoConfiguration;\nimport org.springframework.boot.actuate.autoconfigure.metrics.web.servlet.WebMvcMetricsAutoConfiguration;\nimport org.springframework.boot.actuate.metrics.web.servlet.WebMvcMetricsFilter;\nimport 
org.springframework.boot.autoconfigure.ImportAutoConfiguration;\nimport org.springframework.boot.autoconfigure.http.HttpMessageConvertersAutoConfiguration;\nimport org.springframework.boot.autoconfigure.jackson.JacksonAutoConfiguration;\nimport org.springframework.boot.autoconfigure.web.client.RestTemplateAutoConfiguration;\nimport org.springframework.boot.autoconfigure.web.servlet.DispatcherServletAutoConfiguration;\nimport org.springframework.boot.autoconfigure.web.servlet.ServletWebServerFactoryAutoConfiguration;\nimport org.springframework.boot.autoconfigure.web.servlet.WebMvcAutoConfiguration;\nimport org.springframework.boot.test.context.SpringBootTest;\nimport org.springframework.boot.test.context.SpringBootTest.WebEnvironment;\nimport org.springframework.boot.test.web.client.TestRestTemplate;\nimport org.springframework.boot.web.client.RestTemplateBuilder;\nimport org.springframework.boot.web.servlet.FilterRegistrationBean;\nimport org.springframework.context.ApplicationContext;\nimport org.springframework.context.annotation.Bean;\nimport org.springframework.context.annotation.Configuration;\nimport org.springframework.context.annotation.Import;\nimport org.springframework.context.annotation.Primary;\nimport org.springframework.http.HttpMethod;\nimport org.springframework.http.MediaType;\nimport org.springframework.test.util.ReflectionTestUtils;\nimport org.springframework.test.web.client.MockRestServiceServer;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.RestController;\nimport org.springframework.web.client.RestTemplate;\n\nimport static org.assertj.core.api.Assertions.assertThat;\nimport static org.springframework.test.web.client.ExpectedCount.once;\nimport static org.springframework.test.web.client.match.MockRestRequestMatchers.method;\nimport static org.springframework.test.web.client.match.MockRestRequestMatchers.requestTo;\nimport static 
org.springframework.test.web.client.response.MockRestResponseCreators.withSuccess;\n\n/**\n * Integration tests for Metrics.\n *\n * @author Jon Schneider\n */\n@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT,\n\t\tclasses = MetricsIntegrationTests.MetricsApp.class,\n\t\tproperties = \"management.metrics.use-global-registry=false\")\npublic class MetricsIntegrationTests {\n\n\t@Autowired\n\tprivate ApplicationContext context;\n\n\t@Autowired\n\tprivate RestTemplate external;\n\n\t@Autowired\n\tprivate TestRestTemplate loopback;\n\n\t@Autowired\n\tprivate MeterRegistry registry;\n\n\t@SuppressWarnings(\"unchecked\")\n\t@Test\n\tpublic void restTemplateIsInstrumented() {\n\t\tMockRestServiceServer server = MockRestServiceServer.bindTo(this.external)\n\t\t\t\t.build();\n\t\tserver.expect(once(), requestTo(\"/api/external\"))\n\t\t\t\t.andExpect(method(HttpMethod.GET)).andRespond(withSuccess(\n\t\t\t\t\t\t\"{\\\"message\\\": \\\"hello\\\"}\", MediaType.APPLICATION_JSON));\n\t\tassertThat(this.external.getForObject(\"/api/external\", Map.class))\n\t\t\t\t.containsKey(\"message\");\n\t\tassertThat(this.registry.get(\"http.client.requests\").timer().count())\n\t\t\t\t.isEqualTo(1);\n\t}\n\n\t@Test\n\tpublic void requestMappingIsInstrumented() {\n\t\tthis.loopback.getForObject(\"/api/people\", Set.class);\n\t\tassertThat(this.registry.get(\"http.server.requests\").timer().count())\n\t\t\t\t.isEqualTo(1);\n\t}\n\n\t@Test\n\tpublic void automaticallyRegisteredBinders() {\n\t\tassertThat(this.context.getBeansOfType(MeterBinder.class).values())\n\t\t\t\t.hasAtLeastOneElementOfType(LogbackMetrics.class)\n\t\t\t\t.hasAtLeastOneElementOfType(JvmMemoryMetrics.class);\n\t}\n\n\t@Test\n\t@SuppressWarnings({ \"rawtypes\", \"unchecked\" })\n\tpublic void metricsFilterRegisteredForAsyncDispatches() {\n\t\tMap filterRegistrations = 
this.context\n\t\t\t\t.getBeansOfType(FilterRegistrationBean.class);\n\t\tassertThat(filterRegistrations).containsKey(\"webMvcMetricsFilter\");\n\t\tFilterRegistrationBean registration = filterRegistrations\n\t\t\t\t.get(\"webMvcMetricsFilter\");\n\t\tassertThat(registration.getFilter()).isInstanceOf(WebMvcMetricsFilter.class);\n\t\tassertThat((Set) ReflectionTestUtils.getField(registration,\n\t\t\t\t\"dispatcherTypes\")).containsExactlyInAnyOrder(DispatcherType.REQUEST,\n\t\t\t\t\t\tDispatcherType.ASYNC);\n\t}\n\n\t@Configuration(proxyBeanMethods = false)\n\t@ImportAutoConfiguration({ MetricsAutoConfiguration.class,\n\t\t\tJvmMetricsAutoConfiguration.class, LogbackMetricsAutoConfiguration.class,\n\t\t\tSystemMetricsAutoConfiguration.class, RabbitMetricsAutoConfiguration.class,\n\t\t\tCacheMetricsAutoConfiguration.class,\n\t\t\tDataSourcePoolMetricsAutoConfiguration.class,\n\t\t\tHibernateMetricsAutoConfiguration.class,\n\t\t\tHttpClientMetricsAutoConfiguration.class,\n\t\t\tWebFluxMetricsAutoConfiguration.class, WebMvcMetricsAutoConfiguration.class,\n\t\t\tJacksonAutoConfiguration.class, HttpMessageConvertersAutoConfiguration.class,\n\t\t\tRestTemplateAutoConfiguration.class, WebMvcAutoConfiguration.class,\n\t\t\tDispatcherServletAutoConfiguration.class,\n\t\t\tServletWebServerFactoryAutoConfiguration.class })\n\t@Import(PersonController.class)\n\tstatic class MetricsApp {\n\n\t\t@Primary\n\t\t@Bean\n\t\tpublic MeterRegistry registry() {\n\t\t\treturn new SimpleMeterRegistry(SimpleConfig.DEFAULT, new MockClock());\n\t\t}\n\n\t\t@Bean\n\t\tpublic RestTemplate restTemplate(RestTemplateBuilder restTemplateBuilder) {\n\t\t\treturn restTemplateBuilder.build();\n\t\t}\n\n\t\t@Bean\n\t\tpublic CyclicBarrier cyclicBarrier() {\n\t\t\treturn new CyclicBarrier(2);\n\t\t}\n\n\t}\n\n\t@RestController\n\tstatic class PersonController {\n\n\t\t@GetMapping(\"/api/people\")\n\t\tSet personName() {\n\t\t\treturn Collections.singleton(\"Jon\");\n\t\t}\n\n\t}\n\n}\n", "meta": 
{"content_hash": "71bc690cf69ccb97014ef702ae5dae8a", "timestamp": "", "source": "github", "line_count": 173, "max_line_length": 108, "avg_line_length": 42.820809248554916, "alnum_prop": 0.8353131749460043, "repo_name": "lburgazzoli/spring-boot", "id": "e98552b7ebf86f23ce82fed374d90e3b26640e1a", "size": "8029", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spring-boot-project/spring-boot-actuator-autoconfigure/src/test/java/org/springframework/boot/actuate/autoconfigure/metrics/test/MetricsIntegrationTests.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "6954"}, {"name": "CSS", "bytes": "5769"}, {"name": "FreeMarker", "bytes": "2134"}, {"name": "Groovy", "bytes": "49512"}, {"name": "HTML", "bytes": "69689"}, {"name": "Java", "bytes": "11602150"}, {"name": "JavaScript", "bytes": "37789"}, {"name": "Ruby", "bytes": "1307"}, {"name": "Shell", "bytes": "27916"}, {"name": "Smarty", "bytes": "3276"}, {"name": "XSLT", "bytes": "34105"}]}} {"text": "data['pagetitle'] = 'Ordered TODO List';\n $tasks = $this->tasks->all(); // get all the tasks\n $this->data['content'] = 'Ok'; // so we don't need pagebody\n $this->data['leftside'] = $this->makePrioritizedPanel($tasks);\n $this->data['rightside'] = $this->makeCategorizedPanel($tasks);\n $this->render('template_secondary'); \n }\n \n function makePrioritizedPanel($tasks) \n {\n foreach ($tasks as $task)\n {\n if ($task->status != 2)\n $undone[] = $task;\n }\n usort($undone, \"orderByPriority\");\n foreach ($undone as $task)\n $task->priority = $this->priorities->get($task->priority)->name;\n \n foreach ($undone as $task)\n $converted[] = (array) $task;\n \n $parms = ['display_tasks' => $converted];\n \n $role = $this->session->userdata('userrole');\n $parms['completer'] = ($role == ROLE_OWNER) ? 
'/views/complete' : '#';\n return $this->parser->parse('by_priority', $parms, true);\n }\n \n function makeCategorizedPanel($tasks) \n {\n $parms = ['display_tasks' => $this->tasks->getCategorizedTasks()];\n \n $role = $this->session->userdata('userrole');\n $parms['completer'] = ($role == ROLE_OWNER) ? '/views/complete' : '#';\n \n return $this->parser->parse('by_category',$parms,true);\n }\n \n function complete(){\n $role = $this->session->userdata('userrole');\n if ($role != ROLE_OWNER) redirect('/work');\n \n foreach($this->input->post() as $key=>$value){\n if(substr($key,0,4) == 'task'){\n $taskid = substr($key,4);\n $task = $this->tasks->get($taskid);\n $task->status = 2; \n $this->tasks->update($task);\n }\n }\n $this->index();\n }\n}\n\nfunction orderByPriority($a, $b)\n{\n if ($a->priority > $b->priority)\n return -1;\n elseif ($a->priority < $b->priority)\n return 1;\n else\n return 0;\n} ", "meta": {"content_hash": "cace378dfe7f31a84e026385520da689", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 78, "avg_line_length": 31.140845070422536, "alnum_prop": 0.5133423790140208, "repo_name": "Comp4711Lab5-Carmen-Hanuk/lab5", "id": "47267265ba278d4d389c32546da0ab59f900a73c", "size": "2211", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "application/controllers/Views.php", "mode": "33188", "license": "mit", "language": [{"name": "ApacheConf", "bytes": "602"}, {"name": "CSS", "bytes": "1235"}, {"name": "HTML", "bytes": "5633"}, {"name": "PHP", "bytes": "1815269"}]}} {"text": "package edu.cs4730.mywatchface;\n\n\nimport android.annotation.SuppressLint;\nimport android.content.res.Resources;\nimport android.graphics.Canvas;\nimport android.graphics.Color;\nimport android.graphics.Paint;\nimport android.graphics.Rect;\nimport android.graphics.Typeface;\nimport android.content.Context;\nimport android.view.SurfaceHolder;\n\nimport java.time.ZonedDateTime;\nimport java.util.Calendar;\n\nimport 
androidx.annotation.NonNull;\nimport androidx.annotation.Nullable;\nimport androidx.core.content.ContextCompat;\nimport androidx.wear.watchface.CanvasType;\nimport androidx.wear.watchface.ComplicationSlotsManager;\nimport androidx.wear.watchface.DrawMode;\nimport androidx.wear.watchface.RenderParameters;\nimport androidx.wear.watchface.Renderer;\nimport androidx.wear.watchface.WatchFace;\nimport androidx.wear.watchface.WatchFaceService;\nimport androidx.wear.watchface.WatchFaceType;\nimport androidx.wear.watchface.WatchState;\nimport androidx.wear.watchface.style.CurrentUserStyleRepository;\nimport kotlin.coroutines.Continuation;\n\nimport static android.graphics.Color.*;\n\n/**\n * this code is based off google's original example, but updated to point, it is likely\n * no longer recognizable.\n */\n\npublic class myWatchFaceService extends WatchFaceService {\n\n private static final String TAG = \"myWatchFaceService\";\n\n private static final Typeface BOLD_TYPEFACE =\n Typeface.create(Typeface.SANS_SERIF, Typeface.BOLD);\n private static final Typeface NORMAL_TYPEFACE =\n Typeface.create(Typeface.SANS_SERIF, Typeface.NORMAL);\n\n @Nullable\n @Override\n protected WatchFace createWatchFace(@NonNull SurfaceHolder surfaceHolder,\n @NonNull WatchState watchState,\n @NonNull ComplicationSlotsManager complicationSlotsManager,\n @NonNull CurrentUserStyleRepository currentUserStyleRepository,\n @NonNull Continuation continuation) {\n\n\n return new WatchFace(\n WatchFaceType.DIGITAL,\n new myWatchFaceService.myCanvasRender(getApplicationContext(), surfaceHolder, watchState, complicationSlotsManager, currentUserStyleRepository, CanvasType.HARDWARE)\n );\n\n }\n\n class myCanvasRender extends Renderer.CanvasRenderer2 {\n static private final long FRAME_PERIOD_MS_DEFAULT = 16L;\n final boolean clearWithBackgroundTintBeforeRenderingHighlightLayer = false;\n\n Context context;\n SurfaceHolder surfaceHolder;\n WatchState watchState;\n ComplicationSlotsManager 
complicationSlotsManager;\n CurrentUserStyleRepository currentUserStyleRepository;\n int canvasType;\n\n Paint mBackgroundPaint;\n Paint mHourPaint;\n Paint mMinutePaint;\n Paint mSecondPaint;\n Paint mAmPmPaint;\n Paint mColonPaint;\n float mColonWidth;\n\n Calendar mTime;\n boolean mShouldDrawColons;\n float mXOffset;\n float mYOffset;\n String mAmString;\n String mPmString;\n int mInteractiveBackgroundColor = Color.BLACK;//parseColor(\"black\");\n int mInteractiveHourDigitsColor = parseColor(\"white\");\n int mInteractiveMinuteDigitsColor = parseColor(\"white\");\n int mInteractiveSecondDigitsColor = parseColor(\"gray\");\n boolean mAmbient;\n\n static final String COLON_STRING = \":\";\n\n @Override\n public void render(@NonNull Canvas canvas, @NonNull Rect bounds, @NonNull ZonedDateTime zonedDateTime, @NonNull myWatchFaceService.myCanvasRender.myShareAssets myShareAssets) {\n\n RenderParameters renderParameters = getRenderParameters();\n mAmbient = renderParameters.getDrawMode() == DrawMode.AMBIENT;\n // Calendar cal = new Calendar(getSystemTimeProvider().getSystemTimeMillis());\n mTime = Calendar.getInstance();\n\n /* draw your watch face */\n\n // Show colons for the first half of each second so the colons blink on when the time\n // updates.\n mShouldDrawColons = (System.currentTimeMillis() % 1000) < 500;\n\n // Draw the background.\n canvas.drawRect(0, 0, bounds.width(), bounds.height(), mBackgroundPaint);\n\n // Draw the hours.\n float x = mXOffset;\n String hourString = String.valueOf(convertTo12Hour(mTime.get(Calendar.HOUR)));\n canvas.drawText(hourString, x, mYOffset, mHourPaint);\n x += mHourPaint.measureText(hourString);\n\n\n // In ambient and mute modes, always draw the first colon. 
Otherwise, draw the\n // first colon for the first half of each second.\n if (mAmbient || mShouldDrawColons) {\n canvas.drawText(COLON_STRING, x, mYOffset, mColonPaint);\n }\n x += mColonWidth;\n\n // Draw the minutes.\n String minuteString = formatTwoDigitNumber(mTime.get(Calendar.MINUTE));\n canvas.drawText(minuteString, x, mYOffset, mMinutePaint);\n x += mMinutePaint.measureText(minuteString);\n\n // In ambient and mute modes, draw AM/PM. Otherwise, draw a second blinking\n // colon followed by the seconds.\n if (mAmbient) {\n x += mColonWidth;\n canvas.drawText(getAmPmString(mTime.get(Calendar.HOUR)), x, mYOffset, mAmPmPaint);\n } else {\n if (mShouldDrawColons) {\n canvas.drawText(COLON_STRING, x, mYOffset, mColonPaint);\n }\n x += mColonWidth;\n canvas.drawText(formatTwoDigitNumber(mTime.get(Calendar.SECOND)), x, mYOffset,\n mSecondPaint);\n\n }\n }\n\n\n @Override\n public void renderHighlightLayer(@NonNull Canvas canvas, @NonNull Rect rect, @NonNull ZonedDateTime zonedDateTime, @NonNull myShareAssets myShareAssets) {\n ///what does this do?? 
no documentation as to it use.\n\n }\n\n class myShareAssets implements SharedAssets {\n @Override\n public void onDestroy() {\n\n }\n\n }\n\n @Nullable\n @Override\n public myWatchFaceService.myCanvasRender.myShareAssets createSharedAssets(@NonNull Continuation completion) {\n return new myWatchFaceService.myCanvasRender.myShareAssets();\n }\n\n private Paint createTextPaint(int defaultInteractiveColor) {\n return createTextPaint(defaultInteractiveColor, NORMAL_TYPEFACE);\n }\n\n private Paint createTextPaint(int defaultInteractiveColor, Typeface typeface) {\n Paint paint = new Paint();\n paint.setColor(defaultInteractiveColor);\n paint.setTypeface(typeface);\n paint.setAntiAlias(true);\n return paint;\n }\n\n @SuppressLint(\"DefaultLocale\")\n private String formatTwoDigitNumber(int hour) {\n return String.format(\"%02d\", hour);\n }\n\n private int convertTo12Hour(int hour) {\n int result = hour % 12;\n return (result == 0) ? 12 : result;\n }\n\n private String getAmPmString(int hour) {\n return (hour < 12) ? 
mAmString : mPmString;\n }\n\n public myCanvasRender(@NonNull Context mcontext, @NonNull SurfaceHolder msurfaceHolder, @NonNull WatchState mwatchState, @NonNull ComplicationSlotsManager mcomplicationSlotsManager, @NonNull final CurrentUserStyleRepository mcurrentUserStyleRepository, int mcanvasType) {\n super(msurfaceHolder, mcurrentUserStyleRepository, mwatchState, mcanvasType, FRAME_PERIOD_MS_DEFAULT, false);\n\n context = mcontext;\n surfaceHolder = msurfaceHolder;\n watchState = mwatchState;\n complicationSlotsManager = mcomplicationSlotsManager;\n currentUserStyleRepository = mcurrentUserStyleRepository;\n canvasType = mcanvasType;\n\n //setup initial Paint colors.\n mBackgroundPaint = new Paint();\n mBackgroundPaint.setColor(Color.BLACK);\n mBackgroundPaint.setAntiAlias(true);\n\n Resources resources = myWatchFaceService.this.getResources();\n //setup variables.\n mYOffset = resources.getDimension(R.dimen.digital_y_offset);\n mAmString = resources.getString(R.string.digital_am);\n mPmString = resources.getString(R.string.digital_pm);\n\n mBackgroundPaint = new Paint();\n mBackgroundPaint.setColor(mInteractiveBackgroundColor);\n mHourPaint = createTextPaint(mInteractiveHourDigitsColor, BOLD_TYPEFACE);\n mMinutePaint = createTextPaint(mInteractiveMinuteDigitsColor);\n mSecondPaint = createTextPaint(mInteractiveSecondDigitsColor);\n mAmPmPaint = createTextPaint(ContextCompat.getColor(getApplicationContext(), R.color.digital_am_pm));\n mColonPaint = createTextPaint(ContextCompat.getColor(getApplicationContext(), R.color.digital_colons));\n\n\n surfaceHolder.getSurfaceFrame();\n\n boolean isRound = true; //how determine it's round or not?\n mXOffset = resources.getDimension(isRound ? R.dimen.digital_x_offset_round : R.dimen.digital_x_offset);\n float textSize = resources.getDimension(isRound ? R.dimen.digital_text_size_round : R.dimen.digital_text_size);\n float amPmSize = resources.getDimension(isRound ? 
R.dimen.digital_am_pm_size_round : R.dimen.digital_am_pm_size);\n\n mHourPaint.setTextSize(textSize);\n mMinutePaint.setTextSize(textSize);\n mSecondPaint.setTextSize(textSize);\n mAmPmPaint.setTextSize(amPmSize);\n mColonPaint.setTextSize(textSize);\n\n mColonWidth = mColonPaint.measureText(COLON_STRING);\n\n }\n }\n\n\n}\n", "meta": {"content_hash": "662289d194fa0dc24681026778ef4ff9", "timestamp": "", "source": "github", "line_count": 245, "max_line_length": 279, "avg_line_length": 40.4, "alnum_prop": 0.669529197817741, "repo_name": "JimSeker/wearable", "id": "abfa747f133f1cd01d61ecddfafdfe7a860b8f13", "size": "9898", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "wear3/myWatchFace/wear/src/main/java/edu/cs4730/mywatchface/myWatchFaceService.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "206738"}]}} {"text": "\n\npackage android.net;\n\n/**\n * Thrown when parsing a URL fails.\n */\n// See non-public class {@link WebAddress}.\npublic class ParseException extends RuntimeException {\n public String response;\n\n ParseException(String response) {\n this.response = response;\n }\n}\n", "meta": {"content_hash": "ae4c16d0b0593890cfc31e9b836bc7dd", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 54, "avg_line_length": 18.4, "alnum_prop": 0.6884057971014492, "repo_name": "haikuowuya/android_system_code", "id": "68b209b6d81ba3aaa6c7d1047002a99a36863e93", "size": "895", "binary": false, "copies": "16", "ref": "refs/heads/master", "path": "src/android/net/ParseException.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "182432"}, {"name": "Java", "bytes": "124952631"}]}} {"text": "\ufeff// Copyright (c) ppy Pty Ltd . 
Licensed under the MIT Licence.\n// See the LICENCE file in the repository root for full licence text.\n\nusing System;\n\nnamespace osu.Game.Database\n{\n /// \n /// Represents a model manager that publishes events when s are added or removed.\n /// \n /// The model type.\n public interface IModelManager\n where TModel : class\n {\n event Action ItemAdded;\n\n event Action ItemRemoved;\n }\n}\n", "meta": {"content_hash": "87745d719286916d2f08eb1280091505", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 114, "avg_line_length": 30.63157894736842, "alnum_prop": 0.6752577319587629, "repo_name": "EVAST9919/osu", "id": "1bdbbb48e6fd1e0ad490e51500a5b4b4f818bca8", "size": "584", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "osu.Game/Database/IModelManager.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "6595208"}, {"name": "PowerShell", "bytes": "936"}, {"name": "Ruby", "bytes": "7983"}]}} {"text": "package streaming_mysql_backup_tool \n/*\n* File Generated by enaml generator\n* !!! 
Please do not edit this file !!!\n*/\ntype BackupServer struct {\n\n\t/*Port - Descr: Port number used for listening for backup requests Default: 8081\n*/\n\tPort interface{} `yaml:\"port,omitempty\"`\n\n}", "meta": {"content_hash": "b02fed02f45e12ec0b36e42ef9c22b02", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 81, "avg_line_length": 22.916666666666668, "alnum_prop": 0.72, "repo_name": "enaml-ops/ert-plugin", "id": "719c1e482e14dbdb6461ccc5f649da60a8bf2e08", "size": "275", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "enaml-gen/streaming-mysql-backup-tool/backupserver.go", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Go", "bytes": "862257"}, {"name": "Shell", "bytes": "521"}]}} {"text": "\n\n\n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n\n \n \n\n \n \n \n \n \n \n \n \n \n \n \n statsmodels.discrete.conditional_models.ConditionalLogit.information — statsmodels\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n Skip to content \n
\n \n
\n\n \n
\n \n \n \n \n
\n
\n \n
\n
\n
\n \n
\n
\n
\n \n \n
\n
\n \n
\n

statsmodels.discrete.conditional_models.ConditionalLogit.information\u00b6

\n
\n
\nConditionalLogit.information(params)\u00b6
\n

Fisher information matrix of model.

\n

Returns -1 * Hessian of the log-likelihood evaluated at params.

\n
\n
Parameters:
\n
\n
paramsndarray

The model parameters.

\n
\n
\n
\n
\n
\n
\n\n\n
\n
\n
\n
\n
\n
\n \n \n \n", "meta": {"content_hash": "65f322fdc20f8e09ace6d084bb6570eb", "timestamp": "", "source": "github", "line_count": 515, "max_line_length": 999, "avg_line_length": 39.15339805825243, "alnum_prop": 0.6077167228724459, "repo_name": "statsmodels/statsmodels.github.io", "id": "cce2ec1c243b5a1471de2a0740b55c02c09838c2", "size": "20168", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "stable/generated/statsmodels.discrete.conditional_models.ConditionalLogit.information.html", "mode": "33188", "license": "bsd-3-clause", "language": []}} {"text": "'use strict';\n\nconst async = require(`async`);\nconst Router = require(`express`).Router;\nconst router = new Router();\nconst jwtAuth = require(`./authentication`);\nconst sqlite3 = require(`../lib/sqlite3`).getInstance();\nconst bcrypt = require(`bcrypt`);\n\nconst nmcli = require(`../lib/nmcli`);\n\nrouter.get(`/access_points`, jwtAuth.authorizeRequest, (req, res, next) => {\n nmcli.listAp((err, result) => {\n if (err) {\n return next(err);\n }\n\n return res.json({ error: false, result });\n });\n});\n\nrouter.get(`/wireless_devices`, (req, res) =>\n res.json(nmcli.listWirelessDevice())\n);\n\nrouter.get(`/wifi_active_connections`, jwtAuth.authorizeRequest, (req, res, next) => {\n const devices = nmcli.listWirelessDevice()\n .filter(dev => dev.state === `connected`)\n .map(dev => dev.device);\n\n async.map(devices, (device, done) => {\n nmcli.activeConnectionOnIface(device, (err, connection) => {\n if (err) return done(err);\n return done(null, connection);\n });\n },\n\n (err, result) => {\n if (err) {\n return next(err);\n }\n\n return res.json({ active_connections: result });\n });\n});\n\nrouter.post(`/connect`, jwtAuth.authorizeRequest, (req, res, next) => {\n nmcli.connect(req.body.ssid, req.body.password, req.body.force, (err) => {\n if (err) {\n return next(new Error(`Failed to connect to AP. 
Try force connect & change the password`));\n }\n\n return res.json({ error: false, connected: true });\n });\n});\n\nrouter.post(`/disconnect`, jwtAuth.authorizeRequest, (req, res, next) => {\n nmcli.disconnect(req.body.iface, (err) => {\n if (err) {\n return next(new Error(`Failed to disconnect interface ${req.body.iface}`));\n }\n\n return res.json({ error: false, disconnected: true });\n });\n});\n\nrouter.post(`/login`, jwtAuth.authenticate);\n\nrouter.post(`/change_pass`, jwtAuth.authorizeRequest, (req, res, next) => {\n sqlite3.serialize(() => {\n const payload = {\n oldPassword: req.body.oldPassword,\n newPassword: req.body.newPassword,\n confirmNewPassword: req.body.confirmNewPassword,\n };\n\n const username = req.jwt_token.username;\n\n if (payload.newPassword !== payload.confirmNewPassword) {\n const error = new Error(`Password & confirm password do not match`);\n error.status = 401;\n\n return next(error);\n }\n\n return sqlite3.get(`SELECT * FROM account WHERE username = \"${username}\"`, (getErr, user) => {\n if (getErr) {\n return next(new Error(`Failed to verify old password`));\n }\n\n if (!user) {\n return next(new Error(`User is not found`));\n }\n\n if (!bcrypt.compareSync(payload.oldPassword, user.password)) {\n return next(new Error(`Old password mismatch`));\n }\n\n const hashed = bcrypt.hashSync(payload.newPassword, 8);\n\n return sqlite3.run(`UPDATE account SET password = \"${hashed}\" WHERE username = \"${username}\"`, (updateErr) => {\n if (updateErr) {\n return next(new Error(`Failed to update password`));\n }\n\n return res.json({ error: false, updated: true });\n });\n });\n });\n});\n\nmodule.exports = router;\n", "meta": {"content_hash": "9d681df44e78a1ab31ca293242bfe497", "timestamp": "", "source": "github", "line_count": 112, "max_line_length": 117, "avg_line_length": 27.857142857142858, "alnum_prop": 0.6198717948717949, "repo_name": "alwint3r/wlanturn", "id": "f77c6510c0270c95e3b25bab1b88678466f0b439", "size": "3120", "binary": 
false, "copies": "1", "ref": "refs/heads/master", "path": "middlewares/api.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "473"}, {"name": "HTML", "bytes": "1135"}, {"name": "JavaScript", "bytes": "45407"}]}} {"text": "get('page', 1);\n $pageSize = $this->container->getParameter('page_size');\n\n $user = $this->getAuthenticatedUser();\n\n $interactor = $this->getInteractor(SecurityInteractorRegister::FIND_COMPANIES);\n\n $request = new FindCompaniesRequest($user ? $user->getId() : null, $page - 1, $pageSize);\n $presenter = new CompaniesPresenter();\n\n $interactor->process($request, $presenter);\n\n $paginator = $this->get('knp_paginator');\n $pagination = $paginator->paginate(\n $presenter,\n $page,\n $pageSize\n );\n\n return array(\n 'pagination' => $pagination\n );\n }\n\n /**\n * @Route(\"/new\", name=\"company_new\")\n * @Template\n */\n public function newAction(Request $request)\n {\n return $this->processForm($request);\n }\n\n /**\n * @Route(\"/{id}/edit\", name=\"company_edit\")\n * @Template\n */\n public function editAction(Request $request, $id)\n {\n return $this->processForm($request, $id);\n }\n\n /**\n * @Route(\"/{id}/delete\", name=\"company_delete\")\n * @Template\n */\n public function deleteAction($id)\n {\n $user = $this->getAuthenticatedUser();\n\n $interactor = $this->getInteractor(SecurityInteractorRegister::DELETE_COMPANY);\n\n $request = new DeleteCompanyRequest($user ? 
$user->getId() : null, $id);\n $presenter = new CompanyPresenter();\n\n $interactor->process($request, $presenter);\n\n $this->addFlash('success', 'flash.company.deleted', array('%company%' => $presenter->getCompany()));\n\n return $this->redirect($this->generateUrl('companies'));\n }\n\n private function processForm(Request $request, $id = null)\n {\n /** @var CompanyFormProcessor $processor */\n $processor = $this->get('company_form_processor');\n\n $processor->process($request, $id);\n\n if ($processor->isValid()) {\n $this->addFlash('success', $id ? 'flash.company.updated' : 'flash.company.created', array('%company%' => $processor->getCompany()));\n\n if ($processor->isRedirectingTo(CompanyFormProcessor::REDIRECT_TO_LIST))\n return $this->redirect($this->generateUrl('companies'));\n\n return $this->redirect($this->generateUrl('company_edit', array(\n 'id' => $processor->getCompany()->getId())\n ));\n }\n\n $form = $processor->getForm();\n\n return array(\n 'errors' => $processor->getErrors(),\n 'form' => $form->createView()\n );\n }\n\n}\n\n", "meta": {"content_hash": "c73eb0d1cfd83ea266131a48e8cdf63f", "timestamp": "", "source": "github", "line_count": 118, "max_line_length": 144, "avg_line_length": 29.915254237288135, "alnum_prop": 0.6235127478753542, "repo_name": "disider/SecurityBundle", "id": "451c1df457eb7ff27577bc6e320f240d860c52aa", "size": "3530", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Controller/CompanyController.php", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "18720"}, {"name": "JavaScript", "bytes": "9957"}, {"name": "PHP", "bytes": "216649"}]}} {"text": "ACCEPTED\n\n#### According to\nInternational Plant Names Index\n\n#### Published in\nnull\n\n#### Original name\nnull\n\n### Remarks\nnull", "meta": {"content_hash": "f5a3c452b05a7614736653083fefe29c", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 
0.7063492063492064, "repo_name": "mdoering/backbone", "id": "b490a36887c9a685310c1056bd603f62162dbe9c", "size": "183", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Pteridophyta/Polypodiopsida/Polypodiales/Dryopteridaceae/Elaphoglossum/Elaphoglossum heterochroum/README.md", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "local require = require\nlocal rawget = rawget\nlocal pairs = pairs\n\nlocal table = require \"loop.table\"\n\nmodule \"loop.simple\"\n--------------------------------------------------------------------------------\nlocal ObjectCache = require \"loop.collection.ObjectCache\"\nlocal base = require \"loop.base\"\n--------------------------------------------------------------------------------\ntable.copy(base, _M)\n--------------------------------------------------------------------------------\nlocal DerivedClass = ObjectCache {\n\tretrieve = function(self, super)\n\t\treturn base.class { __index = super, __call = new }\n\tend,\n}\nfunction class(class, super)\n\tif super\n\t\tthen return DerivedClass[super](initclass(class))\n\t\telse return base.class(class)\n\tend\nend\n--------------------------------------------------------------------------------\nfunction isclass(class)\n\tlocal metaclass = classof(class)\n\tif metaclass then\n\t\treturn metaclass == rawget(DerivedClass, metaclass.__index) or\n\t\t base.isclass(class)\n\tend\nend\n--------------------------------------------------------------------------------\nfunction superclass(class)\n\tlocal metaclass = classof(class)\n\tif metaclass then return metaclass.__index end\nend\n--------------------------------------------------------------------------------\nfunction subclassof(class, super)\n\twhile class do\n\t\tif class == super then return true end\n\t\tclass = superclass(class)\n\tend\n\treturn false\nend\n--------------------------------------------------------------------------------\nfunction instanceof(object, class)\n\treturn 
subclassof(classof(object), class)\nend\n", "meta": {"content_hash": "92c9464c88325b9093a2d18d6d8a1997", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 80, "avg_line_length": 32.51020408163265, "alnum_prop": 0.4783427495291902, "repo_name": "ld-test/oil", "id": "860cc17ce076c7b8731a1509b1b938499d445657", "size": "3619", "binary": false, "copies": "13", "ref": "refs/heads/master", "path": "lua/loop/simple.lua", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "159816"}, {"name": "Lua", "bytes": "1003003"}, {"name": "Makefile", "bytes": "13314"}, {"name": "Shell", "bytes": "6060"}]}} {"text": "\nnamespace tpp {\n\n/*! \\file assert.hpp\n \\brief Implements a better 'Assert'\n */\n\n#ifndef REVIVER_ASSERT_HPP\n#define REVIVER_ASSERT_HPP\n\n\n/*! \\def MyAssertFunction\n \\brief Function used by 'Assert' function in _DEBUG mode.\n \n Details.\n*/\n//Fix for Visual Studio C++ needing full definition of MyAssertFunction\n#if defined _MSC_VER\n\textern bool MyAssertFunction( bool b, char* desc, int line, char* file) {return 1;};\n#else\n\textern bool MyAssertFunction( bool b, char* desc, int line, char* file);\n#endif\n\n#if defined( _DEBUG )\n#define Assert( exp, description ) tpp::MyAssertFunction( (int)(exp), description, __LINE__, __FILE__ )\n#else\n#define Assert( exp, description )\n#endif\n\n\n#endif\n\n}\n", "meta": {"content_hash": "eceac01f903f7e9daf6addf84a373a83", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 103, "avg_line_length": 21.363636363636363, "alnum_prop": 0.6921985815602837, "repo_name": "HellicarAndLewis/Stripes", "id": "45bb0a006ca4f7a7ca641634f5cd59a865485f42", "size": "705", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "addons/ofxContourAnalysis/triangle/assert.hpp", "mode": "33261", "license": "mit", "language": [{"name": "C", "bytes": "52546"}, {"name": "C++", "bytes": "809884"}]}} {"text": "struct TripAlight {\n edgepayload_t type;\n long 
external_id;\n State* (*walk)(struct EdgePayload*, struct State*, struct WalkOptions*);\n State* (*walkBack)(struct EdgePayload*, struct State*, struct WalkOptions*);\n \n int n;\n int* arrivals;\n char** trip_ids;\n int* stop_sequences;\n \n ServiceCalendar* calendar;\n Timezone* timezone;\n int agency;\n ServiceId service_id;\n \n int overage; //number of seconds schedules past midnight of the last departure. If it's at 12:00:00, the overage is 0.\n} ;\n\nTripAlight*\nalNew( ServiceId service_id, ServiceCalendar* calendar, Timezone* timezone, int agency );\n\nvoid\nalDestroy(TripAlight* this);\n\nServiceCalendar*\nalGetCalendar( TripAlight* this );\n\nTimezone*\nalGetTimezone( TripAlight* this );\n\nint\nalGetAgency( TripAlight* this );\n\nServiceId\nalGetServiceId( TripAlight* this );\n\nint\nalGetNumAlightings(TripAlight* this);\n\nvoid\nalAddAlighting(TripAlight* this, char* trip_id, int arrival, int stop_sequence);\n\nchar*\nalGetAlightingTripId(TripAlight* this, int i);\n\nint\nalGetAlightingArrival(TripAlight* this, int i);\n\nint\nalGetAlightingStopSequence(TripAlight* this, int i);\n\nint\nalSearchAlightingsList(TripAlight* this, int time);\n\nint\nalGetLastAlightingIndex(TripAlight* this, int time);\n\nint\nalGetOverage(TripAlight* this);\n\nint\nalGetAlightingIndexByTripId(TripAlight* this, char* trip_id);\n\ninline State*\nalWalk(EdgePayload* this, State* state, WalkOptions* options);\n\ninline State*\nalWalkBack(EdgePayload* this, State* state, WalkOptions* options);\n\n#endif\n", "meta": {"content_hash": "bbbd05b15f5d4e44d24a877f66fe0ae6", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 122, "avg_line_length": 21.732394366197184, "alnum_prop": 0.7414128321451717, "repo_name": "bmander/graphserver", "id": "524859f6a63259fe3437164cb8b3ac3074a2bc9a", "size": "1675", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "core/edgetypes/tripalight.h", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": 
"C", "bytes": "200078"}, {"name": "Python", "bytes": "428337"}]}} {"text": "\nUTF-8\n\n AboutDialog\n \n \n About Bitcoin\n Tietoa Bitcoinista\n \n \n \n <b>Bitcoin</b> version\n <b>Bitcoin</b> versio\n \n \n \n Copyright \u00a9 2009-2012 Bitcoin Developers\n\nThis is experimental software.\n\nDistributed under the MIT/X11 software license, see the accompanying file license.txt or http://www.opensource.org/licenses/mit-license.php.\n\nThis product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young (eay@cryptsoft.com) and UPnP software written by Thomas Bernard.\n Copyright \u00a9 2009-2012 Bitcoin Developers\n\nThis is experimental software.\n\nDistributed under the MIT/X11 software license, see the accompanying file license.txt or http://www.opensource.org/licenses/mit-license.php.\n\nThis product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young (eay@cryptsoft.com) and UPnP software written by Thomas Bernard.\n \n\n\n AddressBookPage\n \n \n Address Book\n Osoitekirja\n \n \n \n These are your Bitcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.\n N\u00e4m\u00e4 ovat sinun Bitcoin-osoitteesi suoritusten vastaanottamiseen. 
Voit halutessasi antaa kullekin l\u00e4hett\u00e4j\u00e4lle eri osoitteen, jotta voit seurata kuka sinulle maksaa.\n \n \n \n Double-click to edit address or label\n Kaksoisnapauta muokataksesi osoitetta tai nime\u00e4\n \n \n \n Create a new address\n Luo uusi osoite\n \n \n \n Copy the currently selected address to the system clipboard\n Kopioi valittu osoite leikep\u00f6yd\u00e4lle\n \n \n \n &New Address\n \n \n \n \n &Copy Address\n \n \n \n \n Show &QR Code\n N\u00e4yt\u00e4 &QR-koodi\n \n \n \n Sign a message to prove you own this address\n Allekirjoita viesti mill\u00e4 todistat omistavasi t\u00e4m\u00e4n osoitteen\n \n \n \n &Sign Message\n &Allekirjoita viesti\n \n \n \n Delete the currently selected address from the list. Only sending addresses can be deleted.\n Poista valittuna oleva osoite listasta. Vain l\u00e4hett\u00e4miseen k\u00e4ytett\u00e4vi\u00e4 osoitteita voi poistaa.\n \n \n \n &Delete\n &Poista\n \n \n \n Copy &Label\n \n \n \n \n &Edit\n \n \n \n \n Export Address Book Data\n Vie osoitekirja\n \n \n \n Comma separated file (*.csv)\n Comma separated file (*.csv)\n \n \n \n Error exporting\n Virhe viedess\u00e4 osoitekirjaa\n \n \n \n Could not write to file %1.\n Ei voida kirjoittaa tiedostoon %1.\n \n\n\n AddressTableModel\n \n \n Label\n Nimi\n \n \n \n Address\n Osoite\n \n \n \n (no label)\n (ei nime\u00e4)\n \n\n\n AskPassphraseDialog\n \n \n Passphrase Dialog\n \n \n \n \n Enter passphrase\n Anna tunnuslause\n \n \n \n New passphrase\n Uusi tunnuslause\n \n \n \n Repeat new passphrase\n Toista uusi tunnuslause\n \n \n \n Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.\n Anna lompakolle uusi tunnuslause.<br/>K\u00e4yt\u00e4 tunnuslausetta, jossa on ainakin <b>10 satunnaista mekki\u00e4</b> tai <b>kahdeksan sanaa</b>.\n \n \n \n Encrypt wallet\n Salaa lompakko\n \n \n \n This operation needs your wallet passphrase to unlock the wallet.\n 
T\u00e4t\u00e4 toimintoa varten sinun t\u00e4ytyy antaa lompakon tunnuslause sen avaamiseksi.\n \n \n \n Unlock wallet\n Avaa lompakko\n \n \n \n This operation needs your wallet passphrase to decrypt the wallet.\n T\u00e4t\u00e4 toimintoa varten sinun t\u00e4ytyy antaa lompakon tunnuslause salauksen purkuun.\n \n \n \n Decrypt wallet\n Pura lompakon salaus\n \n \n \n Change passphrase\n Vaihda tunnuslause\n \n \n \n Enter the old and new passphrase to the wallet.\n Anna vanha ja uusi tunnuslause.\n \n \n \n Confirm wallet encryption\n Hyv\u00e4ksy lompakon salaus\n \n \n \n WARNING: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR BITCOINS</b>!\nAre you sure you wish to encrypt your wallet?\n VAROITUS: Mik\u00e4li salaat lompakkosi ja unohdat tunnuslauseen, <b>MENET\u00c4T LOMPAKON KOKO SIS\u00c4LL\u00d6N</b>!\nTahdotko varmasti salata lompakon?\n \n \n \n \n Wallet encrypted\n Lompakko salattu\n \n \n \n Bitcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your bitcoins from being stolen by malware infecting your computer.\n Bitcoin sulkeutuu lopettaakseen salausprosessin. Muista, ett\u00e4 salattu lompakko ei t\u00e4ysin suojaa sit\u00e4 haittaohjelmien aiheuttamilta varkauksilta.\n \n \n \n \n Warning: The Caps Lock key is on.\n Varoitus: Caps Lock on p\u00e4\u00e4ll\u00e4.\n \n \n \n \n \n \n Wallet encryption failed\n Lompakon salaus ep\u00e4onnistui\n \n \n \n Wallet encryption failed due to an internal error. Your wallet was not encrypted.\n Lompakon salaaminen ep\u00e4onnistui sis\u00e4isen virheen vuoksi. 
Lompakkoa ei salattu.\n \n \n \n \n The supplied passphrases do not match.\n Annetut tunnuslauseet eiv\u00e4t t\u00e4sm\u00e4\u00e4.\n \n \n \n Wallet unlock failed\n Lompakon avaaminen ep\u00e4onnistui.\n \n \n \n \n \n The passphrase entered for the wallet decryption was incorrect.\n Annettu tunnuslause oli v\u00e4\u00e4r\u00e4.\n \n \n \n Wallet decryption failed\n Lompakon salauksen purku ep\u00e4onnistui.\n \n \n \n Wallet passphrase was succesfully changed.\n Lompakon tunnuslause on vaihdettu.\n \n\n\n BitcoinGUI\n \n \n Bitcoin Wallet\n Bitcoin-lompakko\n \n \n \n Sign &message...\n \n \n \n \n Show/Hide &Bitcoin\n N\u00e4yt\u00e4/K\u00e4tke &Bitcoin\n \n \n \n Synchronizing with network...\n Synkronoidaan verkon kanssa...\n \n \n \n &Overview\n &Yleisn\u00e4kym\u00e4\n \n \n \n Show general overview of wallet\n N\u00e4ytt\u00e4\u00e4 kokonaiskatsauksen lompakon tilanteesta\n \n \n \n &Transactions\n &Rahansiirrot\n \n \n \n Browse transaction history\n Selaa rahansiirtohistoriaa\n \n \n \n &Address Book\n &Osoitekirja\n \n \n \n Edit the list of stored addresses and labels\n Muokkaa tallennettujen nimien ja osoitteiden listaa\n \n \n \n &Receive coins\n &Vastaanota Bitcoineja\n \n \n \n Show the list of addresses for receiving payments\n N\u00e4yt\u00e4 Bitcoinien vastaanottamiseen k\u00e4ytetyt osoitteet\n \n \n \n &Send coins\n &L\u00e4het\u00e4 Bitcoineja\n \n \n \n Prove you control an address\n Todista ett\u00e4 hallitset osoitetta\n \n \n \n E&xit\n L&opeta\n \n \n \n Quit application\n Lopeta ohjelma\n \n \n \n &About %1\n &Tietoja %1\n \n \n \n Show information about Bitcoin\n N\u00e4yt\u00e4 tietoa Bitcoin-projektista\n \n \n \n About &Qt\n Tietoja &Qt\n \n \n \n Show information about Qt\n N\u00e4yt\u00e4 tietoja QT:ta\n \n \n \n &Options...\n &Asetukset...\n \n \n \n &Encrypt Wallet...\n \n \n \n \n &Backup Wallet...\n \n \n \n \n &Change Passphrase...\n \n \n \n \n ~%n block(s) remaining\n ~%n lohko j\u00e4ljell\u00e4~%n lohkoja 
j\u00e4ljell\u00e4\n \n \n \n Downloaded %1 of %2 blocks of transaction history (%3% done).\n Ladattu %1 / %2 lohkoista rahansiirtohistoriasta (%3% suoritettu).\n \n \n \n &Export...\n &Vie...\n \n \n \n Send coins to a Bitcoin address\n \n \n \n \n Modify configuration options for Bitcoin\n \n \n \n \n Show or hide the Bitcoin window\n N\u00e4yt\u00e4 tai piillota Bitcoin-ikkuna\n \n \n \n Export the data in the current tab to a file\n Vie auki olevan v\u00e4lilehden tiedot tiedostoon\n \n \n \n Encrypt or decrypt wallet\n Salaa tai poista salaus lompakosta\n \n \n \n Backup wallet to another location\n Varmuuskopioi lompakko toiseen sijaintiin\n \n \n \n Change the passphrase used for wallet encryption\n Vaihda lompakon salaukseen k\u00e4ytett\u00e4v\u00e4 tunnuslause\n \n \n \n &Debug window\n \n \n \n \n Open debugging and diagnostic console\n \n \n \n \n &Verify message...\n \n \n \n \n Verify a message signature\n \n \n \n \n &File\n &Tiedosto\n \n \n \n &Settings\n &Asetukset\n \n \n \n &Help\n &Apua\n \n \n \n Tabs toolBHC\n V\u00e4lilehtipalkki\n \n \n \n Actions toolBHC\n Toimintopalkki\n \n \n \n \n [testnet]\n [testnet]\n \n \n \n \n Bitcoin client\n Bitcoin-asiakas\n \n \n \n %n active connection(s) to Bitcoin network\n %n aktiivinen yhteys Bitcoin-verkkoon%n aktiivista yhteytt\u00e4 Bitcoin-verkkoon\n \n \n \n Downloaded %1 blocks of transaction history.\n Ladattu %1 lohkoa rahansiirron historiasta.\n \n \n \n %n second(s) ago\n %n sekunti sitten%n sekuntia sitten\n \n \n \n %n minute(s) ago\n %n minuutti sitten%n minuuttia sitten\n \n \n \n %n hour(s) ago\n %n tunti sitten%n tuntia sitten\n \n \n \n %n day(s) ago\n %n p\u00e4iv\u00e4 sitten%n p\u00e4iv\u00e4\u00e4 sitten\n \n \n \n Up to date\n Rahansiirtohistoria on ajan tasalla\n \n \n \n Catching up...\n Kurotaan kiinni...\n \n \n \n Last received block was generated %1.\n Viimeisin vastaanotettu lohko tuotettu %1.\n \n \n \n This transaction is over the size limit. 
You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?\n T\u00e4m\u00e4 rahansiirto ylitt\u00e4\u00e4 kokorajoituksen. Voit siit\u00e4 huolimatta l\u00e4hett\u00e4\u00e4 sen %1 siirtopalkkion mik\u00e4 menee solmuille jotka k\u00e4sittelev\u00e4t rahansiirtosi t\u00e4m\u00e4 auttaa my\u00f6s verkostoa. Haluatko maksaa siirtopalkkion? \n \n \n \n Confirm transaction fee\n \n \n \n \n Sent transaction\n L\u00e4hetetyt rahansiirrot\n \n \n \n Incoming transaction\n Saapuva rahansiirto\n \n \n \n Date: %1\nAmount: %2\nType: %3\nAddress: %4\n\n P\u00e4iv\u00e4: %1\nM\u00e4\u00e4r\u00e4: %2\nTyyppi: %3\nOsoite: %4\n \n \n \n Wallet is <b>encrypted</b> and currently <b>unlocked</b>\n Lompakko on <b>salattu</b> ja t\u00e4ll\u00e4 hetkell\u00e4 <b>avoinna</b>\n \n \n \n Wallet is <b>encrypted</b> and currently <b>locked</b>\n Lompakko on <b>salattu</b> ja t\u00e4ll\u00e4 hetkell\u00e4 <b>lukittuna</b>\n \n \n \n Backup Wallet\n Varmuuskopioi lompakko\n \n \n \n Wallet Data (*.dat)\n Lompakkodata (*.dat)\n \n \n \n Backup Failed\n Varmuuskopio ep\u00e4onnistui\n \n \n \n There was an error trying to save the wallet data to the new location.\n Virhe tallennettaessa lompakkodataa uuteen sijaintiin.\n \n \n \n A fatal error occured. Bitcoin can no longer continue safely and will quit.\n \n \n\n\n ClientModel\n \n \n Network Alert\n \n \n\n\n DisplayOptionsPage\n \n \n Display\n N\u00e4ytt\u00f6\n \n \n \n default\n \n \n \n \n The user interface language can be set here. 
This setting will only take effect after restarting Bitcoin.\n \n \n \n \n User Interface &Language:\n \n \n \n \n &Unit to show amounts in:\n \n \n \n \n Choose the default subdivision unit to show in the interface, and when sending coins\n Valitse oletus lis\u00e4m\u00e4\u00e4re mik\u00e4 n\u00e4kyy k\u00e4ytt\u00f6liittym\u00e4ss\u00e4 ja kun l\u00e4het\u00e4t kolikoita\n \n \n \n &Display addresses in transaction list\n \n \n \n \n Whether to show Bitcoin addresses in the transaction list\n \n \n \n \n Warning\n \n \n \n \n This setting will take effect after restarting Bitcoin.\n \n \n\n\n EditAddressDialog\n \n \n Edit Address\n Muokkaa osoitetta\n \n \n \n &Label\n &Nimi\n \n \n \n The label associated with this address book entry\n T\u00e4h\u00e4n osoitteeseen liitetty nimi\n \n \n \n &Address\n &Osoite\n \n \n \n The address associated with this address book entry. This can only be modified for sending addresses.\n Osoite, joka liittyy t\u00e4m\u00e4n osoitekirjan merkint\u00e4\u00e4n. 
T\u00e4t\u00e4 voidaan muuttaa vain l\u00e4hteviss\u00e4 osoitteissa.\n \n \n \n New receiving address\n Uusi vastaanottava osoite\n \n \n \n New sending address\n Uusi l\u00e4hett\u00e4v\u00e4 osoite\n \n \n \n Edit receiving address\n Muokkaa vastaanottajan osoitetta\n \n \n \n Edit sending address\n Muokkaa l\u00e4htev\u00e4\u00e4 osoitetta\n \n \n \n The entered address "%1" is already in the address book.\n Osoite "%1" on jo osoitekirjassa.\n \n \n \n The entered address "%1" is not a valid Bitcoin address.\n \n \n \n \n Could not unlock wallet.\n Lompakkoa ei voitu avata.\n \n \n \n New key generation failed.\n Uuden avaimen luonti ep\u00e4onnistui.\n \n\n\n HelpMessageBox\n \n \n \n Bitcoin-Qt\n \n \n \n \n version\n \n \n \n \n Usage:\n K\u00e4ytt\u00f6:\n \n \n \n options\n \n \n \n \n UI options\n \n \n \n \n Set language, for example "de_DE" (default: system locale)\n Set language, for example "de_DE" (default: system locale)\n \n \n \n Start minimized\n K\u00e4ynnist\u00e4 pienennettyn\u00e4\n \n \n \n Show splash screen on startup (default: 1)\n N\u00e4yt\u00e4 aloitusruutu k\u00e4ynnistett\u00e4ess\u00e4 (oletus: 1)\n \n\n\n MainOptionsPage\n \n \n Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.\n \n \n \n \n Pay transaction &fee\n Maksa rahansiirtopalkkio\n \n \n \n Main\n Yleiset\n \n \n \n Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.\n Vapaaehtoinen rahansiirtopalkkio per kB auttaa nopeuttamaan siirtoja. Useimmat rahansiirrot ovat 1 kB. 
0.01 palkkio on suositeltava.\n \n \n \n &Start Bitcoin on system login\n \n \n \n \n Automatically start Bitcoin after logging in to the system\n \n \n \n \n &Detach databases at shutdown\n \n \n\n\n MessagePage\n \n \n Sign Message\n \n \n \n \n You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.\n Voit allekirjoittaa viestit omalla osoitteellasi todistaaksesi ett\u00e4 omistat ne. Ole huolellinen, ett\u00e4 et allekirjoita mit\u00e4\u00e4n ep\u00e4m\u00e4\u00e4r\u00e4ist\u00e4, phishing-hy\u00f6kk\u00e4\u00e4j\u00e4t voivat huijata sinua allekirjoittamaan luovuttamalla henkil\u00f6llisyytesi. Allekirjoita selvitys t\u00e4ysin yksityiskohtaisesti mihin olet sitoutunut.\n \n \n \n The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)\n Osoite mill\u00e4 viesti allekirjoitetaan (esim. \n1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)\n \n \n \n Choose adress from address book\n Valitse osoite osoitekirjasta\n \n \n \n Alt+A\n Alt+A\n \n \n \n Paste address from clipboard\n Liit\u00e4 osoite leikep\u00f6yd\u00e4lt\u00e4\n \n \n \n Alt+P\n Alt+P\n \n \n \n Enter the message you want to sign here\n Kirjoita t\u00e4h\u00e4n viesti mink\u00e4 haluat allekirjoittaa\n \n \n \n Copy the current signature to the system clipboard\n \n \n \n \n &Copy Signature\n \n \n \n \n Reset all sign message fields\n \n \n \n \n Clear &All\n \n \n \n \n Click "Sign Message" to get signature\n Klikkaa "Allekirjoita viesti" saadaksesi allekirjoituksen\n \n \n \n Sign a message to prove you own this address\n Allekirjoita viesti mill\u00e4 todistat omistavasi t\u00e4m\u00e4n osoitteen\n \n \n \n &Sign Message\n &Allekirjoita viesti\n \n \n \n Enter a Bitcoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)\n Anna Bitcoin-osoite (esim. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)\n \n \n \n \n \n \n Error signing\n Virhe allekirjoitettaessa\n \n \n \n %1 is not a valid address.\n %1 ei ole kelvollinen osoite.\n \n \n \n %1 does not refer to a key.\n \n \n \n \n Private key for %1 is not available.\n Yksityisavain %1 :lle ei ole saatavilla.\n \n \n \n Sign failed\n Allekirjoittaminen ep\u00e4onnistui\n \n\n\n NetworkOptionsPage\n \n \n Network\n \n \n \n \n Map port using &UPnP\n Portin uudelleenohjaus &UPnP:ll\u00e4\n \n \n \n Automatically open the Bitcoin client port on the router. This only works when your router supports UPnP and it is enabled.\n Avaa Bitcoin-asiakasohjelman portti reitittimell\u00e4 automaattisesti. T\u00e4m\u00e4 toimii vain, jos reitittimesi tukee UPnP:t\u00e4 ja se on k\u00e4yt\u00f6ss\u00e4.\n \n \n \n &Connect through SOCKS4 proxy:\n &Yhdist\u00e4 SOCKS4-v\u00e4lityspalvelimen kautta:\n \n \n \n Connect to the Bitcon network through a SOCKS4 proxy (e.g. when connecting through Tor)\n Yhdist\u00e4 Bitcoin-verkkoon SOCKS4-v\u00e4lityspalvelimen kautta (esimerkiksi k\u00e4ytt\u00e4ess\u00e4 Tor:ia)\n \n \n \n Proxy &IP:\n \n \n \n \n &Port:\n \n \n \n \n IP address of the proxy (e.g. 127.0.0.1)\n V\u00e4lityspalvelimen IP-osoite (esim. 127.0.0.1)\n \n \n \n Port of the proxy (e.g. 1234)\n Portti, johon Bitcoin-asiakasohjelma yhdist\u00e4\u00e4 (esim. 1234)\n \n\n\n OptionsDialog\n \n \n Options\n Asetukset\n \n\n\n OverviewPage\n \n \n Form\n Lomake\n \n \n \n \n The displayed information may be out of date. 
Your wallet automatically synchronizes with the Bitcoin network after a connection is established, but this process has not completed yet.\n \n \n \n \n Balance:\n Saldo:\n \n \n \n Number of transactions:\n Rahansiirtojen lukum\u00e4\u00e4r\u00e4:\n \n \n \n Unconfirmed:\n Vahvistamatta:\n \n \n \n Wallet\n Lompakko\n \n \n \n <b>Recent transactions</b>\n <b>Viimeisimm\u00e4t rahansiirrot</b>\n \n \n \n Your current balance\n Tilill\u00e4si t\u00e4ll\u00e4 hetkell\u00e4 olevien Bitcoinien m\u00e4\u00e4r\u00e4\n \n \n \n Total of transactions that have yet to be confirmed, and do not yet count toward the current balance\n Niiden saapuvien rahansiirtojen m\u00e4\u00e4r\u00e4, joita Bitcoin-verkko ei viel\u00e4 ole ehtinyt vahvistaa ja siten eiv\u00e4t viel\u00e4 n\u00e4y saldossa.\n \n \n \n Total number of transactions in wallet\n Lompakolla tehtyjen rahansiirtojen yhteism\u00e4\u00e4r\u00e4\n \n \n \n \n out of sync\n \n \n\n\n QRCodeDialog\n \n \n QR Code Dialog\n \n \n \n \n QR Code\n QR-koodi\n \n \n \n Request Payment\n Vastaanota maksu\n \n \n \n Amount:\n M\u00e4\u00e4r\u00e4:\n \n \n \n BTC\n BTC\n \n \n \n Label:\n Tunniste:\n \n \n \n Message:\n Viesti:\n \n \n \n &Save As...\n &Tallenna nimell\u00e4...\n \n \n \n Error encoding URI into QR Code.\n \n \n \n \n Resulting URI too long, try to reduce the text for label / message.\n Tuloksen URI liian pitk\u00e4, yrit\u00e4 lyhent\u00e4\u00e4 otsikon teksti\u00e4 / viesti\u00e4.\n \n \n \n Save QR Code\n \n \n \n \n PNG Images (*.png)\n PNG kuvat (*png)\n \n\n\n RPCConsole\n \n \n Bitcoin debug window\n \n \n \n \n Client name\n \n \n \n \n \n \n \n \n \n \n \n \n N/A\n \n \n \n \n Client version\n \n \n \n \n &Information\n \n \n \n \n Client\n \n \n \n \n Startup time\n \n \n \n \n Network\n \n \n \n \n Number of connections\n \n \n \n \n On testnet\n \n \n \n \n Block chain\n \n \n \n \n Current number of blocks\n \n \n \n \n Estimated total blocks\n \n \n \n \n Last block time\n \n \n \n \n Debug 
logfile\n \n \n \n \n Open the Bitcoin debug logfile from the current data directory. This can take a few seconds for large logfiles.\n \n \n \n \n &Open\n \n \n \n \n &Console\n \n \n \n \n Build date\n \n \n \n \n Clear console\n \n \n \n \n Welcome to the Bitcoin RPC console.\n \n \n \n \n Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.\n \n \n \n \n Type <b>help</b> for an overview of available commands.\n \n \n\n\n SendCoinsDialog\n \n \n \n \n \n \n \n \n \n Send Coins\n L\u00e4het\u00e4 Bitcoineja\n \n \n \n Send to multiple recipients at once\n L\u00e4het\u00e4 monelle vastaanottajalle\n \n \n \n &Add Recipient\n \n \n \n \n Remove all transaction fields\n Poista kaikki rahansiirtokent\u00e4t\n \n \n \n Clear &All\n \n \n \n \n Balance:\n Saldo:\n \n \n \n 123.456 BTC\n 123,456 BTC\n \n \n \n Confirm the send action\n Vahvista l\u00e4hetys\n \n \n \n &Send\n &L\u00e4het\u00e4\n \n \n \n <b>%1</b> to %2 (%3)\n <b>%1</b> to %2 (%3)\n \n \n \n Confirm send coins\n Hyv\u00e4ksy Bitcoinien l\u00e4hett\u00e4minen\n \n \n \n Are you sure you want to send %1?\n Haluatko varmasti l\u00e4hett\u00e4\u00e4 %1?\n \n \n \n and \n ja \n \n \n \n The recepient address is not valid, please recheck.\n Vastaanottajan osoite ei kelpaa, ole hyv\u00e4 ja tarkista\n \n \n \n The amount to pay must be larger than 0.\n Maksettavan summan tulee olla suurempi kuin 0 Bitcoinia.\n \n \n \n The amount exceeds your balance.\n \n \n \n \n The total exceeds your balance when the %1 transaction fee is included.\n \n \n \n \n Duplicate address found, can only send to each address once per send operation.\n \n \n \n \n Error: Transaction creation failed.\n \n \n \n \n Error: The transaction was rejected. 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.\n \n \n\n\n SendCoinsEntry\n \n \n Form\n Lomake\n \n \n \n A&mount:\n M&\u00e4\u00e4r\u00e4:\n \n \n \n Pay &To:\n Maksun saaja:\n \n \n \n \n Enter a label for this address to add it to your address book\n Anna nimi t\u00e4lle osoitteelle, jos haluat lis\u00e4t\u00e4 sen osoitekirjaan\n \n \n \n &Label:\n &Nimi:\n \n \n \n The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)\n Osoite, johon Bitcoinit l\u00e4hetet\u00e4\u00e4n (esim. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)\n \n \n \n Choose address from address book\n Valitse osoite osoitekirjasta\n \n \n \n Alt+A\n Alt+A\n \n \n \n Paste address from clipboard\n Liit\u00e4 osoite leikep\u00f6yd\u00e4lt\u00e4\n \n \n \n Alt+P\n Alt+P\n \n \n \n Remove this recipient\n Poista \n \n \n \n Enter a Bitcoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)\n Anna Bitcoin-osoite (esim. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)\n \n\n\n TransactionDesc\n \n \n Open for %1 blocks\n Avoinna %1 lohkolle\n \n \n \n Open until %1\n Avoinna %1 asti\n \n \n \n %1/offline?\n %1/ei linjalla?\n \n \n \n %1/unconfirmed\n %1/vahvistamaton\n \n \n \n %1 confirmations\n %1 vahvistusta\n \n \n \n <b>Status:</b> \n <b>Tila:</b> \n \n \n \n , has not been successfully broadcast yet\n , ei ole viel\u00e4 onnistuneesti l\u00e4hetetty\n \n \n \n , broadcast through %1 node\n , l\u00e4hetet\u00e4\u00e4n %1 solmun kautta\n \n \n \n , broadcast through %1 nodes\n , l\u00e4hetet\u00e4\u00e4n %1 solmun kautta\n \n \n \n <b>Date:</b> \n <b>P\u00e4iv\u00e4:</b> \n \n \n \n <b>Source:</b> Generated<br>\n <b>L\u00e4hde:</b> Generoitu<br>\n \n \n \n \n <b>From:</b> \n <b>L\u00e4hett\u00e4j\u00e4:</b> \n \n \n \n unknown\n tuntematon\n \n \n \n \n \n <b>To:</b> \n <b>Vast. 
ott.:</b>\n \n \n \n (yours, label: \n (sinun, tunniste: \n \n \n \n (yours)\n (sinun)\n \n \n \n \n \n \n <b>Credit:</b> \n <b>Krediitti:</b> \n \n \n \n (%1 matures in %2 more blocks)\n (%1 er\u00e4\u00e4ntyy %2 useammassa lohkossa)\n \n \n \n (not accepted)\n (ei hyv\u00e4ksytty)\n \n \n \n \n \n <b>Debit:</b> \n <b>Debit:</b> \n \n \n \n <b>Transaction fee:</b> \n <b>Rahansiirtomaksu:</b> \n \n \n \n <b>Net amount:</b> \n <b>Nettom\u00e4\u00e4r\u00e4:</b> \n \n \n \n Message:\n Viesti:\n \n \n \n Comment:\n Kommentti:\n \n \n \n Transaction ID:\n Rahansiirron ID:\n \n \n \n Generated coins must wait 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, it will change to "not accepted" and not be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.\n Luotujen kolikoiden on odotettava 120 lohkoa ennen kuin ne voidaan k\u00e4ytt\u00e4\u00e4. Kun loit t\u00e4m\u00e4n lohkon, se l\u00e4hetettiin verkkoon lis\u00e4tt\u00e4v\u00e4ksi lohkoketjuun. Jos se ep\u00e4onnistuu ketjuun liittymisess\u00e4, sen tila muuttuu "ei hyv\u00e4ksytty" eik\u00e4 sit\u00e4 voi k\u00e4ytt\u00e4\u00e4. 
T\u00e4t\u00e4 voi silloin t\u00e4ll\u00f6in esiinty\u00e4 jos toinen solmu luo lohkon muutamia sekunteja omastasi.\n \n\n\n TransactionDescDialog\n \n \n Transaction details\n Rahansiirron yksityiskohdat\n \n \n \n This pane shows a detailed description of the transaction\n T\u00e4m\u00e4 ruutu n\u00e4ytt\u00e4\u00e4 yksityiskohtaisen tiedon rahansiirrosta\n \n\n\n TransactionTableModel\n \n \n Date\n P\u00e4iv\u00e4m\u00e4\u00e4r\u00e4\n \n \n \n Type\n Laatu\n \n \n \n Address\n Osoite\n \n \n \n Amount\n M\u00e4\u00e4r\u00e4\n \n \n \n Open for %n block(s)\n Auki %n lohkolleAuki %n lohkoille\n \n \n \n Open until %1\n Avoinna %1 asti\n \n \n \n Offline (%1 confirmations)\n Ei yhteytt\u00e4 verkkoon (%1 vahvistusta)\n \n \n \n Unconfirmed (%1 of %2 confirmations)\n Vahvistamatta (%1/%2 vahvistusta)\n \n \n \n Confirmed (%1 confirmations)\n Vahvistettu (%1 vahvistusta)\n \n \n \n Mined balance will be available in %n more blocks\n Louhittu saldo tulee saataville %n lohkossaLouhittu saldo tulee saataville %n lohkossa\n \n \n \n This block was not received by any other nodes and will probably not be accepted!\n T\u00e4t\u00e4 lohkoa ei vastaanotettu mist\u00e4\u00e4n muusta solmusta ja sit\u00e4 ei mahdollisesti hyv\u00e4ksyt\u00e4!\n \n \n \n Generated but not accepted\n Generoitu mutta ei hyv\u00e4ksytty\n \n \n \n Received with\n Vastaanotettu osoitteella\n \n \n \n Received from\n Vastaanotettu\n \n \n \n Sent to\n Saaja\n \n \n \n Payment to yourself\n Maksu itsellesi\n \n \n \n Mined\n Louhittu\n \n \n \n (n/a)\n (ei saatavilla)\n \n \n \n Transaction status. Hover over this field to show number of confirmations.\n Rahansiirron tila. 
Siirr\u00e4 osoitin kent\u00e4n p\u00e4\u00e4lle n\u00e4hd\u00e4ksesi vahvistusten lukum\u00e4\u00e4r\u00e4.\n \n \n \n Date and time that the transaction was received.\n Rahansiirron vastaanottamisen p\u00e4iv\u00e4m\u00e4\u00e4r\u00e4 ja aika.\n \n \n \n Type of transaction.\n Rahansiirron laatu.\n \n \n \n Destination address of transaction.\n Rahansiirron kohteen Bitcoin-osoite\n \n \n \n Amount removed from or added to balance.\n Saldoon lis\u00e4tty tai siit\u00e4 v\u00e4hennetty m\u00e4\u00e4r\u00e4.\n \n\n\n TransactionView\n \n \n \n All\n Kaikki\n \n \n \n Today\n T\u00e4n\u00e4\u00e4n\n \n \n \n This week\n T\u00e4ll\u00e4 viikolla\n \n \n \n This month\n T\u00e4ss\u00e4 kuussa\n \n \n \n Last month\n Viime kuussa\n \n \n \n This year\n T\u00e4n\u00e4 vuonna\n \n \n \n Range...\n Alue...\n \n \n \n Received with\n Vastaanotettu osoitteella\n \n \n \n Sent to\n Saaja\n \n \n \n To yourself\n Itsellesi\n \n \n \n Mined\n Louhittu\n \n \n \n Other\n Muu\n \n \n \n Enter address or label to search\n Anna etsitt\u00e4v\u00e4 osoite tai tunniste\n \n \n \n Min amount\n Minimim\u00e4\u00e4r\u00e4\n \n \n \n Copy address\n Kopioi osoite\n \n \n \n Copy label\n Kopioi nimi\n \n \n \n Copy amount\n Kopioi m\u00e4\u00e4r\u00e4\n \n \n \n Edit label\n Muokkaa nime\u00e4\n \n \n \n Show transaction details\n \n \n \n \n Export Transaction Data\n Vie rahansiirron tiedot\n \n \n \n Comma separated file (*.csv)\n Comma separated file (*.csv)\n \n \n \n Confirmed\n Vahvistettu\n \n \n \n Date\n Aika\n \n \n \n Type\n Laatu\n \n \n \n Label\n Nimi\n \n \n \n Address\n Osoite\n \n \n \n Amount\n M\u00e4\u00e4r\u00e4\n \n \n \n ID\n ID\n \n \n \n Error exporting\n Virhe tietojen vienniss\u00e4\n \n \n \n Could not write to file %1.\n Ei voida kirjoittaa tiedostoon %1.\n \n \n \n Range:\n Alue:\n \n \n \n to\n kenelle\n \n\n\n VerifyMessageDialog\n \n \n Verify Signed Message\n \n \n \n \n Enter the message and signature below (be careful to correctly copy newlines, spaces, 
tabs and other invisible characters) to obtain the Bitcoin address used to sign the message.\n \n \n \n \n Verify a message and obtain the Bitcoin address used to sign the message\n \n \n \n \n &Verify Message\n \n \n \n \n Copy the currently selected address to the system clipboard\n Kopioi valittu osoite leikep\u00f6yd\u00e4lle\n \n \n \n &Copy Address\n \n \n \n \n Reset all verify message fields\n \n \n \n \n Clear &All\n \n \n \n \n Enter Bitcoin signature\n \n \n \n \n Click "Verify Message" to obtain address\n \n \n \n \n \n Invalid Signature\n \n \n \n \n The signature could not be decoded. Please check the signature and try again.\n \n \n \n \n The signature did not match the message digest. Please check the signature and try again.\n \n \n \n \n Address not found in address book.\n \n \n \n \n Address found in address book: %1\n \n \n\n\n WalletModel\n \n \n Sending...\n L\u00e4hetet\u00e4\u00e4n...\n \n\n\n WindowOptionsPage\n \n \n Window\n \n \n \n \n &Minimize to the tray instead of the taskBHC\n &Pienenn\u00e4 ilmaisinalueelle ty\u00f6kalurivin sijasta\n \n \n \n Show only a tray icon after minimizing the window\n N\u00e4yt\u00e4 ainoastaan pikkukuvake ikkunan pienent\u00e4misen j\u00e4lkeen\n \n \n \n M&inimize on close\n \n \n \n \n Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.\n Ikkunaa suljettaessa vain pienent\u00e4\u00e4 Bitcoin-ohjelman ikkunan lopettamatta itse ohjelmaa. 
Kun t\u00e4m\u00e4 asetus on valittuna, ohjelman voi sulkea vain valitsemalla Lopeta ohjelman valikosta.\n \n\n\n bitcoin-core\n \n \n Bitcoin version\n Bitcoinin versio\n \n \n \n Usage:\n K\u00e4ytt\u00f6:\n \n \n \n Send command to -server or bitcoind\n L\u00e4het\u00e4 k\u00e4sky palvelimelle tai bitcoind:lle\n \n \n \n List commands\n Lista komennoista\n \n \n \n Get help for a command\n Hanki apua k\u00e4skyyn\n \n \n \n Options:\n Asetukset:\n \n \n \n Specify configuration file (default: bitcoin.conf)\n M\u00e4\u00e4rit\u00e4 asetustiedosto (oletus: bitcoin.conf)\n \n \n \n Specify pid file (default: bitcoind.pid)\n M\u00e4\u00e4rit\u00e4 pid-tiedosto (oletus: bitcoin.pid)\n \n \n \n Generate coins\n Generoi kolikoita\n \n \n \n Don't generate coins\n \u00c4l\u00e4 generoi kolikoita\n \n \n \n Specify data directory\n M\u00e4\u00e4rit\u00e4 data-hakemisto\n \n \n \n Set database cache size in megabytes (default: 25)\n Aseta tietokannan v\u00e4limuistin koko megatavuina (oletus: 25)\n \n \n \n Set database disk log size in megabytes (default: 100)\n Aseta tietokannan lokitiedoston koko megatavuina (oletus: 100)\n \n \n \n Specify connection timeout (in milliseconds)\n M\u00e4\u00e4rit\u00e4 yhteyden aikakatkaisu (millisekunneissa)\n \n \n \n Listen for connections on <port> (default: 8333 or testnet: 18333)\n Kuuntele yhteyksi\u00e4 portista <port> (oletus: 8333 tai testnet: 18333)\n \n \n \n Maintain at most <n> connections to peers (default: 125)\n Pid\u00e4 enint\u00e4\u00e4n <n> yhteytt\u00e4 verkkoihin (oletus: 125)\n \n \n \n Connect only to the specified node\n Muodosta yhteys vain tiettyyn solmuun\n \n \n \n Connect to a node to retrieve peer addresses, and disconnect\n \n \n \n \n Specify your own public address\n \n \n \n \n Only connect to nodes in network <net> (IPv4 or IPv6)\n \n \n \n \n Try to discover public IP address (default: 1)\n \n \n \n \n Bind to given address. 
Use [host]:port notation for IPv6\n \n \n \n \n Threshold for disconnecting misbehaving peers (default: 100)\n Kynnysarvo aikakatkaisulle heikosti toimiville verkoille (oletus: 100)\n \n \n \n Number of seconds to keep misbehaving peers from reconnecting (default: 86400)\n Sekuntien m\u00e4\u00e4r\u00e4, kuinka kauan uudelleenkytkeydyt\u00e4\u00e4n verkkoihin (oletus: 86400)\n \n \n \n Maximum per-connection receive buffer, <n>*1000 bytes (default: 10000)\n Maksimi verkkoyhteyden vastaanottopuskuri, <n>*1000 tavua (oletus: 10000)\n \n \n \n Maximum per-connection send buffer, <n>*1000 bytes (default: 10000)\n Maksimi verkkoyhteyden l\u00e4hetyspuskuri, <n>*1000 tavua (oletus: 10000)\n \n \n \n Detach block and address databases. Increases shutdown time (default: 0)\n \n \n \n \n Accept command line and JSON-RPC commands\n Hyv\u00e4ksy merkkipohjaiset- ja JSON-RPC-k\u00e4skyt\n \n \n \n Run in the background as a daemon and accept commands\n Aja taustalla daemonina ja hyv\u00e4ksy komennot\n \n \n \n Use the test network\n K\u00e4yt\u00e4 test -verkkoa\n \n \n \n Output extra debugging information\n Tulosta ylim\u00e4\u00e4r\u00e4ist\u00e4 debuggaustietoa\n \n \n \n Prepend debug output with timestamp\n Lis\u00e4\u00e4 debuggaustiedon tulostukseen aikaleima\n \n \n \n Send trace/debug info to console instead of debug.log file\n L\u00e4het\u00e4 j\u00e4ljitys/debug-tieto konsoliin, debug.log-tiedoston sijaan\n \n \n \n Send trace/debug info to debugger\n L\u00e4het\u00e4 j\u00e4ljitys/debug-tieto debuggeriin\n \n \n \n Username for JSON-RPC connections\n K\u00e4ytt\u00e4j\u00e4tunnus JSON-RPC-yhteyksille\n \n \n \n Password for JSON-RPC connections\n Salasana JSON-RPC-yhteyksille\n \n \n \n Listen for JSON-RPC connections on <port> (default: 8332)\n Kuuntele JSON-RPC -yhteyksi\u00e4 portista <port> (oletus: 8332)\n \n \n \n Allow JSON-RPC connections from specified IP address\n Salli JSON-RPC yhteydet tietyst\u00e4 ip-osoitteesta\n \n \n \n Send commands to node 
running on <ip> (default: 127.0.0.1)\n L\u00e4het\u00e4 k\u00e4skyj\u00e4 solmuun osoitteessa <ip> (oletus: 127.0.0.1)\n \n \n \n Execute command when the best block changes (%s in cmd is replaced by block hash)\n Suorita k\u00e4sky kun paras lohko muuttuu (%s cmd on vaihdettu block hashin kanssa)\n \n \n \n Upgrade wallet to latest format\n P\u00e4ivit\u00e4 lompakko uusimpaan formaattiin\n \n \n \n Set key pool size to <n> (default: 100)\n Aseta avainpoolin koko arvoon <n> (oletus: 100)\n \n \n \n Rescan the block chain for missing wallet transactions\n Skannaa uudelleen lohkoketju lompakon puuttuvien rahasiirtojen vuoksi\n \n \n \n How many blocks to check at startup (default: 2500, 0 = all)\n Kuinka monta lohkoa tarkistetaan k\u00e4ynnistett\u00e4ess\u00e4 (oletus: 2500, 0 = kaikki)\n \n \n \n How thorough the block verification is (0-6, default: 1)\n Kuinka tiukka lohkovarmistus on (0-6, oletus: 1)\n \n \n \n Imports blocks from external blk000?.dat file\n \n \n \n \n \nSSL options: (see the Bitcoin Wiki for SSL setup instructions)\n SSL-asetukset: (lis\u00e4tietoja Bitcoin-Wikist\u00e4)\n \n \n \n Use OpenSSL (https) for JSON-RPC connections\n K\u00e4yt\u00e4 OpenSSL:\u00e4\u00e4 (https) JSON-RPC-yhteyksille\n \n \n \n Server certificate file (default: server.cert)\n Palvelimen sertifikaatti-tiedosto (oletus: server.cert)\n \n \n \n Server private key (default: server.pem)\n Palvelimen yksityisavain (oletus: server.pem)\n \n \n \n Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)\n Hyv\u00e4ksytt\u00e4v\u00e4 salaus (oletus:\nTLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)\n \n \n \n Warning: Disk space is low\n \n \n \n \n This help message\n T\u00e4m\u00e4 ohjeviesti\n \n \n \n Cannot obtain a lock on data directory %s. Bitcoin is probably already running.\n En p\u00e4\u00e4se k\u00e4siksi data-hakemiston lukitukseen %s. 
Bitcoin on todenn\u00e4k\u00f6isesti jo k\u00e4ynnistetty.\n \n \n \n Bitcoin\n Bitcoin\n \n \n \n Unable to bind to %s on this computer (bind returned error %d, %s)\n \n \n \n \n Connect through socks proxy\n \n \n \n \n Select the version of socks proxy to use (4 or 5, 5 is default)\n \n \n \n \n Do not use proxy for connections to network <net> (IPv4 or IPv6)\n \n \n \n \n Allow DNS lookups for -addnode, -seednode and -connect\n \n \n \n \n Pass DNS requests to (SOCKS5) proxy\n \n \n \n \n Loading addresses...\n Ladataan osoitteita...\n \n \n \n Error loading blkindex.dat\n Virhe ladattaessa blkindex.dat-tiedostoa\n \n \n \n Error loading wallet.dat: Wallet corrupted\n Virhe ladattaessa wallet.dat-tiedostoa: Lompakko vioittunut\n \n \n \n Error loading wallet.dat: Wallet requires newer version of Bitcoin\n Virhe ladattaessa wallet.dat-tiedostoa: Tarvitset uudemman version Bitcoinista\n \n \n \n Wallet needed to be rewritten: restart Bitcoin to complete\n Lompakko tarvitsee uudelleenkirjoittaa: k\u00e4ynnist\u00e4 Bitcoin uudelleen\n \n \n \n Error loading wallet.dat\n Virhe ladattaessa wallet.dat-tiedostoa\n \n \n \n Invalid -proxy address: '%s'\n \n \n \n \n Unknown network specified in -noproxy: '%s'\n \n \n \n \n Unknown network specified in -onlynet: '%s'\n \n \n \n \n Unknown -socks proxy version requested: %i\n \n \n \n \n Cannot resolve -bind address: '%s'\n \n \n \n \n Not listening on any port\n \n \n \n \n Cannot resolve -externalip address: '%s'\n \n \n \n \n Invalid amount for -paytxfee=<amount>: '%s'\n \n \n \n \n Error: could not start node\n \n \n \n \n Error: Wallet locked, unable to create transaction \n Virhe: Lompakko on lukittu, rahansiirtoa ei voida luoda\n \n \n \n Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds \n Virhe: T\u00e4m\u00e4 rahansiirto vaatii rahansiirtopalkkion v\u00e4hint\u00e4\u00e4n %s johtuen sen m\u00e4\u00e4r\u00e4st\u00e4, 
monimutkaisuudesta tai hiljattain vastaanotettujen summien k\u00e4yt\u00f6st\u00e4\n \n \n \n Error: Transaction creation failed \n Virhe: Rahansiirron luonti ep\u00e4onnistui\n \n \n \n Sending...\n L\u00e4hetet\u00e4\u00e4n...\n \n \n \n Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.\n Virhe: Rahansiirto hyl\u00e4ttiin. T\u00e4m\u00e4 voi tapahtua jos jotkin bitcoineistasi on jo k\u00e4ytetty, esimerkiksi jos olet k\u00e4ytt\u00e4nyt kopiota wallet.dat-lompakkotiedostosta ja bitcoinit on merkitty k\u00e4ytetyksi vain kopiossa.\n \n \n \n Invalid amount\n Virheellinen m\u00e4\u00e4r\u00e4\n \n \n \n Insufficient funds\n Lompakon saldo ei riit\u00e4\n \n \n \n Loading block index...\n Ladataan lohkoindeksi\u00e4...\n \n \n \n Add a node to connect to and attempt to keep the connection open\n Lin\u00e4\u00e4 solmu mihin liitty\u00e4 pit\u00e4\u00e4ksesi yhteyden auki\n \n \n \n Unable to bind to %s on this computer. 
Bitcoin is probably already running.\n \n \n \n \n Find peers using internet relay chat (default: 0)\n Etsi solmuja k\u00e4ytt\u00e4en internet relay chatia (oletus: 0)\n \n \n \n Accept connections from outside (default: 1)\n Hyv\u00e4ksyt\u00e4\u00e4n ulkopuoliset yhteydet (oletus: 1)\n \n \n \n Find peers using DNS lookup (default: 1)\n Etsi solmuja k\u00e4ytt\u00e4m\u00e4ll\u00e4 DNS hakua (oletus: 1)\n \n \n \n Use Universal Plug and Play to map the listening port (default: 1)\n K\u00e4yt\u00e4 Plug and Play kartoitusta kuunnellaksesi porttia (oletus: 1)\n \n \n \n Use Universal Plug and Play to map the listening port (default: 0)\n K\u00e4yt\u00e4 Plug and Play kartoitusta kuunnellaksesi porttia (oletus: 0)\n \n \n \n Fee per KB to add to transactions you send\n Rahansiirtopalkkio per KB lis\u00e4t\u00e4\u00e4n l\u00e4hett\u00e4m\u00e4\u00e4si rahansiirtoon\n \n \n \n Warning: -paytxfee is set very high. This is the transaction fee you will pay if you send a transaction.\n \n \n \n \n Loading wallet...\n Ladataan lompakkoa...\n \n \n \n Cannot downgrade wallet\n Et voi p\u00e4ivitt\u00e4\u00e4 lompakkoasi vanhempaan versioon\n \n \n \n Cannot initialize keypool\n Avainvarastoa ei voi alustaa\n \n \n \n Cannot write default address\n Oletusosoitetta ei voi kirjoittaa\n \n \n \n Rescanning...\n Skannataan uudelleen...\n \n \n \n Done loading\n Lataus on valmis\n \n \n \n To use the %s option\n K\u00e4yt\u00e4 %s optiota\n \n \n \n %s, you must set a rpcpassword in the configuration file:\n %s\nIt is recommended you use the following random password:\nrpcuser=bitcoinrpc\nrpcpassword=%s\n(you do not need to remember this password)\nIf the file does not exist, create it with owner-readable-only file permissions.\n\n %s, sinun t\u00e4ytyy asettaa rpcpassword asetustiedostoon:\n%s\nOn suositeltavaa k\u00e4ytt\u00e4\u00e4 seuraavaan satunnaista salasanaa:\nrpcuser=bitcoinrpc\nrpcpassword=%s\n(sinun ei tarvitse muistaa t\u00e4t\u00e4 salasanaa)\nJos tiedostoa ei ole, 
niin luo se ainoastaan omistajan kirjoitusoikeuksin.\n\n \n \n \n Error\n Virhe\n \n \n \n An error occured while setting up the RPC port %i for listening: %s\n Virhe asetettaessa RCP-porttia %i kuunteluun: %s\n \n \n \n You must set rpcpassword=<password> in the configuration file:\n%s\nIf the file does not exist, create it with owner-readable-only file permissions.\n Sinun t\u00e4ytyy asettaa rpcpassword=<password> asetustiedostoon:\n%s\nJos tiedostoa ei ole, niin luo se ainoastaan omistajan kirjoitusoikeuksin.\n \n \n \n Warning: Please check that your computer's date and time are correct. If your clock is wrong Bitcoin will not work properly.\n Varoitus: Tarkista, ovatko tietokoneesi p\u00e4iv\u00e4m\u00e4\u00e4r\u00e4 ja aika oikein. Mik\u00e4li aika on v\u00e4\u00e4rin, Bitcoin-ohjelma ei toimi oikein.\n \n\n", "meta": {"content_hash": "9d19f82a369d9fc9870815d3fb970aa3", "timestamp": "", "source": "github", "line_count": 2520, "max_line_length": 383, "avg_line_length": 43.58015873015873, "alnum_prop": 0.6466737083644443, "repo_name": "IeuanG/BHC", "id": "9008c1e3b5004ce446d6ca674ab85e7de1315c92", "size": "110359", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/qt/locale/bitcoin_fi.ts", "mode": "33261", "license": "mit", "language": [{"name": "C", "bytes": "14758"}, {"name": "C++", "bytes": "1432339"}, {"name": "Erlang", "bytes": "6839"}, {"name": "JavaScript", "bytes": "12"}, {"name": "Makefile", "bytes": "79169"}, {"name": "Objective-C++", "bytes": "2463"}, {"name": "PHP", "bytes": "1948"}, {"name": "Perl", "bytes": "16929"}, {"name": "Python", "bytes": "47538"}, {"name": "Shell", "bytes": "2615"}]}} {"text": "\n\n\n\n\t\n\t\torg.oep.dossiermgt.model.PaymentFile\n\t\t\n\t\t\toep-core-dossiermgt\n\t\t\n\t\t2\n\t\t\n\t\t\t\n\t\t\t\tCREATE_PAYMENTFILE\n\t\t\t\tPERMISSIONS\n\t\t\t\tUPDATE_PAYMENTFILE\n\t\t\t\n\t\t\t\n\t\t\t\tCREATE_PAYMENTFILE\n\t\t\t\n\t\t\t\n\t\t\t\n\t\t\t\tPERMISSIONS\n\t\t\t\tUPDATE_PAYMENTFILE\n\t\t\t\n\t\t\n\t\n", 
"meta": {"content_hash": "02252013d43c2c9c5b20a71b33db9d63", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 164, "avg_line_length": 33.51851851851852, "alnum_prop": 0.7104972375690608, "repo_name": "openegovplatform/OEPv2", "id": "716a8f0e5568373e411e3cfd99858f2a9edf919e", "size": "905", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "oep-dossier-portlet/docroot/WEB-INF/src/resource-actions/paymentfile.xml", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "5166"}, {"name": "Java", "bytes": "45541534"}]}} {"text": "package com.tutorialspoint.abstraction;\r\n\r\npublic class AbstractDemo {\r\n public static void main(String[] args) {\r\n /* Following is not allowed and would raise error */\r\n // Employee e = new Employee(\"George W.\", \"Houston, TX\", 43); // Compile time error\r\n Salary s = new Salary(\"Mohd Mohtashim\", \"Ambehta, UP\", 3, 3600.00);\r\n Employee e = new Salary(\"John Adams\", \"Boston, MA\", 2, 2400.00);\r\n\r\n System.out.println(\"Call mailCheck using Salary reference --\");\r\n s.mailCheck();\r\n\r\n System.out.println(\"\\nCall mailCheck using Employee reference--\");\r\n e.mailCheck();\r\n }\r\n}\r\n", "meta": {"content_hash": "80d5c9815131d8a2c7aa85e80bd0ed61", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 87, "avg_line_length": 37.4375, "alnum_prop": 0.6494156928213689, "repo_name": "antalpeti/Java-Tutorial", "id": "cb1a626739250ab543710691a5f0585ff9890a70", "size": "599", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/com/tutorialspoint/abstraction/AbstractDemo.java", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "549"}, {"name": "HTML", "bytes": "1147"}, {"name": "Java", "bytes": "409438"}]}} {"text": "\ufeffusing System;\nusing System.Net;\nusing System.Net.Fakes;\nusing System.Net.Http;\nusing System.Net.Http.Fakes;\nusing System.Threading.Tasks;\nusing 
Microsoft.QualityTools.Testing.Fakes;\nusing Microsoft.VisualStudio.TestTools.UnitTesting;\nusing Newtonsoft.Json;\nusing Xciles.Uncommon.Net;\n\nnamespace Xciles.Uncommon.Tests.Net\n{\n [TestClass]\n public class UncommonRequestHelperExceptionTests\n {\n private readonly Person _person = new Person\n {\n DateOfBirth = DateTime.Now.Subtract(new TimeSpan(800, 1, 1, 1)),\n Firstname = \"First\",\n Lastname = \"Person\",\n PhoneNumber = \"0123456789\",\n SomeString = \"This is just a string\"\n };\n\n [TestMethod]\n public void ProcessGetRequestWebExceptionTest()\n {\n ProcessGetRequestWebExceptionTestAsync().Wait();\n }\n\n private async Task ProcessGetRequestWebExceptionTestAsync()\n {\n var exceptionObject = new ExceptionObject()\n {\n Description = \"This is a test Exception Description\",\n Message = \"This is a test Exception Message\",\n Type = EType.WrongHeaders\n };\n\n using (ShimsContext.Create())\n {\n\n ShimHttpClient.AllInstances.SendAsyncHttpRequestMessageCancellationToken = (client, message, arg3) =>\n {\n return Task.FromResult(new HttpResponseMessage()\n {\n Content = new StringContent(JsonConvert.SerializeObject(exceptionObject)),\n StatusCode = HttpStatusCode.BadRequest\n });\n };\n\n try\n {\n var result = await UncommonRequestHelper.ProcessGetRequestAsync(\"http://www.xciles.com/\");\n Assert.Fail(\"Should not be able to be here...\");\n }\n catch (UncommonRequestException ex)\n {\n Assert.IsTrue(ex.RequestExceptionStatus == EUncommonRequestExceptionStatus.ServiceError);\n Assert.IsTrue(ex.StatusCode == HttpStatusCode.BadRequest);\n\n var responseResult = ex.ConvertExceptionResponseToObject();\n Assert.IsTrue(responseResult != null);\n Assert.IsTrue(responseResult.Description == exceptionObject.Description);\n Assert.IsTrue(responseResult.Message == exceptionObject.Message);\n Assert.IsTrue(responseResult.Type == exceptionObject.Type);\n }\n }\n }\n\n\n\n [TestMethod]\n public void ProcessGetRequestHttpRequestExceptionTest()\n {\n 
ProcessGetRequestHttpRequestExceptionTestAsync().Wait();\n }\n\n private async Task ProcessGetRequestHttpRequestExceptionTestAsync()\n {\n var exceptionObject = new ExceptionObject()\n {\n Description = \"This is a test Exception Description\",\n Message = \"This is a test Exception Message\",\n Type = EType.WrongHeaders\n };\n\n using (ShimsContext.Create())\n {\n\n ShimHttpClient.AllInstances.SendAsyncHttpRequestMessageCancellationToken = (client, message, arg3) =>\n {\n //var webEx = new WebException(\"\", WebExceptionStatus.UnknownError, )\n throw new HttpRequestException();\n };\n\n ShimHttpWebResponse.AllInstances.ResponseStreamGet = (response) =>\n {\n return ShimsContext.ExecuteWithoutShims(() => response.GetResponseStream());\n };\n\n try\n {\n var result = await UncommonRequestHelper.ProcessGetRequestAsync(\"http://www.xciles.com/\");\n Assert.Fail(\"Should not be able to be here...\");\n }\n catch (UncommonRequestException ex)\n {\n Assert.IsTrue(ex.RequestExceptionStatus == EUncommonRequestExceptionStatus.ServiceError);\n Assert.IsTrue(ex.StatusCode == HttpStatusCode.BadRequest);\n\n //var responseResult = ex.ConvertExceptionResponseToObject();\n //Assert.IsTrue(responseResult != null);\n //Assert.IsTrue(responseResult.Description == exceptionObject.Description);\n //Assert.IsTrue(responseResult.Message == exceptionObject.Message);\n //Assert.IsTrue(responseResult.Type == exceptionObject.Type);\n }\n }\n }\n }\n}\n", "meta": {"content_hash": "e0f5a5e28a0b96215ea75c8f58fcba13", "timestamp": "", "source": "github", "line_count": 122, "max_line_length": 118, "avg_line_length": 38.80327868852459, "alnum_prop": 0.5701309674693705, "repo_name": "Xciles/Uncommon", "id": "599c1d78c23361960cd716ebf69cb66d00433506", "size": "4736", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Uncommon.Tests/Net/UncommonRequestHelperExceptionTests.cs", "mode": "33188", "license": "mit", "language": [{"name": "ASP", "bytes": "109"}, {"name": "C#", "bytes": 
"146892"}, {"name": "CSS", "bytes": "315"}, {"name": "HTML", "bytes": "5125"}, {"name": "JavaScript", "bytes": "19032"}, {"name": "Pascal", "bytes": "525"}]}} {"text": "\n \n\n \n\n \n\n\n", "meta": {"content_hash": "a5e928b2b29f2548ce086e3527b3f0bb", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 72, "avg_line_length": 31.384615384615383, "alnum_prop": 0.6519607843137255, "repo_name": "p2plab/Nxt-Client-For-Android", "id": "9718894353d3d5ce987d2d2ff7677a5897fbd02c", "size": "816", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "res/layout/alias_input.xml", "mode": "33188", "license": "mit", "language": [{"name": "Java", "bytes": "531831"}]}} {"text": "\n\n\n\t\n\tjQuery UI Datepicker Demos\n\t\n\n\n\n\n\n\n\n", "meta": {"content_hash": "011e5fa1504454cdbee214ef0cdcefd2", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 82, "avg_line_length": 36.54838709677419, "alnum_prop": 0.6487202118270079, "repo_name": "PhillyPUG/phillypug", "id": "37c8ef7148c7d46d3a6b916355ee6d0566bbf534", "size": "1133", "binary": false, "copies": "34", "ref": "refs/heads/master", "path": "media/development-bundle/demos/datepicker/index.html", "mode": "33188", "license": "bsd-3-clause", "language": []}} {"text": "package dilawar\n\ntype Type string\n\nconst (\n\tTypeCredit Type = \"credit\"\n\tTypeDebit Type = \"debit\"\n)\n\ntype Storable interface {\n\tCreate(*Transaction) error\n\tRead(int) *Transaction\n\tList() []Transaction\n}\n", "meta": {"content_hash": "8bf48ee2048fbe4af9d0dfa38a110c95", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 27, "avg_line_length": 14.5, "alnum_prop": 0.7241379310344828, "repo_name": "umayr/dilawar", "id": "31ea7e27bc18ed5652752d47e84a35bcf92ff53c", "size": "203", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "dilawar.go", "mode": "33188", "license": "mit", "language": [{"name": "Go", "bytes": "6636"}, {"name": "Makefile", "bytes": 
"1874"}]}} {"text": "extern \"C\" {\n#include \"third_party/py/scipy/optimize/Zeros/zeros.h\"\n}\n\nnamespace logistic_hd {\n\n// Integrands for the equations defined in Eq. 5 from Sur and Cand\u00e8s\n// (PNAS, 2019). These are called by the bivariate integration over Z1 and Z2\n// in `asymp_system_solve.py`.\ndouble integrand(double Z1, double Z2, double kappa, double gamma, double b0,\n double alpha, double lambda, double sigma, double beta0,\n int eq_num);\n\n// Computes the derivative of the objective that defines the proximal operator.\n// The prox operator is the value of z that makes this zero.\ndouble prox_deriv(double z, void *args);\n\ndouble sigmoid(double z);\n\n// Computes the derivative of the prox operator for the logistic regression\n// log likelihood.\ndouble prox_impl(double lambda, double x, double xtol = 1e-8,\n double rtol = 1e-8, int maxiters = 1000);\n\n// Computes the pdf of the bivariate normal without any input validation\n// because this is called many times during optimization.\ndouble pdf(double x1, double x2);\n\n// Helper function to pass values between our code and the scipy.optimize API.\ndouble scipy_zeros_functions_func(double x, void *params);\n\ntypedef struct prox_params {\n double lambda;\n double x;\n} prox_params;\n\n} // namespace logistic_hd\n\n#endif // MLE_PARAM_INTEGRANDS_H_\n", "meta": {"content_hash": "591be7b34b1395ac2119351a1f2e218f", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 79, "avg_line_length": 34.02564102564103, "alnum_prop": 0.720422004521477, "repo_name": "google-research/sloe-logistic", "id": "f3d13ad1d3b57092c932b7a4ba6143cec5890cae", "size": "2016", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "mle_param_integrands.h", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "\r\n// Protocol Buffers - Google's data interchange format\r\n// Copyright 2008 Google Inc. 
All rights reserved.\r\n// http://github.com/jskeet/dotnet-protobufs/\r\n// Original C++/Java/Python code:\r\n// http://code.google.com/p/protobuf/\r\n//\r\n// Redistribution and use in source and binary forms, with or without\r\n// modification, are permitted provided that the following conditions are\r\n// met:\r\n//\r\n// * Redistributions of source code must retain the above copyright\r\n// notice, this list of conditions and the following disclaimer.\r\n// * Redistributions in binary form must reproduce the above\r\n// copyright notice, this list of conditions and the following disclaimer\r\n// in the documentation and/or other materials provided with the\r\n// distribution.\r\n// * Neither the name of Google Inc. nor the names of its\r\n// contributors may be used to endorse or promote products derived from\r\n// this software without specific prior written permission.\r\n//\r\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\r\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\r\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\r\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\r\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\r\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\r\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\r\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\r\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\r\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\r\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\r\n\r\n#endregion\r\n\r\nusing System;\r\nusing System.Collections;\r\nusing System.Collections.Generic;\r\nusing Google.ProtocolBuffers.Descriptors;\r\n\r\n//Disable warning CS3010: CLS-compliant interfaces must have only CLS-compliant members\r\n#pragma warning disable 3010\r\n\r\nnamespace Google.ProtocolBuffers\r\n{\r\n /// \r\n /// Provides an interface that is used write a message. Most often proto buffers are written\r\n /// in their binary form by creating a instance via the CodedOutputStream.CreateInstance\r\n /// static factory.\r\n /// \r\n public interface ICodedOutputStream\r\n {\r\n /// \r\n /// Writes any message initialization data needed to the output stream\r\n /// \r\n /// \r\n /// This is primarily used by text formats and unnecessary for protobuffers' own\r\n /// binary format. The API for MessageStart/End was added for consistent handling\r\n /// of output streams regardless of the actual writer implementation.\r\n /// \r\n void WriteMessageStart();\r\n /// \r\n /// Writes any message finalization data needed to the output stream\r\n /// \r\n /// \r\n /// This is primarily used by text formats and unnecessary for protobuffers' own\r\n /// binary format. 
The API for MessageStart/End was added for consistent handling\r\n /// of output streams regardless of the actual writer implementation.\r\n /// \r\n void WriteMessageEnd();\r\n /// \r\n /// Indicates that all temporary buffers be written to the final output.\r\n /// \r\n void Flush();\r\n /// \r\n /// Writes an unknown message as a group\r\n /// \r\n [Obsolete]\r\n void WriteUnknownGroup(int fieldNumber, IMessageLite value);\r\n /// \r\n /// Writes an unknown field value of bytes\r\n /// \r\n void WriteUnknownBytes(int fieldNumber, ByteString value);\r\n /// \r\n /// Writes an unknown field of a primitive type\r\n /// \r\n [CLSCompliant(false)]\r\n void WriteUnknownField(int fieldNumber, WireFormat.WireType wireType, ulong value);\r\n /// \r\n /// Writes an extension as a message-set group\r\n /// \r\n void WriteMessageSetExtension(int fieldNumber, string fieldName, IMessageLite value);\r\n /// \r\n /// Writes an unknown extension as a message-set group\r\n /// \r\n void WriteMessageSetExtension(int fieldNumber, string fieldName, ByteString value);\r\n\r\n /// \r\n /// Writes a field value, including tag, to the stream.\r\n /// \r\n void WriteField(FieldType fieldType, int fieldNumber, string fieldName, object value);\r\n\r\n /// \r\n /// Writes a double field value, including tag, to the stream.\r\n /// \r\n void WriteDouble(int fieldNumber, string fieldName, double value);\r\n\r\n /// \r\n /// Writes a float field value, including tag, to the stream.\r\n /// \r\n void WriteFloat(int fieldNumber, string fieldName, float value);\r\n\r\n /// \r\n /// Writes a uint64 field value, including tag, to the stream.\r\n /// \r\n [CLSCompliant(false)]\r\n void WriteUInt64(int fieldNumber, string fieldName, ulong value);\r\n\r\n /// \r\n /// Writes an int64 field value, including tag, to the stream.\r\n /// \r\n void WriteInt64(int fieldNumber, string fieldName, long value);\r\n\r\n /// \r\n /// Writes an int32 field value, including tag, to the stream.\r\n /// \r\n void 
WriteInt32(int fieldNumber, string fieldName, int value);\r\n\r\n /// \r\n /// Writes a fixed64 field value, including tag, to the stream.\r\n /// \r\n [CLSCompliant(false)]\r\n void WriteFixed64(int fieldNumber, string fieldName, ulong value);\r\n\r\n /// \r\n /// Writes a fixed32 field value, including tag, to the stream.\r\n /// \r\n [CLSCompliant(false)]\r\n void WriteFixed32(int fieldNumber, string fieldName, uint value);\r\n\r\n /// \r\n /// Writes a bool field value, including tag, to the stream.\r\n /// \r\n void WriteBool(int fieldNumber, string fieldName, bool value);\r\n\r\n /// \r\n /// Writes a string field value, including tag, to the stream.\r\n /// \r\n void WriteString(int fieldNumber, string fieldName, string value);\r\n\r\n /// \r\n /// Writes a group field value, including tag, to the stream.\r\n /// \r\n void WriteGroup(int fieldNumber, string fieldName, IMessageLite value);\r\n\r\n /// \r\n /// Writes a message field value, including tag, to the stream.\r\n /// \r\n void WriteMessage(int fieldNumber, string fieldName, IMessageLite value);\r\n\r\n /// \r\n /// Writes a byte array field value, including tag, to the stream.\r\n /// \r\n void WriteBytes(int fieldNumber, string fieldName, ByteString value);\r\n\r\n /// \r\n /// Writes a UInt32 field value, including tag, to the stream.\r\n /// \r\n [CLSCompliant(false)]\r\n void WriteUInt32(int fieldNumber, string fieldName, uint value);\r\n\r\n /// \r\n /// Writes an enum field value, including tag, to the stream.\r\n /// \r\n void WriteEnum(int fieldNumber, string fieldName, int value, object rawValue);\r\n\r\n /// \r\n /// Writes a fixed 32-bit field value, including tag, to the stream.\r\n /// \r\n void WriteSFixed32(int fieldNumber, string fieldName, int value);\r\n\r\n /// \r\n /// Writes a signed fixed 64-bit field value, including tag, to the stream.\r\n /// \r\n void WriteSFixed64(int fieldNumber, string fieldName, long value);\r\n\r\n /// \r\n /// Writes a signed 32-bit field value, 
including tag, to the stream.\r\n /// \r\n void WriteSInt32(int fieldNumber, string fieldName, int value);\r\n\r\n /// \r\n /// Writes a signed 64-bit field value, including tag, to the stream.\r\n /// \r\n void WriteSInt64(int fieldNumber, string fieldName, long value);\r\n\r\n /// \r\n /// Writes a repeated field value, including tag(s), to the stream.\r\n /// \r\n void WriteArray(FieldType fieldType, int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated group value, including tag(s), to the stream.\r\n /// \r\n void WriteGroupArray(int fieldNumber, string fieldName, IEnumerable list)\r\n where T : IMessageLite;\r\n\r\n /// \r\n /// Writes a repeated message value, including tag(s), to the stream.\r\n /// \r\n void WriteMessageArray(int fieldNumber, string fieldName, IEnumerable list)\r\n where T : IMessageLite;\r\n\r\n /// \r\n /// Writes a repeated string value, including tag(s), to the stream.\r\n /// \r\n void WriteStringArray(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated ByteString value, including tag(s), to the stream.\r\n /// \r\n void WriteBytesArray(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated boolean value, including tag(s), to the stream.\r\n /// \r\n void WriteBoolArray(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated Int32 value, including tag(s), to the stream.\r\n /// \r\n void WriteInt32Array(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated SInt32 value, including tag(s), to the stream.\r\n /// \r\n void WriteSInt32Array(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated UInt32 value, including tag(s), to the stream.\r\n /// \r\n void WriteUInt32Array(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated Fixed32 value, including tag(s), to the 
stream.\r\n /// \r\n void WriteFixed32Array(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated SFixed32 value, including tag(s), to the stream.\r\n /// \r\n void WriteSFixed32Array(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated Int64 value, including tag(s), to the stream.\r\n /// \r\n void WriteInt64Array(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated SInt64 value, including tag(s), to the stream.\r\n /// \r\n void WriteSInt64Array(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated UInt64 value, including tag(s), to the stream.\r\n /// \r\n void WriteUInt64Array(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated Fixed64 value, including tag(s), to the stream.\r\n /// \r\n void WriteFixed64Array(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated SFixed64 value, including tag(s), to the stream.\r\n /// \r\n void WriteSFixed64Array(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated Double value, including tag(s), to the stream.\r\n /// \r\n void WriteDoubleArray(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated Float value, including tag(s), to the stream.\r\n /// \r\n void WriteFloatArray(int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// Writes a repeated enumeration value of type T, including tag(s), to the stream.\r\n /// \r\n [CLSCompliant(false)]\r\n void WriteEnumArray(int fieldNumber, string fieldName, IEnumerable list)\r\n where T : struct, IComparable, IFormattable;\r\n\r\n /// \r\n /// Writes a packed repeated primitive, including tag and length, to the stream.\r\n /// \r\n void WritePackedArray(FieldType fieldType, int fieldNumber, string fieldName, IEnumerable list);\r\n\r\n /// \r\n /// 
Writes a packed repeated boolean, including tag and length, to the stream.\r\n /// \r\n void WritePackedBoolArray(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list);\r\n\r\n /// \r\n /// Writes a packed repeated Int32, including tag and length, to the stream.\r\n /// \r\n void WritePackedInt32Array(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list);\r\n\r\n /// \r\n /// Writes a packed repeated SInt32, including tag and length, to the stream.\r\n /// \r\n void WritePackedSInt32Array(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list);\r\n\r\n /// \r\n /// Writes a packed repeated UInt32, including tag and length, to the stream.\r\n /// \r\n void WritePackedUInt32Array(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list);\r\n\r\n /// \r\n /// Writes a packed repeated Fixed32, including tag and length, to the stream.\r\n /// \r\n void WritePackedFixed32Array(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list);\r\n\r\n /// \r\n /// Writes a packed repeated SFixed32, including tag and length, to the stream.\r\n /// \r\n void WritePackedSFixed32Array(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list);\r\n\r\n /// \r\n /// Writes a packed repeated Int64, including tag and length, to the stream.\r\n /// \r\n void WritePackedInt64Array(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list);\r\n\r\n /// \r\n /// Writes a packed repeated SInt64, including tag and length, to the stream.\r\n /// \r\n void WritePackedSInt64Array(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list);\r\n\r\n /// \r\n /// Writes a packed repeated UInt64, including tag and length, to the stream.\r\n /// \r\n void WritePackedUInt64Array(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list);\r\n\r\n /// \r\n /// Writes a packed repeated Fixed64, including tag and length, to the stream.\r\n /// \r\n void 
WritePackedFixed64Array(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list);\r\n\r\n /// \r\n /// Writes a packed repeated SFixed64, including tag and length, to the stream.\r\n /// \r\n void WritePackedSFixed64Array(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list);\r\n\r\n /// \r\n /// Writes a packed repeated Double, including tag and length, to the stream.\r\n /// \r\n void WritePackedDoubleArray(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list);\r\n\r\n /// \r\n /// Writes a packed repeated Float, including tag and length, to the stream.\r\n /// \r\n void WritePackedFloatArray(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list);\r\n\r\n /// \r\n /// Writes a packed repeated enumeration of type T, including tag and length, to the stream.\r\n /// \r\n [CLSCompliant(false)]\r\n void WritePackedEnumArray(int fieldNumber, string fieldName, int calculatedSize, IEnumerable list)\r\n where T : struct, IComparable, IFormattable;\r\n }\r\n}", "meta": {"content_hash": "28a175106a29d8ad5a090d313dcec3d0", "timestamp": "", "source": "github", "line_count": 373, "max_line_length": 117, "avg_line_length": 44.49061662198391, "alnum_prop": 0.6294667068394094, "repo_name": "plutoo/protobuf-csharp-port", "id": "64c8065321ff09d8602f07ced65b1c176b33458e", "size": "16633", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "src/ProtocolBuffers/ICodedOutputStream.cs", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "ApacheConf", "bytes": "1911"}, {"name": "C#", "bytes": "7562639"}, {"name": "HTML", "bytes": "6297"}, {"name": "Protocol Buffer", "bytes": "395806"}, {"name": "Python", "bytes": "8912"}, {"name": "Shell", "bytes": "4628"}]}} {"text": "\n\n#include \n#include \n#include \n#include \n\n#include \"firmware.h\"\n#include 
\"chip.h\"\n\nMODULE_FIRMWARE(\"6fire/dmx6firel2.ihx\");\nMODULE_FIRMWARE(\"6fire/dmx6fireap.ihx\");\nMODULE_FIRMWARE(\"6fire/dmx6firecf.bin\");\n\nenum {\n\tFPGA_BUFSIZE = 512, FPGA_EP = 2\n};\n\n/*\n * wMaxPacketSize of pcm endpoints.\n * keep synced with rates_in_packet_size and rates_out_packet_size in pcm.c\n * fpp: frames per isopacket\n *\n * CAUTION: keep sizeof <= buffer[] in usb6fire_fw_init\n */\nstatic const u8 ep_w_max_packet_size[] = {\n\t0xe4, 0x00, 0xe4, 0x00, /* alt 1: 228 EP2 and EP6 (7 fpp) */\n\t0xa4, 0x01, 0xa4, 0x01, /* alt 2: 420 EP2 and EP6 (13 fpp)*/\n\t0x94, 0x01, 0x5c, 0x02 /* alt 3: 404 EP2 and 604 EP6 (25 fpp) */\n};\n\nstatic const u8 known_fw_versions[][2] = {\n\t{ 0x03, 0x01 }\n};\n\nstruct ihex_record {\n\tu16 address;\n\tu8 len;\n\tu8 data[256];\n\tchar error; /* true if an error occurred parsing this record */\n\n\tu8 max_len; /* maximum record length in whole ihex */\n\n\t/* private */\n\tconst char *txt_data;\n\tunsigned int txt_length;\n\tunsigned int txt_offset; /* current position in txt_data */\n};\n\nstatic u8 usb6fire_fw_ihex_hex(const u8 *data, u8 *crc)\n{\n\tu8 val = 0;\n\tint hval;\n\n\thval = hex_to_bin(data[0]);\n\tif (hval >= 0)\n\t\tval |= (hval << 4);\n\n\thval = hex_to_bin(data[1]);\n\tif (hval >= 0)\n\t\tval |= hval;\n\n\t*crc += val;\n\treturn val;\n}\n\n/*\n * returns true if record is available, false otherwise.\n * iff an error occurred, false will be returned and record->error will be true.\n */\nstatic bool usb6fire_fw_ihex_next_record(struct ihex_record *record)\n{\n\tu8 crc = 0;\n\tu8 type;\n\tint i;\n\n\trecord->error = false;\n\n\t/* find begin of record (marked by a colon) */\n\twhile (record->txt_offset < record->txt_length\n\t\t\t&& record->txt_data[record->txt_offset] != ':')\n\t\trecord->txt_offset++;\n\tif (record->txt_offset == record->txt_length)\n\t\treturn false;\n\n\t/* number of characters needed for len, addr and type entries */\n\trecord->txt_offset++;\n\tif (record->txt_offset + 8 > 
record->txt_length) {\n\t\trecord->error = true;\n\t\treturn false;\n\t}\n\n\trecord->len = usb6fire_fw_ihex_hex(record->txt_data +\n\t\t\trecord->txt_offset, &crc);\n\trecord->txt_offset += 2;\n\trecord->address = usb6fire_fw_ihex_hex(record->txt_data +\n\t\t\trecord->txt_offset, &crc) << 8;\n\trecord->txt_offset += 2;\n\trecord->address |= usb6fire_fw_ihex_hex(record->txt_data +\n\t\t\trecord->txt_offset, &crc);\n\trecord->txt_offset += 2;\n\ttype = usb6fire_fw_ihex_hex(record->txt_data +\n\t\t\trecord->txt_offset, &crc);\n\trecord->txt_offset += 2;\n\n\t/* number of characters needed for data and crc entries */\n\tif (record->txt_offset + 2 * (record->len + 1) > record->txt_length) {\n\t\trecord->error = true;\n\t\treturn false;\n\t}\n\tfor (i = 0; i < record->len; i++) {\n\t\trecord->data[i] = usb6fire_fw_ihex_hex(record->txt_data\n\t\t\t\t+ record->txt_offset, &crc);\n\t\trecord->txt_offset += 2;\n\t}\n\tusb6fire_fw_ihex_hex(record->txt_data + record->txt_offset, &crc);\n\tif (crc) {\n\t\trecord->error = true;\n\t\treturn false;\n\t}\n\n\tif (type == 1 || !record->len) /* eof */\n\t\treturn false;\n\telse if (type == 0)\n\t\treturn true;\n\telse {\n\t\trecord->error = true;\n\t\treturn false;\n\t}\n}\n\nstatic int usb6fire_fw_ihex_init(const struct firmware *fw,\n\t\tstruct ihex_record *record)\n{\n\trecord->txt_data = fw->data;\n\trecord->txt_length = fw->size;\n\trecord->txt_offset = 0;\n\trecord->max_len = 0;\n\t/* read all records, if loop ends, record->error indicates,\n\t * whether ihex is valid. 
*/\n\twhile (usb6fire_fw_ihex_next_record(record))\n\t\trecord->max_len = max(record->len, record->max_len);\n\tif (record->error)\n\t\treturn -EINVAL;\n\trecord->txt_offset = 0;\n\treturn 0;\n}\n\nstatic int usb6fire_fw_ezusb_write(struct usb_device *device,\n\t\tint type, int value, char *data, int len)\n{\n\tint ret;\n\n\tret = usb_control_msg(device, usb_sndctrlpipe(device, 0), type,\n\t\t\tUSB_DIR_OUT | USB_TYPE_VENDOR | USB_RECIP_DEVICE,\n\t\t\tvalue, 0, data, len, HZ);\n\tif (ret < 0)\n\t\treturn ret;\n\telse if (ret != len)\n\t\treturn -EIO;\n\treturn 0;\n}\n\nstatic int usb6fire_fw_ezusb_read(struct usb_device *device,\n\t\tint type, int value, char *data, int len)\n{\n\tint ret = usb_control_msg(device, usb_rcvctrlpipe(device, 0), type,\n\t\t\tUSB_DIR_IN | USB_TYPE_VENDOR | USB_RECIP_DEVICE, value,\n\t\t\t0, data, len, HZ);\n\tif (ret < 0)\n\t\treturn ret;\n\telse if (ret != len)\n\t\treturn -EIO;\n\treturn 0;\n}\n\nstatic int usb6fire_fw_fpga_write(struct usb_device *device,\n\t\tchar *data, int len)\n{\n\tint actual_len;\n\tint ret;\n\n\tret = usb_bulk_msg(device, usb_sndbulkpipe(device, FPGA_EP), data, len,\n\t\t\t&actual_len, HZ);\n\tif (ret < 0)\n\t\treturn ret;\n\telse if (actual_len != len)\n\t\treturn -EIO;\n\treturn 0;\n}\n\nstatic int usb6fire_fw_ezusb_upload(\n\t\tstruct usb_interface *intf, const char *fwname,\n\t\tunsigned int postaddr, u8 *postdata, unsigned int postlen)\n{\n\tint ret;\n\tu8 data;\n\tstruct usb_device *device = interface_to_usbdev(intf);\n\tconst struct firmware *fw = NULL;\n\tstruct ihex_record *rec = kmalloc(sizeof(struct ihex_record),\n\t\t\tGFP_KERNEL);\n\n\tif (!rec)\n\t\treturn -ENOMEM;\n\n\tret = request_firmware(&fw, fwname, &device->dev);\n\tif (ret < 0) {\n\t\tkfree(rec);\n\t\tsnd_printk(KERN_ERR PREFIX \"error requesting ezusb \"\n\t\t\t\t\"firmware %s.\\n\", fwname);\n\t\treturn ret;\n\t}\n\tret = usb6fire_fw_ihex_init(fw, rec);\n\tif (ret < 0) 
{\n\t\tkfree(rec);\n\t\trelease_firmware(fw);\n\t\tsnd_printk(KERN_ERR PREFIX \"error validating ezusb \"\n\t\t\t\t\"firmware %s.\\n\", fwname);\n\t\treturn ret;\n\t}\n\t/* upload firmware image */\n\tdata = 0x01; /* stop ezusb cpu */\n\tret = usb6fire_fw_ezusb_write(device, 0xa0, 0xe600, &data, 1);\n\tif (ret < 0) {\n\t\tkfree(rec);\n\t\trelease_firmware(fw);\n\t\tsnd_printk(KERN_ERR PREFIX \"unable to upload ezusb \"\n\t\t\t\t\"firmware %s: begin message.\\n\", fwname);\n\t\treturn ret;\n\t}\n\n\twhile (usb6fire_fw_ihex_next_record(rec)) { /* write firmware */\n\t\tret = usb6fire_fw_ezusb_write(device, 0xa0, rec->address,\n\t\t\t\trec->data, rec->len);\n\t\tif (ret < 0) {\n\t\t\tkfree(rec);\n\t\t\trelease_firmware(fw);\n\t\t\tsnd_printk(KERN_ERR PREFIX \"unable to upload ezusb \"\n\t\t\t\t\t\"firmware %s: data urb.\\n\", fwname);\n\t\t\treturn ret;\n\t\t}\n\t}\n\n\trelease_firmware(fw);\n\tkfree(rec);\n\tif (postdata) { /* write data after firmware has been uploaded */\n\t\tret = usb6fire_fw_ezusb_write(device, 0xa0, postaddr,\n\t\t\t\tpostdata, postlen);\n\t\tif (ret < 0) {\n\t\t\tsnd_printk(KERN_ERR PREFIX \"unable to upload ezusb \"\n\t\t\t\t\t\"firmware %s: post urb.\\n\", fwname);\n\t\t\treturn ret;\n\t\t}\n\t}\n\n\tdata = 0x00; /* resume ezusb cpu */\n\tret = usb6fire_fw_ezusb_write(device, 0xa0, 0xe600, &data, 1);\n\tif (ret < 0) {\n\t\tsnd_printk(KERN_ERR PREFIX \"unable to upload ezusb \"\n\t\t\t\t\"firmware %s: end message.\\n\", fwname);\n\t\treturn ret;\n\t}\n\treturn 0;\n}\n\nstatic int usb6fire_fw_fpga_upload(\n\t\tstruct usb_interface *intf, const char *fwname)\n{\n\tint ret;\n\tint i;\n\tstruct usb_device *device = interface_to_usbdev(intf);\n\tu8 *buffer = kmalloc(FPGA_BUFSIZE, GFP_KERNEL);\n\tconst char *c;\n\tconst char *end;\n\tconst struct firmware *fw;\n\n\tif (!buffer)\n\t\treturn -ENOMEM;\n\n\tret = request_firmware(&fw, fwname, &device->dev);\n\tif (ret < 0) {\n\t\tsnd_printk(KERN_ERR PREFIX \"unable to get fpga firmware 
%s.\\n\",\n\t\t\t\tfwname);\n\t\tkfree(buffer);\n\t\treturn -EIO;\n\t}\n\n\tc = fw->data;\n\tend = fw->data + fw->size;\n\n\tret = usb6fire_fw_ezusb_write(device, 8, 0, NULL, 0);\n\tif (ret < 0) {\n\t\tkfree(buffer);\n\t\trelease_firmware(fw);\n\t\tsnd_printk(KERN_ERR PREFIX \"unable to upload fpga firmware: \"\n\t\t\t\t\"begin urb.\\n\");\n\t\treturn ret;\n\t}\n\n\twhile (c != end) {\n\t\tfor (i = 0; c != end && i < FPGA_BUFSIZE; i++, c++)\n\t\t\tbuffer[i] = byte_rev_table[(u8) *c];\n\n\t\tret = usb6fire_fw_fpga_write(device, buffer, i);\n\t\tif (ret < 0) {\n\t\t\trelease_firmware(fw);\n\t\t\tkfree(buffer);\n\t\t\tsnd_printk(KERN_ERR PREFIX \"unable to upload fpga \"\n\t\t\t\t\t\"firmware: fw urb.\\n\");\n\t\t\treturn ret;\n\t\t}\n\t}\n\trelease_firmware(fw);\n\tkfree(buffer);\n\n\tret = usb6fire_fw_ezusb_write(device, 9, 0, NULL, 0);\n\tif (ret < 0) {\n\t\tsnd_printk(KERN_ERR PREFIX \"unable to upload fpga firmware: \"\n\t\t\t\t\"end urb.\\n\");\n\t\treturn ret;\n\t}\n\treturn 0;\n}\n\n/* check, if the firmware version the devices has currently loaded\n * is known by this driver. 'version' needs to have 4 bytes version\n * info data. */\nstatic int usb6fire_fw_check(u8 *version)\n{\n\tint i;\n\n\tfor (i = 0; i < ARRAY_SIZE(known_fw_versions); i++)\n\t\tif (!memcmp(version, known_fw_versions + i, 2))\n\t\t\treturn 0;\n\n\tsnd_printk(KERN_ERR PREFIX \"invalid fimware version in device: %*ph. \"\n\t\t\t\"please reconnect to power. 
if this failure \"\n\t\t\t\"still happens, check your firmware installation.\",\n\t\t\t4, version);\n\treturn -EINVAL;\n}\n\nint usb6fire_fw_init(struct usb_interface *intf)\n{\n\tint i;\n\tint ret;\n\tstruct usb_device *device = interface_to_usbdev(intf);\n\t/* buffer: 8 receiving bytes from device and\n\t * sizeof(EP_W_MAX_PACKET_SIZE) bytes for non-const copy */\n\tu8 buffer[12];\n\n\tret = usb6fire_fw_ezusb_read(device, 1, 0, buffer, 8);\n\tif (ret < 0) {\n\t\tsnd_printk(KERN_ERR PREFIX \"unable to receive device \"\n\t\t\t\t\"firmware state.\\n\");\n\t\treturn ret;\n\t}\n\tif (buffer[0] != 0xeb || buffer[1] != 0xaa || buffer[2] != 0x55) {\n\t\tsnd_printk(KERN_ERR PREFIX \"unknown device firmware state \"\n\t\t\t\t\"received from device: \");\n\t\tfor (i = 0; i < 8; i++)\n\t\t\tsnd_printk(\"%02x \", buffer[i]);\n\t\tsnd_printk(\"\\n\");\n\t\treturn -EIO;\n\t}\n\t/* do we need fpga loader ezusb firmware? */\n\tif (buffer[3] == 0x01) {\n\t\tret = usb6fire_fw_ezusb_upload(intf,\n\t\t\t\t\"6fire/dmx6firel2.ihx\", 0, NULL, 0);\n\t\tif (ret < 0)\n\t\t\treturn ret;\n\t\treturn FW_NOT_READY;\n\t}\n\t/* do we need fpga firmware and application ezusb firmware? */\n\telse if (buffer[3] == 0x02) {\n\t\tret = usb6fire_fw_check(buffer + 4);\n\t\tif (ret < 0)\n\t\t\treturn ret;\n\t\tret = usb6fire_fw_fpga_upload(intf, \"6fire/dmx6firecf.bin\");\n\t\tif (ret < 0)\n\t\t\treturn ret;\n\t\tmemcpy(buffer, ep_w_max_packet_size,\n\t\t\t\tsizeof(ep_w_max_packet_size));\n\t\tret = usb6fire_fw_ezusb_upload(intf, \"6fire/dmx6fireap.ihx\",\n\t\t\t\t0x0003,\tbuffer, sizeof(ep_w_max_packet_size));\n\t\tif (ret < 0)\n\t\t\treturn ret;\n\t\treturn FW_NOT_READY;\n\t}\n\t/* all fw loaded? */\n\telse if (buffer[3] == 0x03)\n\t\treturn usb6fire_fw_check(buffer + 4);\n\t/* unknown data? 
*/\n\telse {\n\t\tsnd_printk(KERN_ERR PREFIX \"unknown device firmware state \"\n\t\t\t\t\"received from device: \");\n\t\tfor (i = 0; i < 8; i++)\n\t\t\tsnd_printk(\"%02x \", buffer[i]);\n\t\tsnd_printk(\"\\n\");\n\t\treturn -EIO;\n\t}\n\treturn 0;\n}\n\n", "meta": {"content_hash": "15d745cdbedbb969c8d9c35a1595961a", "timestamp": "", "source": "github", "line_count": 404, "max_line_length": 80, "avg_line_length": 24.95049504950495, "alnum_prop": 0.645436507936508, "repo_name": "Ant-OS/android_kernel_moto_shamu", "id": "b9defcdeb7ef805af05a6453ce309a2eb64bdb18", "size": "10519", "binary": false, "copies": "2121", "ref": "refs/heads/master", "path": "sound/usb/6fire/firmware.c", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ASP", "bytes": "4528"}, {"name": "Assembly", "bytes": "9738337"}, {"name": "Awk", "bytes": "18681"}, {"name": "C", "bytes": "467488098"}, {"name": "C++", "bytes": "3473858"}, {"name": "Clojure", "bytes": "547"}, {"name": "Groff", "bytes": "22012"}, {"name": "Lex", "bytes": "40805"}, {"name": "Makefile", "bytes": "1342678"}, {"name": "Objective-C", "bytes": "1121986"}, {"name": "Perl", "bytes": "461504"}, {"name": "Python", "bytes": "33978"}, {"name": "Scilab", "bytes": "21433"}, {"name": "Shell", "bytes": "138789"}, {"name": "SourcePawn", "bytes": "4687"}, {"name": "UnrealScript", "bytes": "6113"}, {"name": "Yacc", "bytes": "83091"}]}} {"text": "\n\n/**\n * @file\n * @brief\n * This file defines the OpenThread crypto C APIs.\n */\n\n#ifndef OPENTHREAD_CRYPTO_H_\n#define OPENTHREAD_CRYPTO_H_\n\n#include \n#include \n\n#ifdef __cplusplus\nextern \"C\" {\n#endif\n\n/**\n * @addtogroup api-crypto\n *\n * @brief\n * This module includes cryptographic functions.\n *\n * @{\n *\n */\n\n#define OT_CRYPTO_HMAC_SHA_HASH_SIZE 32 ///< Length of HMAC SHA (in bytes).\n\n/**\n * This function performs HMAC computation.\n *\n * @param[in] aKey A pointer to the key.\n * @param[in] aKeyLength The key length in bytes.\n * @param[in] aBuf A 
pointer to the input buffer.\n * @param[in] aBufLength The length of @p aBuf in bytes.\n * @param[out] aHash A pointer to the output hash buffer.\n *\n */\nvoid otCryptoHmacSha256(\n const uint8_t *aKey, uint16_t aKeyLength,\n const uint8_t *aBuf, uint16_t aBufLength,\n uint8_t *aHash);\n\n/**\n * This method performs AES CCM computation.\n *\n * @param[in] aKey A pointer to the key.\n * @param[in] aKeyLength Length of the key in bytes.\n * @param[in] aTagLength Length of tag in bytes.\n * @param[in] aNonce A pointer to the nonce.\n * @param[in] aNonceLength Length of nonce in bytes.\n *\n * @param[in] aHeader A pointer to the header.\n * @param[in] aHeaderLength Length of header in bytes.\n *\n * @param[inout] aPlainText A pointer to the plaintext.\n * @param[inout] aCipherText A pointer to the ciphertext.\n * @param[in] aLength Plaintext length in bytes.\n * @param[in] aEncrypt `true` on encrypt and `false` on decrypt.\n *\n * @param[out] aTag A pointer to the tag.\n *\n */\nvoid otCryptoAesCcm(\n const uint8_t *aKey, uint16_t aKeyLength,\n uint8_t aTagLength,\n const void *aNonce, uint8_t aNonceLength,\n const void *aHeader, uint32_t aHeaderLength,\n void *aPlainText, void *aCipherText, uint32_t aLength, bool aEncrypt,\n void *aTag);\n\n/**\n * @}\n *\n */\n\n#ifdef __cplusplus\n} // extern \"C\"\n#endif\n\n#endif // OPENTHREAD_CRYPTO_H_\n", "meta": {"content_hash": "e9706e9d8632e56471be3249fed671f8", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 79, "avg_line_length": 25.10843373493976, "alnum_prop": 0.6228406909788867, "repo_name": "fbsder/openthread", "id": "d80a45af879bf664b492b6300da0961dd367f192", "size": "3692", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "include/openthread/crypto.h", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Batchfile", "bytes": "10128"}, {"name": "C", "bytes": "574131"}, {"name": "C#", "bytes": "18077"}, {"name": "C++", "bytes": "3369485"}, {"name": "M4", 
"bytes": "47539"}, {"name": "Makefile", "bytes": "81726"}, {"name": "Python", "bytes": "1050179"}, {"name": "Ruby", "bytes": "3397"}, {"name": "Shell", "bytes": "26917"}]}} {"text": "\r\n\r\n\r\n \r\n \r\n \r\n \r\n\r\n Eterno Amor\r\n \r\n \r\n\r\n \r\n\r\n \r\n\r\n \r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n \r\n\r\n \r\n\r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n\r\n\r\n \r\n\r\n \r\n\r\n\r\n
\r\n\r\n \r\n
\r\n

Eterno Amor

\r\n\r\n

\r\n \r\n ★★★★☆\r\n \r\n\r\n Wanderley Caloni, January 6, 2016

\r\n

\r\n\r\n\r\n \r\n\r\n \r\n

\r\n

Um misto de remorso pelas agruras da guerra com a energia de uma investiga\u00e7\u00e3o guiada unicamente pela f\u00e9 (ou pelo amor). A beleza da fotografia fria e triste do front se contrap\u00f5e aos horizontes on\u00edricos do presente nost\u00e1lgico, que clama pela elucida\u00e7\u00e3o completa de um passado nebuloso que separou um casal apaixonado prestes a se casar. Eterno Amor \u00e9 pura poesia na forma de criatividade narrativa. Um Pierre Jeunet que retrabalha sua Am\u00e9lie Poulain em tra\u00e7os mais cru\u00e9is e menos esperan\u00e7osos, e que tenta soar como um romance \u00e9pico em torno de personagens com pouca alma e muita persist\u00eancia.

\n\n

Utilizando novamente Audrey Tautou, a namoradinha da Fran\u00e7a, o diretor Jean-Pierre Jeunet mais uma vez aplica a bel\u00edssima fotografia de seu colaborador Bruno Delbonnel (Harry Potter e o Enigma do Pr\u00edncipe, Inside Llewyn Davis: Balada de um Homem Comum, Am\u00e9lie Poulain) e a din\u00e2mica e inventiva montagem de seu editor Herv\u00e9 Schneid (Micmacs - Um Plano Complicado) para contar uma hist\u00f3ria cheia de poder criativo, mas que encontra em seu n\u00facleo um drama intranspon\u00edvel para o estilo do diretor.

\n\n

Sua busca incessante, no entanto, em tentar juntar todas as pistas que a bela mas manca Mathilde (Tatou) vai acumulando, testemunho ap\u00f3s testemunho, em busca da verdade definitiva a respeito do paradeiro de seu amor, Manech (Gaspard Ulliel), \u00e9 t\u00e3o contagiante que o mundo que se cria em torno acaba compensando a total falta de realismo nesse conto quase-surrealista.

\n\n

Com aspectos t\u00e9cnicos impressionantes a cada cena – exceto talvez pela m\u00fasica de Angelo Badalamenti, repetitiva e mon\u00f3tona, mas ainda assim condizente com a proposta do filme – e com um ritmo que vai se formando pela repeti\u00e7\u00e3o (a insist\u00eancia do carteiro em espalhar o cascalho da entrada da casa de Mathilde, s\u00f3 pelo bem da “entrada triunfal”), o roteiro da dupla Guillaume Laurant e do pr\u00f3prio Pierre Jeunet, baseados no romance de S\u00e9bastien Japrisot, n\u00e3o consegue se desvencilhar da sua complexidade em utilizar diferentes personagens que se parecem em situa\u00e7\u00f5es que se embaralham, o que se por um lado acaba contribuindo para a atmosfera de desorienta\u00e7\u00e3o de Mathilde, vai aos poucos se tornando uma distra\u00e7\u00e3o inc\u00f4moda para o espectador, que j\u00e1 n\u00e3o espera encontrar qualquer conex\u00e3o memor\u00e1vel entre as pistas.

\n\n

At\u00e9 mesmo a dualidade de cores frias da guerra com as cores sempre aquecidas do presente da protagonista, fascinante no come\u00e7o, tamb\u00e9m vai se esvaecendo com a cada vez mais distante capacidade de atribuir significado naquele emaranhado de s\u00edmbolos. Jeunet se deixa sabotar pela sua pr\u00f3pria obsess\u00e3o de detalhes, e assim como seu mais recente trabalho, Uma Viagem Extraordin\u00e1ria, se esquece das emo\u00e7\u00f5es prim\u00e1rias de seus personagens para focar unicamente nas idiossincrasias de sua complexa hist\u00f3ria.

\n\n

O que acaba por fim em tornar tudo aquilo uma imensa espiral de eventos que revela uma estrutura tal qual as escadas do farol onde o casal se encontrava: aparentemente infinita, mas se encarada com dedica\u00e7\u00e3o e empenho, alcan\u00e7\u00e1vel at\u00e9 por uma manca que deseja enxergar al\u00e9m de suas limita\u00e7\u00f5es f\u00edsicas. \u00c9 o metaf\u00edsico celebrado em vida. A m\u00e1gica de usar o cinema como cornuc\u00f3pia de simbolismos visuais.

\n\r\n Imagens e cr\u00e9ditos no IMDB.\r\n\r\n
\r\n\r\n \r\n \r\n ★★★★☆\r\n \r\n\r\n Eterno Amor ● Eterno Amor. Un long dimanche de fian\u00e7ailles (France, 2004). Dirigido por Jean-Pierre Jeunet. Escrito por S\u00e9bastien Japrisot, Jean-Pierre Jeunet, Guillaume Laurant, Guillaume Laurant. Com Audrey Tautou, Gaspard Ulliel, Dominique Pinon, Chantal Neuwirth, Andr\u00e9 Dussollier, Ticky Holgado, Marion Cotillard, Dominique Bettenfeld, Jodie Foster. ● Nota: 4/5. Categoria: movies. Publicado em 2016-01-06. Texto escrito por Wanderley Caloni.\r\n\r\n


Quer comentar?

\r\n\r\n\r\n
\r\n\r\n
\r\n\r\n
\r\n \r\n\r\n
\r\n\r\n \r\n
\r\n
\r\n \r\n Share\r\n \r\n\r\n  \r\n\r\n \r\n \r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n \r\n\r\n \r\n \r\n \r\n \r\n \r\n \r\n
\r\n
\r\n\r\n\r\n
\r\n\r\n \r\n \r\n
\r\n\r\n \r\n\r\n \r\n\r\n\r\n", "meta": {"content_hash": "5750f5fb43c3be1ac0ed645741172b4e", "timestamp": "", "source": "github", "line_count": 296, "max_line_length": 842, "avg_line_length": 41.986486486486484, "alnum_prop": 0.6437882201480528, "repo_name": "cinetenisverde/cinetenisverde.github.io", "id": "50eb0e0d692d60f0129f5cd086d6384190bc5365", "size": "12522", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "eterno-amor/index.html", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "533"}, {"name": "HTML", "bytes": "31113501"}, {"name": "JavaScript", "bytes": "3266"}, {"name": "Python", "bytes": "2943"}]}} {"text": "function toggle_visibility(id) {\n\t\t\t\t\tvar e = document.getElementById(id);\n\t\t\t\t\tif(e.style.display == 'block')\n\t\t\t\t\t\te.style.display = 'none';\n\t\t\t\t\telse\n\t\t\t\t\t\te.style.display = 'block';\n}\n \n \njQuery(document).ready(function($) {\n \n $(\".scroll\").click(function(event){\t\t\n \tevent.preventDefault();\n $('body').animate({scrollTop:$(this.hash).offset().top}, 1500);\n });\n});", "meta": {"content_hash": "7d199aa600dde168b96b11fead6cf842", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 91, "avg_line_length": 33.1875, "alnum_prop": 0.4218455743879473, "repo_name": "regravity/ThreadCSS", "id": "65450d0d0e315646c520802a73cde60b56721649", "size": "531", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "js/function.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "42847"}, {"name": "HTML", "bytes": "5902"}, {"name": "JavaScript", "bytes": "531"}]}} {"text": "\npackage com.intellij.refactoring.move.moveInstanceMethod;\n\nimport com.intellij.openapi.actionSystem.DataContext;\nimport com.intellij.openapi.editor.Editor;\nimport com.intellij.openapi.project.Project;\nimport com.intellij.psi.*;\nimport com.intellij.refactoring.move.MoveCallback;\nimport 
com.intellij.refactoring.move.MoveHandlerDelegate;\nimport com.intellij.refactoring.move.moveClassesOrPackages.JavaMoveClassesOrPackagesHandler;\nimport org.jetbrains.annotations.Nullable;\n\npublic class MoveInstanceMethodHandlerDelegate extends MoveHandlerDelegate {\n @Override\n public boolean canMove(final PsiElement[] elements, @Nullable final PsiElement targetContainer) {\n if (elements.length != 1) return false;\n PsiElement element = elements [0];\n if (!(element instanceof PsiMethod)) return false;\n if (element instanceof SyntheticElement) return false;\n PsiMethod method = (PsiMethod) element;\n if (method.hasModifierProperty(PsiModifier.STATIC)) return false;\n return targetContainer == null || super.canMove(elements, targetContainer);\n }\n\n @Override\n public boolean isValidTarget(final PsiElement targetElement, PsiElement[] sources) {\n for (PsiElement source : sources) {\n if (JavaMoveClassesOrPackagesHandler.invalid4Move(source)) return false;\n }\n return targetElement instanceof PsiClass && !(targetElement instanceof PsiAnonymousClass);\n }\n\n @Override\n public boolean tryToMove(final PsiElement element, final Project project, final DataContext dataContext, final PsiReference reference,\n final Editor editor) {\n if (element instanceof PsiMethod) {\n PsiMethod method = (PsiMethod) element;\n if (!method.hasModifierProperty(PsiModifier.STATIC)) {\n new MoveInstanceMethodHandler().invoke(project, new PsiElement[]{method}, dataContext);\n return true;\n }\n }\n return false;\n }\n\n @Override\n public void doMove(final Project project, final PsiElement[] elements, final PsiElement targetContainer, final MoveCallback callback) {\n new MoveInstanceMethodHandler().invoke(project, elements, null);\n }\n}\n", "meta": {"content_hash": "258b6a6fd63e3565e06b0802904c7518", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 137, "avg_line_length": 42.2, "alnum_prop": 0.762085308056872, "repo_name": "xfournet/intellij-community", 
"id": "baeb4deb8b2b865eafb7fa19f7003296554b0a91", "size": "2710", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "java/java-impl/src/com/intellij/refactoring/move/moveInstanceMethod/MoveInstanceMethodHandlerDelegate.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "AMPL", "bytes": "20665"}, {"name": "AspectJ", "bytes": "182"}, {"name": "Batchfile", "bytes": "60827"}, {"name": "C", "bytes": "211454"}, {"name": "C#", "bytes": "1264"}, {"name": "C++", "bytes": "199030"}, {"name": "CMake", "bytes": "1675"}, {"name": "CSS", "bytes": "201445"}, {"name": "CoffeeScript", "bytes": "1759"}, {"name": "Erlang", "bytes": "10"}, {"name": "Groovy", "bytes": "3289024"}, {"name": "HLSL", "bytes": "57"}, {"name": "HTML", "bytes": "1901772"}, {"name": "J", "bytes": "5050"}, {"name": "Java", "bytes": "166392304"}, {"name": "JavaScript", "bytes": "570364"}, {"name": "Jupyter Notebook", "bytes": "93222"}, {"name": "Kotlin", "bytes": "4720744"}, {"name": "Lex", "bytes": "147486"}, {"name": "Makefile", "bytes": "2352"}, {"name": "NSIS", "bytes": "51061"}, {"name": "Objective-C", "bytes": "27861"}, {"name": "Perl", "bytes": "903"}, {"name": "Perl 6", "bytes": "26"}, {"name": "Protocol Buffer", "bytes": "6680"}, {"name": "Python", "bytes": "25477371"}, {"name": "Roff", "bytes": "37534"}, {"name": "Ruby", "bytes": "1217"}, {"name": "Shell", "bytes": "64141"}, {"name": "Smalltalk", "bytes": "338"}, {"name": "TeX", "bytes": "25473"}, {"name": "Thrift", "bytes": "1846"}, {"name": "TypeScript", "bytes": "9469"}, {"name": "Visual Basic", "bytes": "77"}, {"name": "XSLT", "bytes": "113040"}]}} {"text": "/* A Bison parser, made by GNU Bison 3.0.4. 
*/\n\n/* Bison interface for Yacc-like parsers in C\n\n Copyright (C) 1984, 1989-1990, 2000-2015 Free Software Foundation, Inc.\n\n This program is free software: you can redistribute it and/or modify\n it under the terms of the GNU General Public License as published by\n the Free Software Foundation, either version 3 of the License, or\n (at your option) any later version.\n\n This program is distributed in the hope that it will be useful,\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n GNU General Public License for more details.\n\n You should have received a copy of the GNU General Public License\n along with this program. If not, see . */\n\n/* As a special exception, you may create a larger work that contains\n part or all of the Bison parser skeleton and distribute that work\n under terms of your choice, so long as that work isn't itself a\n parser generator using the skeleton or a modified version thereof\n as a parser skeleton. Alternatively, if you modify or redistribute\n the parser skeleton itself, you may (at your option) remove this\n special exception, which will cause the skeleton and the resulting\n Bison output files to be licensed under the GNU General Public\n License without this special exception.\n\n This special exception was added by the Free Software Foundation in\n version 2.2 of Bison. */\n\n#ifndef YY_YY_PARSE_H_INCLUDED\n# define YY_YY_PARSE_H_INCLUDED\n/* Debug traces. */\n#ifndef YYDEBUG\n# define YYDEBUG 0\n#endif\n#if YYDEBUG\nextern int yydebug;\n#endif\n\n/* Token type. 
*/\n#ifndef YYTOKENTYPE\n# define YYTOKENTYPE\n enum yytokentype\n {\n CHAR = 258,\n NUMBER = 259,\n SECTEND = 260,\n SCDECL = 261,\n XSCDECL = 262,\n NAME = 263,\n PREVCCL = 264,\n EOF_OP = 265,\n OPTION_OP = 266,\n OPT_OUTFILE = 267,\n OPT_PREFIX = 268,\n OPT_YYCLASS = 269,\n OPT_HEADER = 270,\n OPT_EXTRA_TYPE = 271,\n OPT_TABLES = 272,\n CCE_ALNUM = 273,\n CCE_ALPHA = 274,\n CCE_BLANK = 275,\n CCE_CNTRL = 276,\n CCE_DIGIT = 277,\n CCE_GRAPH = 278,\n CCE_LOWER = 279,\n CCE_PRINT = 280,\n CCE_PUNCT = 281,\n CCE_SPACE = 282,\n CCE_UPPER = 283,\n CCE_XDIGIT = 284,\n CCE_NEG_ALNUM = 285,\n CCE_NEG_ALPHA = 286,\n CCE_NEG_BLANK = 287,\n CCE_NEG_CNTRL = 288,\n CCE_NEG_DIGIT = 289,\n CCE_NEG_GRAPH = 290,\n CCE_NEG_LOWER = 291,\n CCE_NEG_PRINT = 292,\n CCE_NEG_PUNCT = 293,\n CCE_NEG_SPACE = 294,\n CCE_NEG_UPPER = 295,\n CCE_NEG_XDIGIT = 296,\n CCL_OP_DIFF = 297,\n CCL_OP_UNION = 298,\n BEGIN_REPEAT_POSIX = 299,\n END_REPEAT_POSIX = 300,\n BEGIN_REPEAT_FLEX = 301,\n END_REPEAT_FLEX = 302\n };\n#endif\n/* Tokens. 
*/\n#define CHAR 258\n#define NUMBER 259\n#define SECTEND 260\n#define SCDECL 261\n#define XSCDECL 262\n#define NAME 263\n#define PREVCCL 264\n#define EOF_OP 265\n#define OPTION_OP 266\n#define OPT_OUTFILE 267\n#define OPT_PREFIX 268\n#define OPT_YYCLASS 269\n#define OPT_HEADER 270\n#define OPT_EXTRA_TYPE 271\n#define OPT_TABLES 272\n#define CCE_ALNUM 273\n#define CCE_ALPHA 274\n#define CCE_BLANK 275\n#define CCE_CNTRL 276\n#define CCE_DIGIT 277\n#define CCE_GRAPH 278\n#define CCE_LOWER 279\n#define CCE_PRINT 280\n#define CCE_PUNCT 281\n#define CCE_SPACE 282\n#define CCE_UPPER 283\n#define CCE_XDIGIT 284\n#define CCE_NEG_ALNUM 285\n#define CCE_NEG_ALPHA 286\n#define CCE_NEG_BLANK 287\n#define CCE_NEG_CNTRL 288\n#define CCE_NEG_DIGIT 289\n#define CCE_NEG_GRAPH 290\n#define CCE_NEG_LOWER 291\n#define CCE_NEG_PRINT 292\n#define CCE_NEG_PUNCT 293\n#define CCE_NEG_SPACE 294\n#define CCE_NEG_UPPER 295\n#define CCE_NEG_XDIGIT 296\n#define CCL_OP_DIFF 297\n#define CCL_OP_UNION 298\n#define BEGIN_REPEAT_POSIX 299\n#define END_REPEAT_POSIX 300\n#define BEGIN_REPEAT_FLEX 301\n#define END_REPEAT_FLEX 302\n\n/* Value type. */\n#if ! defined YYSTYPE && ! 
defined YYSTYPE_IS_DECLARED\ntypedef int YYSTYPE;\n# define YYSTYPE_IS_TRIVIAL 1\n# define YYSTYPE_IS_DECLARED 1\n#endif\n\n\nextern YYSTYPE yylval;\n\nint yyparse (void);\n\n#endif /* !YY_YY_PARSE_H_INCLUDED */\n", "meta": {"content_hash": "42ea53de0b18b9312c7c7ce32521c551", "timestamp": "", "source": "github", "line_count": 154, "max_line_length": 76, "avg_line_length": 26.733766233766232, "alnum_prop": 0.7002671848433325, "repo_name": "loki04/csibe", "id": "033f91974e11261d03432c8861554b8c22759016", "size": "4117", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "src/flex-2.6.0/src/parse.h", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Awk", "bytes": "2800"}, {"name": "C", "bytes": "1097132"}, {"name": "C++", "bytes": "8474"}, {"name": "CMake", "bytes": "15006"}, {"name": "CSS", "bytes": "1746"}, {"name": "Groff", "bytes": "26612"}, {"name": "HTML", "bytes": "128005"}, {"name": "LLVM", "bytes": "5737"}, {"name": "Lex", "bytes": "136350"}, {"name": "M4", "bytes": "99282"}, {"name": "Makefile", "bytes": "325456"}, {"name": "PHP", "bytes": "3703"}, {"name": "Perl", "bytes": "12320"}, {"name": "Prolog", "bytes": "430"}, {"name": "Python", "bytes": "17647"}, {"name": "Shell", "bytes": "427842"}, {"name": "TeX", "bytes": "323102"}, {"name": "XSLT", "bytes": "12288"}, {"name": "Yacc", "bytes": "34188"}]}} {"text": "class CreateBodyParts < ActiveRecord::Migration\n def change\n create_table :body_parts do |t|\n t.string :name\n t.string :slug\n\n t.timestamps\n end\n end\nend\n", "meta": {"content_hash": "bcbb25f0fca33b6b77f093ac63fc8a3c", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 47, "avg_line_length": 17.7, "alnum_prop": 0.6497175141242938, "repo_name": "ericallfesta/kirarirenew", "id": "2c5397417d461fc527311a5c819eb20986940424", "size": "177", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "proto.git 2/db/migrate/20140103132455_create_body_parts.rb", "mode": "33261", 
"license": "apache-2.0", "language": [{"name": "CSS", "bytes": "170444"}, {"name": "CoffeeScript", "bytes": "2178"}, {"name": "JavaScript", "bytes": "99649"}, {"name": "Ruby", "bytes": "234345"}]}} {"text": "Making a formula is easy. Just `brew create URL` and then `brew install $FORMULA` (perhaps with `--debug --verbose`). Basically, a formula is a Ruby file. You can place it anywhere you want (local or remote) and install it by pointing to the file or URL.\n\nWe want your formula to be awesome, and the cookbook will tell you how.\n\n## Terminology - Homebrew speak\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
FormulaThe package definition/usr/local/Library/Formula/foo.rb
KegThe installation prefix of a Formula/usr/local/Cellar/foo/0.1
opt prefixA symlink to the active version of a keg/usr/local/opt/foo
CellarAll kegs are installed here/usr/local/Cellar
TapAn optional repository (git) of Formulae/usr/local/Library/Taps
BottlePre-built (binary) Keg that can be unpackedqt-4.8.4.mountain_lion.bottle.1.tar.gz
\n\n_More general: `brew --prefix` and `brew --repository` instead of `/usr/local` but lets KISS._\n\n\n## An Introduction\n\nDid you see `/usr/local/.git`? Homebrew is built on Git. This means you can just do your work in `/usr/local` and merge in upstream changes as you go.\n\nHomebrew installs to the `Cellar`, it then symlinks some of the installation into `/usr/local` so that other programs can see what's going on. We suggest you `brew ls` a few of the kegs in your Cellar to see how it is all arranged.\n\nPackages are installed according to their formulae, which live in `$(brew --repository)/Library/Formula`. Check some out. You can view any formula at anytime; e.g. `brew edit wget`.\n\n\n\n# Basic Instructions\n\nMake sure you run `brew update` before you start. This turns your Homebrew installation into a Git repository.\n\nBefore contributing, make sure your package:\n\n* meets all our [Acceptable Formulae](Acceptable-Formulae.md) requirements\n* isn't already in Homebrew (check `brew search $FORMULA`)\n* isn't in another [Homebrew tap](https://github.com/Homebrew)\n* isn't already waiting to be merged (check the [issue tracker](https://github.com/Homebrew/homebrew/issues))\n* is still supported by upstream\n* has a stable, tagged version (i.e. not just a GitHub repository with no versions). See [Interesting-Taps-&-Branches](Interesting-Taps-&-Branches.md) for where pre-release and head-only versions belong.\n\nMake sure you search thoroughly (all aliases!). We don\u2019t want you to waste your time.\n\nBe sure to look over the [contributing guidelines](https://github.com/Homebrew/homebrew/blob/master/CONTRIBUTING.md) as well.\n\n\n## Will we merge your formula?\n\nProbably. But we have rules to keep the quality and goals of Homebrew intact: Please read [Acceptable Formulae](Acceptable-Formulae.md).\n\n## Some Quick Examples Before You Get Started\n\nFormulae aren\u2019t that complicated. 
[etl](https://github.com/Homebrew/homebrew/blob/master/Library/Formula/etl.rb) is as simple as it gets.\n\nAnd then [Git](https://github.com/Homebrew/homebrew/tree/master/Library/Formula/git.rb) and [flac](https://github.com/Homebrew/homebrew/tree/master/Library/Formula/flac.rb) show more advanced functionality.\n\nA more complete example-formula [cheat-sheet](https://github.com/Homebrew/homebrew/blob/master/Library/Contributions/example-formula.rb) shows almost all the stuff you can use in a Formula.\n\n## Grab the URL\n\nAll you need to make a formula is a URL to the tarball.\n\n brew create http://example.com/foo-0.1.tar.gz\n\nThis creates:\n\n`$HOMEBREW_REPOSITORY/Library/Formula/foo.rb`\n\nAnd opens it in your `$EDITOR`. It'll look like:\n\n```ruby\nclass Foo < Formula\n url \"http://example.com/foo-0.1.tar.gz\"\n homepage \"\"\n sha256 \"85cc828a96735bdafcf29eb6291ca91bac846579bcef7308536e0c875d6c81d7\"\n\n # depends_on \"cmake\" => :build\n\n def install\n system \"./configure\", \"--prefix=#{prefix}\", \"--disable-debug\", \"--disable-dependency-tracking\"\n# system \"cmake\", \".\", *std_cmake_args\n system \"make\", \"install\"\n end\nend\n```\n\n**Note:** If `brew` said `Warning: Version cannot be determined from URL` when doing the `create` step, you\u2019ll need to explicitly add the correct version to the formula with `version \"foo\"` **and then save the formula**. `brew install` should then proceed without any trouble.\n\n**Note:** If `brew` said `No formula found for \"php54-timezonedb\". Searching open pull requests...` and you are writing a Tap, you should run `brew tap --repair`.\n\n## Fill in the Homepage\n\n**We don\u2019t accept formulae without homepages!**\n\nHomebrew now has a description field (`desc`). Try and summarize this from the homepage.\n\n## Check the build system\n\n brew install -i foo\n\nYou\u2019re now at new prompt with the tarball extracted to a temporary sandbox.\n\nCheck the package\u2019s `README`. 
Does the package install with `autotools`, `cmake`, or something else? Delete the commented out cmake lines if the package uses autotools (i.e. if it has a `configure` script).\n\n\n## Check for dependencies\n\nThe `README` probably tells you about dependencies. Homebrew or OS X probably already has them. You can check for Homebrew deps with `brew search`. These are the common deps that OS X comes with:\n\n* `libexpat`\n* `libGL`\n* `libiconv`\n* `libpcap`\n* `libxml2`\n* `Python`\n* `Ruby`\n\nThere are plenty of others. Check `/usr/lib` to see.\n\nWe try to not duplicate libraries and complicated tools in core Homebrew. We dupe some common tools though. But generally, we avoid dupes because it\u2019s one of Homebrew\u2019s foundations. (And it causes build and usage problems!)\n\nThe one special exception is OpenSSL. Anything that uses OpenSSL *should* be built using Homebrew\u2019s shipped OpenSSL and our test bot's post-install audit will warn of this when it is detected. (*Of course, there are exceptions to the exception. Not everything can be forced onto our OpenSSL)*.\n\nBecause Homebrew\u2019s OpenSSL is `keg_only` to avoid conflicting with the system sometimes formulae need to have environmental variables set or special configuration flags passed to locate our preferred OpenSSL; you can see this mechanism in the [clamav](https://github.com/Homebrew/homebrew/blob/master/Library/Formula/clamav.rb#L28) formula. Usually this is unnecessary because when OpenSSL is specified as a dependency Homebrew temporarily prepends the $PATH with that prefix.\n\nHomebrew maintains a special [tap that provides other useful dupes](https://github.com/Homebrew/homebrew-dupes).\n\n*Important:* Since the introduction of `superenv`, `brew --prefix`/bin is NOT on the `$PATH` during formula installation. If you have dependencies at build time, you must specify them and brew will add them to the `$PATH`. 
You can test around this with `--env=std`.\n\n\n## Specifying other formulae as dependencies\n\n```ruby\nclass Foo < Formula\n depends_on \"jpeg\"\n depends_on \"gtk+\" => :optional\n depends_on \"readline\" => :recommended\n depends_on \"boost\" => \"with-icu\"\n depends_on :x11 => :optional\nend\n```\n\nA String specifies a formula dependency.\n\nA Symbol specifies a special conditional dependency, such as X11.\n\nA Hash specifies a formula dependency with some additional information. Given a single string key, the value can take several forms:\n* a Symbol (currently one of `:build`, `:optional`, `:recommended`).\n - `:build` tags that dependency as a build-time only dependency, meaning it can be safely ignored\n when installing from a bottle and when listing missing dependencies using `brew missing`.\n - `:optional` generates an implicit `with-foo` option for the formula. This means that, given\n `depends_on \"foo\" => :optional`, the user must pass `--with-foo` in order to enable the dependency.\n - `:recommended` generates an implicit `without-foo` option, meaning that the dependency is enabled\n by default and the user must pass `--without-foo` to disable this dependency. The default\n description can be overridden using the normal option syntax (in this case, the option declaration must precede the dependency):\n\n ```ruby\n option \"with-foo\", \"Compile with foo bindings\" # This overrides the generated description if you want to\n depends_on \"foo\" => :optional # Generated description is \"Build with foo support\"\n ```\n\n* a String or an Array\n String values are interpreted as options to be passed to the dependency. 
You can also pass\n an array of strings, or an array of symbols and strings, in which case the symbols are\n interpreted as described above, and the strings are passed to the dependency as options.\n\n ```ruby\n depends_on \"foo\" => \"with-bar\"\n depends_on \"foo\" => %w{with-bar with-baz}\n depends_on \"foo\" => [:optional, \"with-bar\"]\n ```\n\n\n## Specifying other formulae as conflicts\n\nSometimes there\u2019s hard conflict between formulae, and it can\u2019t be avoided or circumvented with `keg_only`.\n\nPolarSSL is a good [example](https://github.com/Homebrew/homebrew/blob/master/Library/Formula/polarssl.rb#L36-L37) formula for minor conflict.\n\nPolarSSL ship GNU\u2019s Hello, and compiles a `hello` binary. This is obviously non-essential to PolarSSL\u2019s functionality, and conflict with the `hello` formula would be overkill, so we just remove it.\n\nHowever, also in the PolarSSL formulae is a [firm conflict](https://github.com/Homebrew/homebrew/blob/master/Library/Formula/polarssl.rb#L19) with `md5sha1sum`, because both `md5sha1sum` and `polarssl` compile identically-named binaries that *are* important for core functionality.\n\nAs a general rule, `conflicts_with` should be a last-resort option. It\u2019s a fairly blunt instrument.\n\nThe syntax for conflict that can\u2019t be worked around is\n\n```ruby\nconflicts_with \"blueduck\", :because => \"yellowduck also ships a duck binary\"\n```\n\n## Formulae Revisions\n\nIn Homebrew we sometimes accept formulae updates that don\u2019t include a version bump. These include homepage changes, resource updates, new patches or fixing a security issue with a formula.\n\nOccasionally, these updates require a forced-recompile of the formula itself or its dependents to either ensure formulae continue to function as expected or to close a security issue. 
This forced-recompile is known as a `revision` and inserted underneath the homepage/url/sha block.\n\nWhere a dependent of a formula fails against a new version of that dependency it must receive a `revision`. An example of such failure can be seen [here](https://github.com/Homebrew/homebrew/issues/31195) and the fix [here](https://github.com/Homebrew/homebrew/pull/31207).\n\n`Revisions` are also used for formulae that move from the system OpenSSL to the Homebrew-shipped OpenSSL without any other changes to that formula. This ensures users aren\u2019t left exposed to the potential security issues of the outdated OpenSSL. An example of this can be seen in [this commit](https://github.com/Homebrew/homebrew/commit/6b9d60d474d72b1848304297d91adc6120ea6f96).\n\n## Double-check for dependencies\n\nWhen you already have a lot of brews installed, its easy to miss a common dependency like `glib` or `gettext`.\n\nYou can double-check which libraries a binary links to with the `otool` command (perhaps you need to use `xcrun otool`):\n\n $ otool -L /usr/local/bin/ldapvi\n /usr/local/bin/ldapvi:\n\t/usr/local/opt/openssl/lib/libssl.1.0.0.dylib (compatibility version 1.0.0, current version 1.0.0)\n\t/usr/local/opt/openssl/lib/libcrypto.1.0.0.dylib (compatibility version 1.0.0, current version 1.0.0)\n\t/usr/local/lib/libglib-2.0.0.dylib (compatibility version 4201.0.0, current version 4201.0.0)\n\t/usr/local/opt/gettext/lib/libintl.8.dylib (compatibility version 10.0.0, current version 10.2.0)\n\t/usr/local/opt/readline/lib/libreadline.6.dylib (compatibility version 6.0.0, current version 6.3.0)\n\t/usr/local/lib/libpopt.0.dylib (compatibility version 1.0.0, current version 1.0.0)\n\t/usr/lib/libncurses.5.4.dylib (compatibility version 5.4.0, current version 5.4.0)\n\t/System/Library/Frameworks/LDAP.framework/Versions/A/LDAP (compatibility version 1.0.0, current version 2.4.0)\n\t/usr/lib/libresolv.9.dylib (compatibility version 1.0.0, current version 
1.0.0)\n\t/usr/lib/libSystem.B.dylib (compatibility version 1.0.0, current version 1213.0.0)\n\n\n## Specifying gems, Python modules, Go projects, etc. as dependencies\n\nHomebrew doesn\u2019t package already packaged language-specific libraries. These should be installed directly from `gem`/`cpan`/`pip` etc.\n\nIf you're installing an application then please locally vendor all the language-specific dependencies:\n\n```ruby\nclass Foo < Formula\n resource \"pycrypto\" do\n url \"https://pypi.python.org/packages/source/p/pycrypto/pycrypto-2.6.tar.gz\"\n sha256 \"85cc828a96735bdafcf29eb6291ca91bac846579bcef7308536e0c875d6c81d7\"\n end\n\n def install\n resource(\"pycrypto\").stage { system \"python\", *Language::Python.setup_install_args(libexec/\"vendor\") }\n end\nend\n```\n\n[jrnl](https://github.com/Homebrew/homebrew/blob/master/Library/Formula/jrnl.rb) is an example of a formula that does this well. The end-result means the user doesn't have to faff with `pip` or Python and can just run `jrnl`.\n\n[homebrew-pypi-poet](https://github.com/tdsmith/homebrew-pypi-poet) can help you generate resource stanzas for the dependencies of your Python application.\n\nSimilarly, [homebrew-go-resources](https://github.com/samertm/homebrew-go-resources) can help you generate go\\_resource stanzas for the dependencies of your go application.\n\nIf your formula needs a gem or python module and it can't be made into a resource you\u2019ll need to check for these external dependencies:\n\n```ruby\nclass Foo < Formula\n depends_on \"mg\" => :ruby\n depends_on \"json\" => :python\n depends_on \"Authen::NTLM\" => :perl\nend\n```\n\nNote that we probably won't accept the formulae in this case; it's a far worse user experience than vendoring libraries with resources.\n\n## Test the formula\n\nExit out of the interactive shell.\n\n brew install --verbose --debug foo\n\nDebug will ask you to open an interactive shell when the build fails so you can try to figure out what went 
wrong.\n\nCheck the top of the `./configure` output (if applicable)! Some configure scripts do not recognize `--disable-debug`. If you see a warning about it, remove the option from the formula.\n\n## Add a test to the formula\n\nPlease add a `test do` block to the formula. This will be run by `brew test foo` and the [Brew Test Bot](Brew-Test-Bot.md).\n\nThe `test do` block automatically creates and changes to a temporary directory which is deleted after run. You can access this Pathname with the `testpath` function.\n\nWe want tests that don't require any user input and test the basic functionality of the application. For example `foo build-foo input.foo` is a good test and (despite their widespread use) `foo --version` and `foo --help` are bad tests. However, a bad test is better than no test at all.\n\nSee [cmake](https://github.com/Homebrew/homebrew/blob/master/Library/Formula/cmake.rb) for an example of a formula with a good test. A basic `CMakeLists.txt` file is written CMake uses it to generate Makefiles. This test checks that CMake doesn't e.g. segfault during basic operation.\n\n## Manuals\n\nHomebrew expects to find man pages in `[prefix]/share/man/...`, and not in `[prefix]/man/...`.\n\nSome software installs to man instead of `share/man`, so check the output and add a `\"--mandir=#{man}\"` to the `./configure` line if needed.\n\n\n## A Quick Word on Naming\n\n**THE NAME IS VERY IMPORTANT!**\n\nName the formula like the project markets the product. So it\u2019s `pkg-config`, not `pkgconfig`; `sdl_mixer`, not `sdl-mixer` or `sdlmixer`.\n\nThe only exception is stuff like \u201cApache Ant\u201d. Apache sticks \u201cApache\u201d in front of everything, but we use the formula name `ant`. We only include the prefix in cases like *GNUplot* (because it\u2019s part of the name) and *GNU Go* (because everyone calls it \u201cGNU go\u201d\u2014nobody just calls it \u201cGo\u201d). 
The word \u201cGo\u201d is too common and there are too many implementations of it.\n\nIf you\u2019re not sure about the name check the homepage, and check the Wikipedia page.\n\n[ALSO CHECK WHAT DEBIAN CALLS IT!](https://www.debian.org/distrib/packages)\n\nWhere Homebrew already has a formula called `foo` we typically do not accept requests to replace that formula with something else also named `foo`. This is to avoid both confusing and surprising users\u2019 expectation.\n\nWhen two formulae share an upstream name, e.g. [`AESCrypt`](https://github.com/Homebrew/homebrew/blob/master/Library/Formula/aescrypt.rb) and [`AESCrypt`](https://github.com/Homebrew/homebrew/blob/master/Library/Formula/aescrypt-packetizer.rb) the newer formula must typically adapt the name to avoid conflict with the current formula.\n\nIf you\u2019re *still* not sure, just commit. We\u2019ll apply some arbitrary rule and make a decision ;)\n\nWhen importing classes, Homebrew will require the formula and then create an instance of the class. It does this by assuming the formula name can be directly converted to the class name using a `regexp`. The rules are simple:\n\n* `foo-bar.rb` => `FooBar`\n* `foobar.rb` => `Foobar`\n\nThus, if you change the name of the class, you must also rename the file. Filenames should be all lowercase.\n\nAdd aliases by creating symlinks in `Library/Aliases`.\n\n\n## Audit the formula\n\nYou can run `brew audit` to test formulae for adherence to Homebrew house style. This includes warnings for trailing whitespace, preferred URLs for certain source hosts, and a lot of other style issues. Fixing these warnings before committing will make the process a lot smoother for us.\n\nUse `brew info` and check if the version guessed by Homebrew from the URL is\ncorrect. 
Add an explicit `version` if not.\n\n## Commit\n\nEverything is built on Git, so contribution is easy:\n\n brew install git # if you already have git installed, skip this command\n brew update # required in more ways than you think (initializes the brew git repository if you don't already have it)\n cd `brew --repository`\n # Create a new git branch for your formula so your pull request is easy to\n # modify if any changes come up during review.\n git checkout -b \n git add Library/Formula/foo.rb\n git commit\n\nThe established standard for Git commit messages is:\n\n* the first line is a commit summary of *50 characters or less*, then\n* two (2) newlines, then\n* explain the commit throughly\n\nAt Homebrew, we like to put the name of the formula upfront like so \"foobar 7.3 (new formula)\".\nThis may seem crazy short, but you\u2019ll find that forcing yourself to summarise the commit encourages you to be atomic and concise. If you can\u2019t summarise it in 50-80 characters, you\u2019re probably trying to commit two commits as one. For a more thorough explanation, please read Tim Pope\u2019s excellent blog post, [A Note About Git Commit Messages](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).\n\nThe preferred commit message format for simple version updates is \"foobar 7.3\".\n\nEnsure you reference any relevant GitHub issue `#12345` in the commit message. 
Homebrew\u2019s history is the first thing future contributors will look to when trying to understand the current state of formulae they\u2019re interested in.\n\n\n## Push\n\nNow you just need to push back to GitHub.\n\nIf you haven\u2019t forked Homebrew yet, [go to the repo and hit the fork button](https://github.com/Homebrew/homebrew).\n\nIf you have already forked Homebrew on GitHub, then you can manually push (just make sure you have been pulling from the Homebrew/homebrew master):\n\n git push git@github.com:myname/homebrew.git \n\nNow, please open a Pull Request (on your GitHub repo page) for new and updated brews.\n\n* One formula per commit; one commit per formula\n* Keep merge commits out of the request\n* If you have any merge or mixup commits, please [squash](http://gitready.com/advanced/2009/02/10/squashing-commits-with-rebase.html) them.\n\nIf a commit touches multiple files, or isn\u2019t one logical bug fix, or a file is touched in multiple commits, we\u2019ll probably ask you to `rebase` and `squash` your commits. For this reason, you should avoid pushing to your `master` branch. 
Note, after rebase and/or squash, you'll need to push with `--force` to your remote.\n\n\n# Overview of the Formula Install Process\n\n\n* The result of `Formula.download_strategy` is instantiated.\n* `DownloadStrategy.fetch` is called (downloads tarball, checks out git repository, etc.)\n* A temporary sandbox is created in `/tmp/$formulaname`\n* `DownloadStrategy.stage` is called (extracts tarball to above sandbox, exports git repository to sandbox, etc.)\n* Patches are applied\n* Current directory is changed to the stage root (so when you `system make`, it works)\n* `Formula.install` is called\n* Anything installed to the keg is cleaned (see later)\n* The keg is symlinked into Homebrew\u2019s prefix\n* Caveats are displayed\n\n\n# Convenience Tools\n\n## Messaging\n\nThree commands are provided for displaying informational messages to the user:\n\n* `ohai` for general info\n* `opoo` for warning messages\n* `onoe` for error messages\n\nIn particular, when a test needs to be performed before installation use `onoe` to bail out gracefully. For example:\n\n```ruby\nif some_test?\n system \"make\", \"install\"\nelse\n onoe \"Error! Something is wrong.\"\nend\n```\n\n\n## bin.install \"foo\"\n\nYou\u2019ll see stuff like that in other formulae. This installs the file foo into the Formula\u2019s `bin` directory (`/usr/local/Cellar/pkg/0.1/bin`) and makes it executable (`chmod 0555 foo`).\n\n## inreplace\n\nA convenience function that can edit files in-place. For example:\n\n`inreplace \"path\", before, after`\n\n`before` and `after` can be strings or regexps. You can also use the block form:\n\n```ruby\ninreplace \"path\" do |s|\n s.gsub! /foo/, \"bar\"\nend\n```\n\nMake sure you modify `s`! This block ignores the returned value.\n\n`inreplace` should be used instead of patches when it is patching something that will never be accepted upstream e.g. make the software\u2019s build system respect Homebrew\u2019s installation hierarchy. 
If it's Homebrew and MacPorts or OS X specific it should be turned into a patch instead.\n\nIf you need modify variables in a Makefile, rather than using `inreplace`, pass them as arguments to make:\n\n```rb\nsystem \"make\", \"target\", \"VAR2=value1\", \"VAR2=value2\", \"VAR3=values can have spaces\"\n```\n\n```rb\nargs = %W[\n CC=#{ENV.cc}\n PREFIX=#{prefix}\n]\n\nsystem \"make\", *args\n```\n\nNote that values *can* contain unescaped spaces if you use the multiple-argument form of `system`.\n\n## Patches\n\nWhile patches should generally be avoided, sometimes they are necessary.\n\nWhen patching (i.e. fixing header file inclusion, fixing compiler warnings, etc.) the first thing to do is check whether or not the upstream project is aware of the issue. If not, file a bug report and/or submit your patch for inclusion. We may sometimes still accept your patch before it was submitted upstream but by getting the ball rolling on fixing the upstream issue you reduce the length of time we have to carry the patch around.\n\n*Always, always, always justify a patch with a code comment!* Otherwise, nobody will know when it is safe to remove the patch, or safe to leave it in when updating the formula. The comment should include a link to the relevant upstream issue(s).\n\nExternal patches can be declared using resource-style blocks:\n\n```rb\npatch do\n url \"https://example.com/example_patch.diff\"\n sha256 \"85cc828a96735bdafcf29eb6291ca91bac846579bcef7308536e0c875d6c81d7\"\nend\n```\n\nA strip level of -p1 is assumed. It can be overridden using a symbol argument:\n\n```rb\npatch :p0 do\n url \"https://example.com/example_patch.diff\"\n sha256 \"85cc828a96735bdafcf29eb6291ca91bac846579bcef7308536e0c875d6c81d7\"\nend\n```\n\nPatches can be declared in stable, devel, and head blocks. NOTE: always use a block instead of a conditional, i.e. `stable do ... end` instead of `if build.stable? then ... 
end`.\n\n```rb\nstable do\n # some other things...\n\n patch do\n url \"https://example.com/example_patch.diff\"\n sha256 \"85cc828a96735bdafcf29eb6291ca91bac846579bcef7308536e0c875d6c81d7\"\n end\nend\n```\n\nEmbedded (__END__) patches can be declared like so:\n\n```rb\npatch :DATA\npatch :p0, :DATA\n```\n\nwith the patch data included at the end of the file:\n\n```\n__END__\ndiff --git a/foo/showfigfonts b/foo/showfigfonts\nindex 643c60b..543379c 100644\n--- a/foo/showfigfonts\n+++ b/foo/showfigfonts\n@@ -14,6 +14,7 @@\n\u2026\n```\n\nPatches can also be embedded by passing a string. This makes it possible to provide multiple embedded patches while making only some of them conditional.\n```rb\npatch :p0, \"...\"\n```\n\nIn embedded patches, the string `HOMEBREW_PREFIX` is replaced with the value of the constant `HOMEBREW_PREFIX` before the patch is applied.\n\n\n## Creating the diff\n\n brew install --interactive --git foo\n \u2026\n (make some edits)\n \u2026\n git diff | pbcopy\n brew edit foo\n\nNow just paste into the formula after `__END__`.\nInstead of `git diff | pbcopy`, for some editors `git diff >> path/to/your/formula/foo.rb` might help you that the diff is not touched (e.g. white space removal, indentation, etc.)\n\n\n\n# Advanced Formula Tricks\n\nIf anything isn\u2019t clear, you can usually figure it out with some `grep` and the `Library/Formula` directory. Please amend this document if you think it will help!\n\n\n## Unstable versions (`HEAD`, `devel`)\n\nFormulae can specify alternate downloads for the upstream project\u2019s `devel` release (unstable but not `trunk`) or `HEAD` (`master/trunk`).\n\n### HEAD\n\nHEAD URLs (activated by passing `--HEAD`) build the development cutting edge. Specifying it is easy:\n\n```ruby\nclass Foo < Formula\n head \"https://github.com/mxcl/lastfm-cocoa.git\"\nend\n```\n\nHomebrew understands `git`, `svn`, and `hg` URLs, and has a way to specify `cvs` repositories as a URL as well. 
You can test whether the `HEAD` is being built with `build.head?`.\n\nTo use a specific commit, tag, or branch from a repository, specify head with the `:tag` and `:revision`, `:revision`, or `:branch` option, like so:\n\n```ruby\nclass Foo < Formula\n head \"https://github.com/some/package.git\", :revision => \"090930930295adslfknsdfsdaffnasd13\"\n # or :branch => \"develop\"\n # or :tag => \"1_0_release\",\n # :revision => \"090930930295adslfknsdfsdaffnasd13\"\nend\n```\n\nFormulae that only have `head` versions should be submitted to [homebrew/headonly](https://github.com/Homebrew/homebrew-headonly) instead of Homebrew/homebrew.\n\n### devel\n\nThe \"devel\" spec (activated by passing `--devel`) is used for a project\u2019s unstable releases. It is specified in a block:\n\n```ruby\ndevel do\n url \"https://foo.com/foo-0.1.tar.gz\"\n sha256 \"85cc828a96735bdafcf29eb6291ca91bac846579bcef7308536e0c875d6c81d7\"\nend\n```\n\nYou can test if the \"devel\" spec is in use with `build.devel?`.\n\n## Compiler selection\n\nSometimes a package fails to build when using a certain compiler. Since recent Xcode no longer includes a GCC compiler, we cannot simply force the use of GCC. Instead, the correct way to declare this is the `fails_with` DSL method. A properly constructed `fails_with` block documents the latest compiler build version known to cause compilation to fail, and the cause of the failure. For example:\n\n```ruby\nfails_with :llvm do\n build 2335\n cause <<-EOS.undent\n The \"cause\" field should include a short summary of the error. Include\n the URLs of any relevant information, such as upstream bug reports. Wrap\n the text at a sensible boundary (~72-80 characters), but do not break\n URLs over multiple lines.\n EOS\nend\n```\n\n`build` takes a Fixnum (you can find this number in your `brew --config` output). 
`cause` takes a string, and the use of heredocs is encouraged to improve readability and allow for more comprehensive documentation.\n\n`fails_with` declarations can be used with any of `:gcc`, `:llvm`, and `:clang`. Homebrew will use this information to select a working compiler (if one is available).\n\n\n## Specifying the Download Strategy explicitly\n\nTo use one of Homebrew\u2019s built-in download strategies, specify the `:using =>` flag on a `url` or `head`. For example:\n\n```ruby\nclass Sip < Formula\n url \"http://www.riverbankcomputing.co.uk/hg/sip/archive/4.11\"\n md5 \"dbafd7101a4e7caee6f529912a1356e5\"\n head \"http://www.riverbankcomputing.co.uk/hg/sip\", :using => :hg\n homepage \"http://www.riverbankcomputing.co.uk/software/sip\"\n```\n\nThe downloaders offered by Homebrew are:\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
Value of :usingCorresponds To
:bzrBazaarDownloadStrategy
:curlCurlDownloadStrategy
:cvsCVSDownloadStrategy
:gitGitDownloadStrategy
:hgMercurialDownloadStrategy
:nounzipNoUnzipCurlDownloadStrategy
:postCurlPostDownloadStrategy
:svnSubversionDownloadStrategy
\n\n\nIf you need more control over the way files are downloaded and staged, you can create a custom download strategy and specify it using the `url` method's `:using` option:\n\n\n```ruby\nclass MyDownloadStrategy < SomeHomebrewDownloadStrategy\n # Does something cool\nend\n\nclass Foo < Formula\n url \"something\", :using => MyDownloadStrategy\nend\n```\n\nSpecifying download strategies can be useful when used with a local repo, where a plain URL would not let you specify how to access it. For example:\n\n```ruby\nclass Bar < Formula\n head \"/users/abc/src/git.git\", :using => :git\nend\n```\n\n\n## Just copying some files\n\nWhen your code in the install function is run, the current working directory is set to the extracted tarball.\n\nSo it is easy to just copy some files:\n\n```ruby\nprefix.install \"file1\", \"file2\"\n```\n\nOr everything:\n\n```ruby\nprefix.install Dir[\"output/*\"]\n```\n\nGenerally we'd rather you were specific about what files or directories need to be installed rather than installing everything.\n\n### Variables for directory locations\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
NameDefaultExample
HOMEBREW_PREFIX/usr/local
prefix#{HOMEBREW_PREFIX}/Cellar/#{name}/#{version}/usr/local/Cellar/foo/0.1
opt_prefix#{HOMEBREW_PREFIX}/opt/#{name}/usr/local/opt/foo
bin#{prefix}/bin/usr/local/Cellar/foo/0.1/bin
doc#{prefix}/share/doc/foo/usr/local/Cellar/foo/0.1/share/doc/foo
include#{prefix}/include/usr/local/Cellar/foo/0.1/include
info#{prefix}/share/info/usr/local/Cellar/foo/0.1/share/info
lib#{prefix}/lib/usr/local/Cellar/foo/0.1/lib
libexec#{prefix}/libexec/usr/local/Cellar/foo/0.1/libexec
man#{prefix}/share/man/usr/local/Cellar/foo/0.1/share/man
man[1-8]#{prefix}/share/man/man[1-8]/usr/local/Cellar/foo/0.1/share/man/man[1-8]
sbin#{prefix}/sbin/usr/local/Cellar/foo/0.1/sbin
share#{prefix}/share/usr/local/Cellar/foo/0.1/share
etc#{HOMEBREW_PREFIX}/etc/usr/local/etc
var#{HOMEBREW_PREFIX}/var/usr/local/var
buildpathA temporary dir somewhere on your system/private/tmp/[formula-name]-0q2b/[formula-name]
\n\nThese can be used, for instance, in code such as\n\n```ruby\nbin.install Dir[\"output/*\"]\n```\n\nto install binaries into their correct location into the cellar, and\n\n```ruby\nman.mkpath\n```\n\nto create the directory structure to the man location.\n\nTo install man pages into specific locations, use `man1.install \"foo.1\", \"bar.1\"`, `man2.install \"foo.2\"`, etc.\n\nNote that in the context of Homebrew, `libexec` is reserved for private use by the formula and therefore is not symlinked into `HOMEBREW_PREFIX`.\n\n### Installation without linking into `/usr/local` (keg-only)\n\nIf you only need a program for a dependency and it does not need to be linked for public use in `/usr/local`, specify\n\n```ruby\nkeg_only \"This is my rationale.\"\n```\n\nin the Formula class.\n\n\n## Adding optional steps\n\nIf you want to add an option:\n\n```ruby\nclass Yourformula < Formula\n ...\n option \"with-ham\", \"Description of the option\"\n option \"without-spam\", \"Another description\"\n depends_on \"foo\" => :optional # will automatically add a with-foo option\n ...\n```\n\nAnd then to define the effects the options have:\n\n```ruby\nif build.with? \"ham\"\n # note, no \"with\" in the option name (it is added by the build.with? method)\nend\n\nif build.without? \"ham\"\n # works as you'd expect. True if `--without-ham` was given.\nend\n\nif build.include? \"enable-ham\"\n # the deprecated style, only useful for options other than `with`/`without` style\nend\n```\n\nOption names should be prefixed with one of the words `with`, `without`, `no`, or a verb in the imperative tense describing the action to be taken. For example, an option to run a test suite should be named `--with-test` or `--with-check` rather than `--test`, and an option to enable a shared library should be named `--enable-shared` rather than `--shared`.\n\nNote that options that aren\u2019t ` build.with? ` or ` build.without? ` should be actively deprecated where possible. 
See [wget](https://github.com/Homebrew/homebrew/blob/master/Library/Formula/wget.rb#L27-L31) for an example.\n\nSee the [graphviz](https://github.com/Homebrew/homebrew/blob/master/Library/Formula/graphviz.rb) formula for an example.\n\n\n## File level operations\n\nYou can use the file utilities provided by Ruby (`FileUtils`). These are included in the `Formula` class, so you do not need the `FileUtils.` prefix to use them. They are documented [here](http://www.ruby-doc.org/stdlib/libdoc/fileutils/rdoc/index.html).\n\nWhen creating symlinks, take special care to ensure they are *relative* symlinks. This makes it easier to create a relocatable bottle. For example, to create a symlink in `bin` to an executable in `libexec`, use\n\n```rb\nbin.install_symlink libexec/\"name\"\n```\n\n*not*\n\n```rb\nln_s libexec/\"name\", bin\n```\n\nThe symlinks created by `install_symlink` are guaranteed to be relative. `ln_s` will only produce a relative symlink when given a relative path.\n\n## Handling files that should persist over formula upgrades\n\nFor example, Ruby 1.9\u2019s gems should be installed to `var/lib/ruby/` so that gems don\u2019t need to be reinstalled when upgrading Ruby. You can usually do this with symlink trickery, or *better* a configure option.\n\n### launchd plist files\n\nHomebrew provides two Formula methods for launchd plist files. `plist_name` will return `homebrew.mxcl.`, and `plist_path` will return, for example, `/usr/local/Cellar/foo/0.1/homebrew.mxcl.foo.plist`.\n\n## Updating formulae\n\nEventually a new version of the software will be released. In this case you should update the `url` and `sha256`. Please leave the `bottle do ... end` block as-is; our CI system will update it when we pull your change.\n\nCheck if the formula you are updating is a dependency for any other formulae by running `brew uses UPDATED_FORMULA`. 
If it is a dependency please `brew reinstall` all the dependencies after it is installed and verify they work correctly.\n\n# Style guide\n\nHomebrew wants to maintain a consistent Ruby style across all formulae based on [Ruby Style Guide](https://github.com/styleguide/ruby). Other formulae may not have been updated to match this guide yet but all new ones should. Also:\n\n* The order of methods in a formula should be consistent with other formulae (e.g.: `def patches` goes before `def install`)\n* An empty line is required before the `__END__` line\n\n\n\n# Troubleshooting for people writing new formulae\n\n### Version detection fails\n\nHomebrew tries to automatically determine the version from the URL in order to save on duplication. If the tarball has a funny name though, you may have to assign the version number:\n\n```ruby\nclass Foobar\n version \"0.7\"\nend\n```\n\n## Bad Makefiles\n\nNot all projects have makefiles that will run in parallel so try to deparallelize:\n\n brew edit foo\n\nAdd all this to the formula (so there will already be a class line, don\u2019t add another or change that, and there\u2019s already an install function, don't add another one, add the lines in the install function below to the top of the problem formula\u2019s install function).\n\n```ruby\nclass Foo < Formula\n skip_clean :all\n def install\n ENV.deparallelize\n ENV.no_optimization\n system \"make\" # separate make and make install steps\n system \"make\", \"install\"\n end\nend\n```\n\nIf that fixes it, please open an [issue](https://github.com/Homebrew/homebrew/issues) so that we can fix it for everyone.\n\n## Still won\u2019t work?\n\nCheck out what MacPorts and Fink do:\n\n`brew -S --macports foo`\n\n`brew -S --fink foo`\n\n\n\n# Superenv Notes\n\n`superenv` is a \"super\" environment that tries to improve reliability for the general case. 
But it does make making formula harder.\n\nTo not use `superenv`, install with `--env=std`.\n\nSuperenv isolates builds by removing `/usr/local/bin` and all user-PATHs that are not determined to be essential to the build. It does this because other PATHs are full of stuff that breaks builds. (We have 15,000 tickets as testament!)\n\n`superenv` tries to remove bad-flags from the commands passed to `clang`/`gcc` and injects others (for example all `keg_only` dependencies are added to the `-I` and `-L` flags. If superenv troubles you, try to `brew install --env=std` and report to us if that fixes it.\n\n# Fortran\n\nSome software requires a Fortran compiler. This can be declared by adding `depends_on :fortran` to a formula. `:fortran` is a special dependency that does several things.\n\nFirst, it looks to see if you have set the `FC` environment variable. If it is set, Homebrew will use this value during compilation. If it is not set, it will check to see if `gfortran` is found in `PATH`. If it is, Homebrew will use its location as the value of `FC`. Otherwise, the `gcc` formula will be treated as a dependency and installed prior to compilation.\n\nIf you have set `FC` to a custom Fortran compiler, you may additionally set `FCFLAGS` and `FFLAGS`. 
Alternatively, you can pass `--default-fortran-flags` to `brew install` to use Homebrew's standard `CFLAGS`.\n\nWhen using Homebrew's own gfortran compiler, the standard `CFLAGS` are used and user-supplied values of `FCFLAGS` and `FFLAGS` are ignored for consistency and reproducibility reasons.\n\n\n# How to start over (reset to `master`)?\n\nHave you created a real mess in git which paralyzes you to create the commit you just want to push?\nThen you might consider start from scratch.\nYour changes will be discarded in favour of the `master` branch:\n\n`git checkout master`\n\n`git reset --hard FETCH_HEAD`\n", "meta": {"content_hash": "8d53a5d3ee66b814a3acde6349ddb05c", "timestamp": "", "source": "github", "line_count": 987, "max_line_length": 476, "avg_line_length": 41.4822695035461, "alnum_prop": 0.7218816403292382, "repo_name": "tylerball/homebrew", "id": "ca05738ca5b42bfeaae355e1ddbf5fd7b6a3e0b1", "size": "41092", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "share/doc/homebrew/Formula-Cookbook.md", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "C++", "bytes": "5889"}, {"name": "Groff", "bytes": "26137"}, {"name": "JavaScript", "bytes": "18"}, {"name": "Perl", "bytes": "547"}, {"name": "PostScript", "bytes": "485"}, {"name": "Ruby", "bytes": "4617578"}, {"name": "Shell", "bytes": "19080"}]}} {"text": "package gr.demokritos.iit.ydsapi.model;\n\nimport com.google.gson.Gson;\nimport com.google.gson.GsonBuilder;\nimport com.google.gson.JsonArray;\nimport com.google.gson.JsonDeserializationContext;\nimport com.google.gson.JsonDeserializer;\nimport com.google.gson.JsonElement;\nimport com.google.gson.JsonObject;\nimport com.google.gson.JsonParseException;\nimport com.google.gson.JsonSerializationContext;\nimport com.google.gson.JsonSerializer;\nimport java.lang.reflect.Type;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.LinkedHashSet;\nimport java.util.List;\nimport 
java.util.Objects;\nimport java.util.Set;\nimport org.bson.types.ObjectId;\n\n/**\n *\n * @author George K. \n */\npublic class BasketItem {\n\n private final String userID;\n private final String componentParentUUID;\n private final String title;\n private final ObjectId basketItemID;\n private final Set tags;\n private final Set filters;\n private final String compType;\n private final String contentType;\n private final BasketType type;\n private final boolean priv;\n private final String lang;\n\n private BasketItem(Builder builder) {\n this.userID = builder.user_id;\n this.componentParentUUID = builder.component_parent_UUID;\n this.title = builder.title;\n this.basketItemID = builder.basket_item_id;\n this.tags = builder.tags;\n this.filters = builder.filters;\n this.compType = builder.component_type;\n this.contentType = builder.content_type;\n this.type = builder.type;\n this.priv = builder.isPrivate;\n this.lang = builder.lang;\n }\n\n /**\n * obtain json request, utilizes {@link Builder} for safety\n *\n * @param jsonBasketItem\n */\n public BasketItem(String jsonBasketItem) {\n BasketItem bi\n = new GsonBuilder()\n .registerTypeAdapter(BasketItem.class, new BasketItemDeserializer())\n .create()\n .fromJson(jsonBasketItem, getClass());\n this.userID = bi.userID;\n this.componentParentUUID = bi.componentParentUUID;\n this.title = bi.title;\n this.basketItemID = bi.basketItemID;\n this.tags = bi.tags;\n this.filters = bi.filters;\n this.compType = bi.compType;\n this.contentType = bi.contentType;\n this.type = bi.type;\n this.priv = bi.priv;\n this.lang = bi.lang;\n }\n\n public String getUserID() {\n return userID;\n }\n\n public String getComponentParentUUID() {\n return componentParentUUID;\n }\n\n public String getTitle() {\n return title;\n }\n\n public ObjectId getBasketItemID() {\n return basketItemID;\n }\n\n public Set getTags() {\n return tags;\n }\n\n public Set getFilters() {\n return filters;\n }\n\n public String getComponentType() {\n return 
compType;\n }\n\n public String getContentType() {\n return contentType;\n }\n\n public BasketType getType() {\n return type;\n }\n\n public boolean isPrivate() {\n return priv;\n }\n\n public String getLang() {\n return lang;\n }\n\n @Override\n public String toString() {\n return \"BasketItem{\" + \"user_id=\" + userID\n + \", component_parent_UUID=\" + componentParentUUID\n + \", title=\" + title\n + \", basket_item_id=\" + basketItemID\n + \", tags=\" + tags\n + \", filters=\" + filters\n + \", component_type=\" + compType\n + \", content_type=\" + contentType\n + \", type=\" + type\n + \", priv=\" + priv\n + \", lang=\" + lang + '}';\n }\n\n public String toJSON() {\n GsonBuilder gsonBuilder = new GsonBuilder();\n gsonBuilder\n .registerTypeAdapter(BasketItem.class, new BasketItemSerializer())\n .disableHtmlEscaping()\n .setPrettyPrinting();\n Gson gson = gsonBuilder.create();\n return gson.toJson(this);\n }\n\n public JsonElement toJSONElement() {\n GsonBuilder gsonBuilder = new GsonBuilder();\n gsonBuilder\n .registerTypeAdapter(BasketItem.class, new BasketItemSerializer())\n .disableHtmlEscaping()\n .setPrettyPrinting();\n Gson gson = gsonBuilder.create();\n return gson.toJsonTree(this);\n }\n\n /**\n * we want only these specific fields to uniquely identify a\n * {@link BasketItem}\n *\n * @return\n */\n @Override\n public int hashCode() {\n int hash = 3;\n hash = 17 * hash + Objects.hashCode(this.componentParentUUID);\n hash = 17 * hash + Objects.hashCode(this.title);\n hash = 17 * hash + Objects.hashCode(this.compType);\n hash = 17 * hash + Objects.hashCode(this.contentType);\n hash = 17 * hash + Objects.hashCode(this.type);\n hash = 17 * hash + Objects.hashCode(this.lang);\n return hash;\n }\n\n @Override\n public boolean equals(Object obj) {\n if (obj == null) {\n return false;\n }\n if (getClass() != obj.getClass()) {\n return false;\n }\n final BasketItem other = (BasketItem) obj;\n if (!Objects.equals(this.componentParentUUID, 
other.componentParentUUID)) {\n return false;\n }\n if (!Objects.equals(this.title, other.title)) {\n return false;\n }\n if (!Objects.equals(this.compType, other.compType)) {\n return false;\n }\n if (!Objects.equals(this.contentType, other.contentType)) {\n return false;\n }\n if (this.type != other.type) {\n return false;\n }\n return Objects.equals(this.lang, other.lang);\n }\n\n /**\n * custom builder class for {@link BasketItem}\n */\n public static class Builder {\n\n private final String user_id;\n private final String component_parent_UUID;\n private final String title;\n private ObjectId basket_item_id;\n private Set tags;\n private Set filters;\n private String component_type;\n private String content_type;\n private BasketType type;\n private Boolean isPrivate;//\n private String lang;\n\n public Builder(String userIDArg, String compParentUUIDArg, String titleArg) {\n this.user_id = userIDArg;\n this.component_parent_UUID = compParentUUIDArg;\n this.title = titleArg;\n }\n\n /**\n *\n * @param lang\n * @return\n */\n public Builder withLang(String lang) {\n this.lang = lang;\n return this;\n }\n\n /**\n *\n * @param tagsArg\n * @return\n */\n public Builder withTags(Collection tagsArg) {\n if (tagsArg == null) {\n this.tags = new LinkedHashSet(0);\n } else {\n this.tags = new LinkedHashSet(tagsArg);\n }\n return this;\n }\n\n /**\n *\n * @param filtersArg\n * @return\n */\n public Builder withFilters(Collection filtersArg) {\n if (filtersArg == null) {\n this.filters = new LinkedHashSet(0);\n } else {\n this.filters = new LinkedHashSet(filtersArg);\n }\n return this;\n }\n\n /**\n *\n * @param compTarg\n * @return\n */\n public Builder withComponentType(String compTarg) {\n String compTargLower = compTarg.toLowerCase();\n if (!ComponentType.ACCEPTED.contains(compTargLower)) {\n throw new IllegalArgumentException(String.format(\"'%s' not accepted as a valid component type\", compTarg));\n } else {\n this.component_type = compTargLower;\n }\n return 
this;\n }\n\n /**\n *\n * @param contTarg\n * @return\n */\n public Builder withContentType(String contTarg) {\n this.content_type = contTarg;\n return this;\n }\n\n /**\n *\n * @param id\n * @return\n */\n public Builder withID(ObjectId id) {\n this.basket_item_id = id;\n return this;\n }\n\n /**\n *\n * @param typeArg\n * @return\n */\n public Builder withType(BasketType typeArg) {\n this.type = typeArg;\n return this;\n }\n\n /**\n *\n * @param typeArg\n * @return\n */\n public Builder withType(String typeArg) throws IllegalArgumentException {\n if (typeArg.equalsIgnoreCase(BasketType.DATASET.getDecl())) {\n this.type = BasketType.DATASET;\n } else if (typeArg.equalsIgnoreCase(BasketType.VISUALISATION.getDecl())) {\n this.type = BasketType.VISUALISATION;\n } else {\n throw new IllegalArgumentException(\"type must be one of \"\n + Arrays.asList(\n new String[]{\n BasketType.DATASET.getDecl(),\n BasketType.VISUALISATION.getDecl()\n }\n ).toString());\n }\n return this;\n }\n\n /**\n *\n * @param isPrivateArg\n * @return\n */\n public Builder withIsPrivate(boolean isPrivateArg) {\n this.isPrivate = isPrivateArg;\n return this;\n }\n\n public BasketItem build() {\n if (this.lang == null || this.lang.trim().isEmpty()) {\n this.lang = \"en\";\n }\n // default is 'private'\n if (this.isPrivate == null) {\n this.isPrivate = Boolean.TRUE;\n }\n // mandatory fields\n if (this.type == null) {\n throw new IllegalArgumentException(\"declare basket type\");\n }\n if (this.content_type == null) {\n throw new IllegalArgumentException(\"declare content type\");\n }\n if (this.component_type == null) {\n throw new IllegalArgumentException(\"declare component type\");\n }\n return new BasketItem(this);\n }\n }\n\n /**\n * basket types: dataset/visualization, 'ALL' is used to override call\n */\n public enum BasketType {\n\n DATASET(\"dataset\"), VISUALISATION(\"visualisation\"), ALL(\"all\");\n private final String type;\n\n private BasketType(String type) {\n this.type = type;\n 
}\n\n public String getDecl() {\n return type;\n }\n }\n\n public static final String FLD_USERID = \"user_id\";\n public static final String FLD_BASKET_ITEM_ID = \"basket_item_id\";\n public static final String FLD_OBJ_ID = \"_id\";\n public static final String FLD_COMPONENT_PARENT_UUID = \"component_parent_uuid\";\n public static final String FLD_TITLE = \"title\";\n public static final String FLD_TAGS = \"tags\";\n public static final String FLD_FILTERS = \"filters\";\n public static final String FLD_COMPONENT_TYPE = \"component_type\";\n public static final String FLD_CONTENT_TYPE = \"content_type\";\n public static final String FLD_TYPE = \"type\";\n public static final String FLD_IS_PRIVATE = \"is_private\";\n public static final String FLD_LANG = \"lang\";\n\n /**\n * Helper class to serialize as needed in the API\n */\n class BasketItemSerializer implements JsonSerializer {\n\n @Override\n public JsonElement serialize(BasketItem t, Type type, JsonSerializationContext jsc) {\n final JsonObject jsonObject = new JsonObject();\n jsonObject.addProperty(BasketItem.FLD_USERID, t.getUserID());\n if (t.getBasketItemID() != null) {\n jsonObject.addProperty(BasketItem.FLD_BASKET_ITEM_ID, t.getBasketItemID().toString());\n }\n jsonObject.addProperty(BasketItem.FLD_COMPONENT_PARENT_UUID, t.getComponentParentUUID());\n jsonObject.addProperty(BasketItem.FLD_TITLE, t.getTitle());\n jsonObject.addProperty(BasketItem.FLD_COMPONENT_TYPE, t.getComponentType());\n jsonObject.addProperty(BasketItem.FLD_CONTENT_TYPE, t.getContentType());\n jsonObject.addProperty(BasketItem.FLD_TYPE, t.getType().getDecl());\n jsonObject.addProperty(BasketItem.FLD_IS_PRIVATE, t.isPrivate());\n jsonObject.addProperty(BasketItem.FLD_LANG, t.getLang());\n\n final JsonElement jsonTags = jsc.serialize(t.getTags());\n jsonObject.add(BasketItem.FLD_TAGS, jsonTags);\n\n // add filters\n final JsonArray jsonFilters = new JsonArray();\n for (final BFilter filt : t.getFilters()) {\n final JsonElement jsonfil = 
filt.toJSONElement();\n jsonFilters.add(jsonfil);\n }\n jsonObject.add(BasketItem.FLD_FILTERS, jsonFilters);\n return jsonObject;\n }\n }\n\n class BasketItemDeserializer implements JsonDeserializer {\n\n @Override\n public BasketItem deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {\n final JsonObject jsonObject = json.getAsJsonObject();\n // initial\n final String user_id = jsonObject.get(BasketItem.FLD_USERID).getAsString();\n final String component_parent_uuid = jsonObject.get(BasketItem.FLD_COMPONENT_PARENT_UUID).getAsString();\n final String title = jsonObject.get(BasketItem.FLD_TITLE).getAsString();\n // init builder object\n Builder b = new Builder(user_id, component_parent_uuid, title);\n // other\n JsonElement jsonbitemID = jsonObject.get(BasketItem.FLD_OBJ_ID);\n if (jsonbitemID != null) {\n ObjectId id = new ObjectId(jsonbitemID.getAsString());\n b = b.withID(id);\n }\n // tags\n final JsonArray jsonTags = jsonObject.get(BasketItem.FLD_TAGS).getAsJsonArray();\n final List lTags = new ArrayList(jsonTags.size());\n for (int i = 0; i < jsonTags.size(); i++) {\n final JsonElement jsonTag = jsonTags.get(i);\n lTags.add(jsonTag.getAsString());\n }\n // add tags\n b = b.withTags(lTags);\n // filters\n final JsonArray jsonFilters = jsonObject.get(BasketItem.FLD_FILTERS).getAsJsonArray();\n final List lFilters = new ArrayList(jsonFilters.size());\n for (int i = 0; i < jsonFilters.size(); i++) {\n final JsonElement jsonFilt = jsonFilters.get(i);\n BFilter bf = new Gson().fromJson(jsonFilt, BFilter.class);\n lFilters.add(bf);\n }\n // add filters \n b = b.withFilters(lFilters);\n // add rest items\n final String component_type = jsonObject.get(BasketItem.FLD_COMPONENT_TYPE).getAsString();\n final String content_type = jsonObject.get(BasketItem.FLD_CONTENT_TYPE).getAsString();\n final String type = jsonObject.get(BasketItem.FLD_TYPE).getAsString();\n final boolean isPrivate = 
jsonObject.get(BasketItem.FLD_IS_PRIVATE).getAsBoolean();\n final String lang = jsonObject.get(BasketItem.FLD_LANG).getAsString();\n\n b = b.withComponentType(component_type)\n .withContentType(content_type)\n .withType(type)\n .withIsPrivate(isPrivate)\n .withLang(lang);\n\n return b.build();\n }\n }\n}\n", "meta": {"content_hash": "ff8352d76a1d05d222bfa1cf7aa871a5", "timestamp": "", "source": "github", "line_count": 479, "max_line_length": 133, "avg_line_length": 33.292275574112736, "alnum_prop": 0.5690098451119333, "repo_name": "YourDataStories/components-visualisation", "id": "d6b2924d122ec177734768bbab917638cd5222c2", "size": "15947", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "BackendUtilities/src/main/java/gr/demokritos/iit/ydsapi/model/BasketItem.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "42030"}, {"name": "HTML", "bytes": "353801"}, {"name": "Java", "bytes": "164739"}, {"name": "JavaScript", "bytes": "881627"}]}} {"text": "#!/usr/bin/env node\n\nvar sys = require('sys')\n\t, fs = require('fs')\n\t, M = require('./Mustache')\n\t, compressor = require('node-minify');\n\nvar package = require('../package.json');\nvar code = '';\nvar docs = {};\n\ndocs.main = '';\ndocs.API = '';\ndocs.copyrightYear = new Date().getFullYear();\n\n// read in the the main.js file as our main boilerplate code\ncode += fs.readFileSync('./main.js', encoding = 'utf8');\ncode = M.Mustache.to_html(code, {'today': new Date().getTime(), 'version': package.version});\n\ndocs.main += fs.readFileSync('./docs.js', encoding = 'utf8');\n\n// parse entire lib directory and concat it into one file for the browser\nvar lib = paths('./lib');\n\nvar faker = require('../index');\n\n// generate bundle for code on the browser\nfor (var module in faker) {\n\tcode += ( '\\n' + 'faker.' + module + ' = {};');\n\tfor (var method in faker[module]) {\n\t\tcode += ( '\\n' + 'faker.' + module);\n\t\tcode += ( '.' 
+ method + ' = ');\n\n\t\t// serialize arrays as JSON, otherwise use simple string conversion\n\t\tvar methodValue = faker[module][method];\n\t\tif (Array.isArray(methodValue)) {\n\t\t\tcode += JSON.stringify(methodValue) + ';\\n';\n\t\t} else {\n\t\t\tcode += (methodValue.toString() + ';\\n');\n\t\t}\n\t}\n}\n\n// generate nice tree of api for docs\ndocs.API += '
    ';\nfor (var module in faker) {\n\tdocs.API += '
  • ' + module;\n\tdocs.API += '
      ';\n\tfor (var method in faker[module]) {\n\t\tdocs.API += '
    • ' + method + '
    • ';\n\t}\n\tdocs.API += '
    ';\n\tdocs.API += '
  • ';\n}\ndocs.API += '
';\n\n// definitions hack\ncode += 'var definitions = faker.definitions;\\n';\ncode += 'var Helpers = faker.Helpers;\\n';\n\n// if we are running in a CommonJS env, export everything out\ncode +=[\"\\nif (typeof define == 'function'){\",\n\" define(function(){\",\n\"\t\treturn faker;\",\n\" });\",\n\"}\",\n\"else if(typeof module !== 'undefined' && module.exports) {\",\n\"\tmodule.exports = faker;\",\n\"}\",\n\"else {\",\n\"\twindow.faker = faker;\",\n\"}\",\n\"\",\n\"}()); // end faker closure\"].join('\\n');\n\n// generate core library\nfs.writeFile('../faker.js', code, function() {\n\tsys.puts(\"faker.js generated successfully!\");\n});\n\n// generate example js file as well\nfs.writeFile('../examples/js/faker.js', code, function() {\n\tsys.puts(\"faker.js generated successfully!\");\n});\n\nvar docOutput = M.Mustache.to_html(docs.main, {\"API\": docs.API, \"copyrightYear\": docs.copyrightYear});\n\n// generate some samples sets (move this code to another section)\nfs.writeFile('../Readme.md', docOutput, function() {\n\tsys.puts(\"Docs generated successfully!\");\n});\n\n// generates minified version Using Google Closure\nnew compressor.minify({\n type: 'gcc',\n fileIn: '../faker.js',\n fileOut: '../minfaker.js',\n callback: function(err){\n\t\t\tif(err) {\n console.log(err);\n }\n else sys.puts(\"Minified version generated successfully!\");\n }\n});\n\n\n/*********************** BUILD HELPER METHODS *********************/\n\n\t// Recursively traverse a hierarchy, returning a list of all relevant .js files.\nfunction paths(dir) {\n\tvar paths = [];\n\n\ttry {\n\t\tfs.statSync(dir);\n\t}\n\tcatch (e) {\n\t\treturn e;\n\t}\n\n\t(function traverse(dir, stack) {\n\t\tstack.push(dir);\n\t\tfs.readdirSync(stack.join('/')).forEach(function(file) {\n\t\t\tvar path = stack.concat([file]).join('/'),\n\t\t\t\tstat = fs.statSync(path);\n\n\t\t\tif (file[0] == '.' 
|| file === 'vendor') {\n\t\t\t\treturn;\n\t\t\t} else if (stat.isFile() && /\\.js$/.test(file)) {\n\t\t\t\tpaths.push(path);\n\t\t\t} else if (stat.isDirectory()) {\n\t\t\t\tpaths.push(path);\n\t\t\t\ttraverse(file, stack);\n\t\t\t}\n\t\t});\n\t\tstack.pop();\n\t})(dir || '.', []);\n\n\treturn paths;\n}\n", "meta": {"content_hash": "76dc6e97bafea63b4e7ab27aa325fcb2", "timestamp": "", "source": "github", "line_count": 139, "max_line_length": 102, "avg_line_length": 25.280575539568346, "alnum_prop": 0.601593625498008, "repo_name": "kentcdodds/kcd-angular", "id": "08a408fea1ded05047142521e3df8106504dd89e", "size": "3514", "binary": false, "copies": "4", "ref": "refs/heads/main", "path": "resources/bower_components/Faker/BUILD/BUILD.js", "mode": "33261", "license": "mit", "language": [{"name": "CSS", "bytes": "9738"}, {"name": "HTML", "bytes": "37638"}, {"name": "JavaScript", "bytes": "37691"}]}} {"text": "class GroupList\n{\npublic:\n\tGroupList(void);\n\t~GroupList(void);\n\t\n\tGroupData *AddGroup(const char *name);\n\tvoid RemoveGroup(GroupData *data);\n\tbool HasGroup(const char *item);\n\n\tGroupData *FindGroup(const char *name);\n\n\tGroupItem *ItemForGroup(const char *name);\n\tGroupItem *ItemForGroup(GroupData *data);\n};\n\n#endif", "meta": {"content_hash": "8e7fe41f8a29b2686eb7e0795e42f2fe", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 43, "avg_line_length": 18.529411764705884, "alnum_prop": 0.7396825396825397, "repo_name": "HaikuArchives/MrPeeps", "id": "b439962109fd869b64676a5cf89d1db7822481ae", "size": "402", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/GroupList.h", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "9728"}, {"name": "C++", "bytes": "245568"}, {"name": "Makefile", "bytes": "5308"}]}} {"text": "package org.cgfalcon.myprolang.antlr.graphics;// Generated from Graphics.g4 by ANTLR 4.1\nimport org.antlr.v4.runtime.misc.NotNull;\nimport 
org.antlr.v4.runtime.tree.ParseTreeListener;\n\n/**\n * This interface defines a complete listener for a parse tree produced by\n * {@link GraphicsParser}.\n */\npublic interface GraphicsListener extends ParseTreeListener {\n\t/**\n\t * Enter a parse tree produced by {@link GraphicsParser#point}.\n\t * @param ctx the parse tree\n\t */\n\tvoid enterPoint(@NotNull GraphicsParser.PointContext ctx);\n\t/**\n\t * Exit a parse tree produced by {@link GraphicsParser#point}.\n\t * @param ctx the parse tree\n\t */\n\tvoid exitPoint(@NotNull GraphicsParser.PointContext ctx);\n\n\t/**\n\t * Enter a parse tree produced by {@link GraphicsParser#file}.\n\t * @param ctx the parse tree\n\t */\n\tvoid enterFile(@NotNull GraphicsParser.FileContext ctx);\n\t/**\n\t * Exit a parse tree produced by {@link GraphicsParser#file}.\n\t * @param ctx the parse tree\n\t */\n\tvoid exitFile(@NotNull GraphicsParser.FileContext ctx);\n\n\t/**\n\t * Enter a parse tree produced by {@link GraphicsParser#command}.\n\t * @param ctx the parse tree\n\t */\n\tvoid enterCommand(@NotNull GraphicsParser.CommandContext ctx);\n\t/**\n\t * Exit a parse tree produced by {@link GraphicsParser#command}.\n\t * @param ctx the parse tree\n\t */\n\tvoid exitCommand(@NotNull GraphicsParser.CommandContext ctx);\n}", "meta": {"content_hash": "87223a5ec1d56c43446b988da0d71696", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 88, "avg_line_length": 32.11904761904762, "alnum_prop": 0.7346182357301705, "repo_name": "cgfalcon/myprolang", "id": "aafe2f0572f6db9325e9b005570456535f134cc4", "size": "1349", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/org/cgfalcon/myprolang/antlr/graphics/GraphicsListener.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ANTLR", "bytes": "385"}, {"name": "Java", "bytes": "30673"}]}} {"text": "<#\n Suppress PSAvoidUsingConvertToSecureStringWithPlainText since SecureString\n objects are used for test 
passwords.\n#>\n[Diagnostics.CodeAnalysis.SuppressMessageAttribute('PSAvoidUsingConvertToSecureStringWithPlainText', '')]\nparam ()\n\n$testJobPrefix = 'MsiPackageTestJob'\n\n<#\n .SYNOPSIS\n Tests if the package with the given Id is installed.\n\n .PARAMETER ProductId\n The ID of the package to test for.\n#>\nfunction Test-PackageInstalledById\n{\n [OutputType([System.Boolean])]\n [CmdletBinding()]\n param\n (\n [Parameter()]\n [System.String]\n $ProductId\n )\n\n $uninstallRegistryKey = 'HKLM:\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall'\n $uninstallRegistryKeyWow64 = 'HKLM:\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows\\CurrentVersion\\Uninstall'\n\n $productEntry = $null\n\n if (-not [System.String]::IsNullOrEmpty($ProductId))\n {\n $productEntryKeyLocation = Join-Path -Path $uninstallRegistryKey -ChildPath $ProductId\n $productEntry = Get-Item -Path $productEntryKeyLocation -ErrorAction 'SilentlyContinue'\n\n if ($null -eq $productEntry)\n {\n $productEntryKeyLocation = Join-Path -Path $uninstallRegistryKeyWow64 -ChildPath $ProductId\n $productEntry = Get-Item $productEntryKeyLocation -ErrorAction 'SilentlyContinue'\n }\n }\n\n return ($null -ne $productEntry)\n}\n\n<#\n .SYNOPSIS\n Starts a simple mock http or https file server. Server will stay on and continue to be able\n to receive requests until the client calls Stop-Server. The server returns the job object\n and an EventWaitHandle object that the client will need to dispose of (by calling Stop-Server)\n once it is done sending requests.\n\n .PARAMETER FilePath\n The path to the file to add on to the mock file server. Should be an MSI file.\n\n .PARAMETER LogPath\n The path to the log file to write output to. This is important for debugging since\n most of the work of this function is done within a separate process. Default value\n will be in PSScriptRoot.\n\n .PARAMETER Https\n Indicates whether the server should use Https. 
If True then the file server will use Https\n and listen on port 'https://localhost:HttpsPort'. Otherwise the file server will use Http and\n listen on port 'http://localhost:HttpPort'\n Default value is False (Http).\n\n .PARAMETER HttpPort\n Specifies the TCP port to register an Http based HttpListener on.\n\n .PARAMETER HttspPort\n Specifies the TCP port to register an Https based HttpListener on.\n#>\nfunction Start-Server\n{\n [OutputType([System.Collections.Hashtable])]\n [CmdletBinding()]\n param\n (\n [Parameter(Mandatory = $true)]\n [ValidateNotNullOrEmpty()]\n [System.String]\n $FilePath,\n\n [Parameter()]\n [System.String]\n $LogPath = (Join-Path -Path $PSScriptRoot -ChildPath 'PackageTestLogFile.txt'),\n\n [Parameter()]\n [System.Boolean]\n $Https = $false,\n\n [Parameter(Mandatory = $true)]\n [ValidateScript({$_ -gt 0})]\n [System.UInt16]\n $HttpPort,\n\n [Parameter(Mandatory = $true)]\n [ValidateScript({$_ -gt 0})]\n [System.UInt16]\n $HttpsPort\n )\n\n # Create an event object to let the client know when the server is ready to begin receiving requests.\n $fileServerStarted = New-Object -TypeName 'System.Threading.EventWaitHandle' -ArgumentList @($false, [System.Threading.EventResetMode]::ManualReset,\n 'HttpIntegrationTest.FileServerStarted')\n $null = $fileServerStarted.Reset()\n\n <#\n The server is run on a separate process so that it can receive requests\n while the tests continue to run. It takes in the same parameterss that are passed\n in to this function. 
All helper functions that the server uses have to be\n defined within the scope of this script.\n #>\n $server =\n {\n param (\n [Parameter()]\n $FilePath,\n\n [Parameter()]\n $LogPath,\n\n [Parameter()]\n $Https,\n\n [Parameter()]\n $HttpPort,\n\n [Parameter()]\n $HttpsPort\n )\n\n <#\n .SYNOPSIS\n Stops the listener, removes the SSL binding if applicable, and closes the listener.\n\n .PARAMETER HttpListener\n The listner to stop and close.\n\n .PARAMETER Https\n Indicates whether https was used and if so, removes the SSL binding.\n\n .PARAMETER HttspPort\n Specifies the TCP port to de-register an Https based HttpListener from.\n #>\n function Stop-Listener\n {\n [CmdletBinding()]\n param\n (\n [Parameter(Mandatory = $true)]\n [System.Net.HttpListener]\n $HttpListener,\n\n [Parameter(Mandatory = $true)]\n [System.Boolean]\n $Https,\n\n [Parameter(Mandatory = $true)]\n [ValidateScript({$_ -gt 0})]\n [System.UInt16]\n $HttpsPort\n )\n\n Write-Log -LogFile $LogPath -Message 'Finished listening for requests. 
Shutting down HTTP server.'\n\n $ipPort = \"0.0.0.0:$HttpsPort\"\n\n if ($null -eq $HttpListener)\n {\n $errorMessage = 'HttpListener was null when trying to close'\n Write-Log -LogFile $LogPath -Message $errorMessage\n\n if ($Https)\n {\n Invoke-ConsoleCommand -Target $ipPort -Action 'removing SSL certificate binding' -ScriptBlock {\n netsh http delete sslcert ipPort=\"$ipPort\"\n }\n }\n\n throw $errorMessage\n }\n\n if ($HttpListener.IsListening)\n {\n Write-Log -LogFile $LogPath -Message 'HttpListener is about to be stopped'\n $HttpListener.Stop()\n }\n\n if ($Https)\n {\n Write-Log -LogFile $LogPath -Message 'Removing SSL binding'\n # Remove SSL Binding\n Invoke-ConsoleCommand -Target $ipPort -Action 'removing SSL certificate binding' -ScriptBlock {\n netsh http delete sslcert ipPort=\"$ipPort\"\n }\n }\n\n Write-Log -LogFile $LogPath -Message 'Closing listener'\n $HttpListener.Close()\n\n $null = netsh advfirewall set allprofiles state on\n }\n\n <#\n .SYNOPSIS\n Creates and registers an SSL certificate for Https connections.\n\n .PARAMETER HttspPort\n Specifies the TCP port to register an Https based HttpListener on.\n #>\n function Register-Ssl\n {\n [CmdletBinding()]\n param\n (\n [Parameter(Mandatory = $true)]\n [ValidateScript({$_ -gt 0})]\n [System.UInt16]\n $HttpsPort\n )\n\n # Create certificate\n $certificate = New-SelfSignedCertificate -CertStoreLocation 'Cert:\\LocalMachine\\My' -DnsName localhost\n Write-Log -LogFile $LogPath -Message 'Created certificate'\n\n $hash = $certificate.Thumbprint\n $certPassword = ConvertTo-SecureString -String 'password12345' -AsPlainText -Force\n $tempPath = 'C:\\certForTesting'\n\n $null = Export-PfxCertificate -Cert $certificate -FilePath $tempPath -Password $certPassword\n $null = Import-PfxCertificate -CertStoreLocation 'Cert:\\LocalMachine\\Root' -FilePath 'C:\\certForTesting' -Password $certPassword\n Remove-Item -Path $tempPath\n\n Write-Log -LogFile $LogPath -Message 'Finished importing certificate into root. 
About to bind it to port.'\n\n # Use net shell command to directly bind certificate to designated testing port\n $null = netsh http add sslcert ipport=0.0.0.0:$HttpsPort certhash=$hash appid='{833f13c2-319a-4799-9d1a-5b267a0c3593}' clientcertnegotiation=enable\n }\n\n <#\n .SYNOPSIS\n Defines the callback function required for BeginGetContext.\n\n .PARAMETER Callback\n The callback script - in this case the requestListener script defined below.\n #>\n function New-ScriptBlockCallback\n {\n [CmdletBinding()]\n param\n (\n [Parameter(Mandatory = $true)]\n [ValidateNotNullOrEmpty()]\n [System.Management.Automation.ScriptBlock]\n $Callback\n )\n\n # Add the CallbackEventBridge type if it's not already defined\n if (-not ('CallbackEventBridge' -as [System.Type]))\n {\n Add-Type @'\n using System;\n\n public sealed class CallbackEventBridge {\n public event AsyncCallback CallbackComplete = delegate { };\n\n private CallbackEventBridge() {}\n\n private void CallbackInternal(IAsyncResult result)\n {\n CallbackComplete(result);\n }\n\n public AsyncCallback Callback\n {\n get { return new AsyncCallback(CallbackInternal); }\n }\n\n public static CallbackEventBridge Create()\n {\n return new CallbackEventBridge();\n }\n }\n'@\n }\n\n $bridge = [CallbackEventBridge]::Create()\n Register-ObjectEvent -InputObject $bridge -EventName 'CallbackComplete' -Action $Callback -MessageData $args > $null\n $bridge.Callback\n\n Write-Log -LogFile $LogPath -Message 'Finished callback function'\n }\n\n <#\n .SYNOPSIS\n Invokes a console command and captures the exit code.\n\n .PARAMETER Target\n Where the command is being executed.\n\n .PARAMETER Action\n A description of the action being performed.\n\n .PARAMETER ScriptBlock\n The code to execute.\n\n #>\n function Invoke-ConsoleCommand\n {\n [CmdletBinding()]\n param\n (\n [Parameter(Mandatory = $true)]\n [System.String]\n $Target,\n\n [Parameter(Mandatory = $true)]\n [System.String]\n $Action,\n\n [Parameter(Mandatory = $true)]\n 
[System.Management.Automation.ScriptBlock]\n $ScriptBlock\n )\n\n $output = Invoke-Command -ScriptBlock $ScriptBlock\n\n if ($LASTEXITCODE)\n {\n $output = $output -join [Environment]::NewLine\n $message = ('Failed action ''{0}'' on target ''{1}'' (exit code {2}): {3}' -f $Action,$Target,$LASTEXITCODE,$output)\n Write-Error -Message $message\n Write-Log -LogFile $LogPath -Message \"Error from Invoke-ConsoleCommand: $message\"\n }\n else\n {\n $nonNullOutput = $output | Where-Object { $_ -ne $null }\n Write-Log -LogFile $LogPath -Message \"Output from Invoke-ConsoleCommand: $nonNullOutput\"\n }\n }\n\n <#\n .SYNOPSIS\n Writes the specified message to the specified log file.\n Does NOT overwrite what is already written there.\n\n .PARAMETER LogFile\n The path to the file to write to.\n\n .PARAMETER Message\n The message to write to the file.\n #>\n function Write-Log\n {\n [CmdletBinding()]\n param\n (\n [Parameter(Mandatory = $true)]\n [System.String]\n $LogFile,\n\n [Parameter(Mandatory = $true)]\n [System.String]\n $Message\n )\n\n $Message >> $LogFile\n }\n\n # End of function declarations - Beginning of function execution\n\n if ($null -eq (Get-NetFirewallRule -DisplayName 'UnitTestRule' -ErrorAction 'SilentlyContinue'))\n {\n $null = New-NetFirewallRule -DisplayName 'UnitTestRule' -Direction 'Inbound' -Program \"$PSHome\\powershell.exe\" -Authentication 'NotRequired' -Action 'Allow'\n }\n\n $null = netsh advfirewall set allprofiles state off\n\n Write-Log -LogFile $LogPath -Message (Get-Date)\n\n $HttpListener = New-Object 'System.Net.HttpListener'\n $fileServerStarted = $null\n\n try\n {\n # Set up the listener\n if ($Https)\n {\n $HttpListener.Prefixes.Add([Uri] \"https://localhost:$HttpsPort\")\n\n try\n {\n Register-SSL -HttpsPort $HttpsPort\n }\n catch\n {\n $errorMessage = \"Unable to bind SSL certificate to port. 
Error: $_\"\n Write-Log -LogFile $LogPath -Message $errorMessage\n throw $errorMessage\n }\n\n Write-Log -LogFile $LogPath -Message 'Certificate is registered'\n }\n else\n {\n $HttpListener.Prefixes.Add([Uri] \"http://localhost:$HttpPort\")\n }\n\n Write-Log -LogFile $LogPath -Message 'Finished listener setup - about to start listener'\n\n $HttpListener.Start()\n\n # Cue the tests that the listener is started and can begin receiving requests\n $fileServerStarted = New-Object -TypeName 'System.Threading.EventWaitHandle' `\n -ArgumentList @($false,\n [System.Threading.EventResetMode]::AutoReset,\n 'HttpIntegrationTest.FileServerStarted'\n )\n $fileServerStarted.Set()\n\n Write-Log -LogFile $LogPath -Message 'Listener is started'\n\n <#\n .SYNOPSIS\n Script block called by the callback function for BeginGetContext.\n Ends the current BeginGetContext, copies the response, and calls BeginGetContext again\n to continue receiving requests.\n\n .PARAMETER Result\n th IAsyncResult containing the listener object and path to the MSI file.\n\n #>\n $requestListener =\n {\n [CmdletBinding()]\n param\n (\n [Parameter()]\n [IAsyncResult]\n $Result\n )\n\n Write-Log -LogFile $LogPath -Message 'Starting request listener'\n\n $asyncState = $Result.AsyncState\n [System.Net.HttpListener] $listener = $asyncState.Listener\n $filepath = $asyncState.FilePath\n\n Write-Log -LogFile $LogPath -Message (ConvertTo-Json $asyncState)\n\n # Call EndGetContext to complete the asynchronous operation.\n $context = $listener.EndGetContext($Result)\n\n $response = $null\n\n try\n {\n # Prepare binary buffer for http/https response\n $fileInfo = New-Object -TypeName 'System.IO.FileInfo' -ArgumentList @( $filePath )\n $numBytes = $fileInfo.Length\n $fileStream = New-Object -TypeName 'System.IO.FileStream' -ArgumentList @( $filePath, 'Open' )\n $binaryReader = New-Object -TypeName 'System.IO.BinaryReader' -ArgumentList @( $fileStream )\n [System.Byte[]] $buf = $binaryReader.ReadBytes($numBytes)\n 
$fileStream.Close()\n\n Write-Log -LogFile $LogPath -Message 'Buffer prepared for response'\n\n $response = $context.Response\n $response.ContentType = 'application/octet-stream'\n $response.ContentLength64 = $buf.Length\n $response.OutputStream.Write($buf, 0, $buf.Length)\n\n Write-Log -LogFile $LogPath -Message 'Response written'\n\n $response.OutputStream.Flush()\n\n # Open the response stream again to receive more requests\n $listener.BeginGetContext((New-ScriptBlockCallback -Callback $requestListener), $asyncState)\n }\n catch\n {\n $errorMessage = \"error writing response: $_\"\n Write-Log -LogFile $LogPath -Message $errorMessage\n throw $errorMessage\n }\n finally\n {\n if ($null -ne $response)\n {\n $response.Dispose()\n }\n }\n }\n\n # Register the request listener scriptblock as the async callback\n $HttpListener.BeginGetContext((New-ScriptBlockCallback -Callback $requestListener), @{\n Listener = $Httplistener\n FilePath = $FilePath\n }) | Out-Null\n Write-Log -LogFile $LogPath -Message 'First BeginGetContext called'\n\n # Ensure that the request listener stays on until the server is done receiving responses - client is responsible for stopping the server.\n while ($true)\n {\n Start-Sleep -Milliseconds 100\n }\n }\n catch\n {\n $errorMessage = \"There were problems setting up the HTTP(s) listener. 
Error: $_\"\n\n Write-Log -LogFile $LogPath -Message $errorMessage\n\n 'Error Record Info' >> $LogPath\n $_ | ConvertTo-Xml -As String >> $LogPath\n\n 'Exception Info' >> $LogPath\n $_.Exception | ConvertTo-Xml -As String >> $LogPath\n\n 'Running Process Info' >> $LogPath\n Get-Process | Format-List | Out-String >> $LogPath\n\n 'Open TCP Connections Info' >> $LogPath\n Get-NetTCPConnection | Format-List | Out-String >> $LogPath\n\n throw $_\n }\n finally\n {\n if ($fileServerStarted)\n {\n $fileServerStarted.Dispose()\n }\n\n Write-Log -LogFile $LogPath -Message 'Stopping the Server'\n Stop-Listener -HttpListener $HttpListener -Https $Https -HttpsPort $HttpsPort\n }\n }\n\n if ($Https)\n {\n $jobName = $testJobPrefix + 'Https'\n }\n else\n {\n $jobName = $testJobPrefix + 'Http'\n }\n\n $job = Start-Job -ScriptBlock $server -Name $jobName -ArgumentList @( $FilePath, $LogPath, $Https, $HttpPort, $HttpsPort )\n\n # Verify that the job is receivable and does not contain an exception. If it does, re-throw it.\n try\n {\n $null = $job | Receive-Job\n }\n catch\n {\n Write-Error -Message 'Failed to setup HTTP(S) listener for MsiPackage Tests'\n throw $_\n }\n\n <#\n Return the event object so that client knows when it can start sending requests and\n the job object so that the client can stop the job once it is done sending requests.\n #>\n return @{\n FileServerStarted = $fileServerStarted\n Job = $job\n }\n}\n\n<#\n .SYNOPSIS\n Disposes the EventWaitHandle object and stops and removes the job to ensure that proper\n cleanup is done for the listener. If this function is not called after Start-Server then\n the listening port will remain open until the job is stopped or the machine is rebooted.\n\n .PARAMETER FileServerStarted\n The EventWaitHandle object returned by Start-Server to let the client know that it is ready\n to receive requests. 
The client is responsible for calling this function to ensure that\n this object is disposed of once the client is done sending requests.\n\n .PARAMETER Job\n The job object returned by Start-Server that needs to be stopped so that the server will\n close the listening port.\n#>\nfunction Stop-Server\n{\n [CmdletBinding()]\n param\n (\n [Parameter()]\n [System.Threading.EventWaitHandle]\n $FileServerStarted,\n\n [Parameter()]\n [System.Management.Automation.Job]\n $Job\n )\n\n if ($null -ne $FileServerStarted)\n {\n $FileServerStarted.Dispose()\n }\n\n if ($null -ne $Job)\n {\n Stop-Job -Job $Job\n Remove-Job -Job $Job\n }\n}\n\n<#\n .SYNOPSIS\n Removes any jobs associated with HTTP(S) servers that were created\n for MsiPackage tests.\n#>\nfunction Stop-EveryTestServerInstance\n{\n [CmdletBinding()]\n param ()\n\n Get-Job -Name \"$($testJobPrefix)*\" | Stop-Job\n Get-Job -Name \"$($testJobPrefix)*\" | Remove-Job\n}\n\n<#\n .SYNOPSIS\n Creates a new MSI package for testing.\n\n .PARAMETER DestinationPath\n The path at which to create the test msi file.\n#>\nfunction New-TestMsi\n{\n [CmdletBinding()]\n param\n (\n [Parameter(Mandatory = $true)]\n [ValidateNotNullOrEmpty()]\n [System.String]\n $DestinationPath\n )\n\n #region msiContentInBase64\n $msiContentInBase64 = '0M8R4KGxGuEAAAAAAAAAAAAAAAAAAAAAPgAEAP7/DAAGAAAAAAAAAAEAAAABAAAAAQA' + `\n 'AAAAAAAAAEAAAAgAAAAEAAAD+////AAAAAAAAAAD/////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n 
'/////////////////////////////////////////////////////////////////////////////////////' + `\n '////////////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP3////+/////v///wYAAAD+////BAAAAP7////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n 
'/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n 
'/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n 
'/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '////////////////////////////////////////////////////////////9SAG8AbwB0ACAARQBuAHQAcgB' + `\n '5AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFgAFAP//////////CQAAAIQQ' + `\n 'DAAAAAAAwAAAAAAAAEYAAAAAAAAAAAAAAADwRqG1qh/OAQMAAAAAEwAAAAAAAAUAUwB1AG0AbQBhAHIAeQBJA' + `\n 'G4AZgBvAHIAbQBhAHQAaQBvAG4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAoAAIA////////////////AA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADwCAAAAAAAAQEj/P+RD7EHkRaxEMUgAAAA' + `\n 
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAgETAAAABAAAAP////8A' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJAAAAOAcAAAAAAABASMpBMEOxOztCJkY3QhxCN' + `\n 'EZoRCZCAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGAACAQsAAAAKAAAA/////w' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACYAAAAwAAAAAAAAAEBIykEwQ7E/Ej8oRThCsUE' + `\n 'oSAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAUAAIBDAAAAP//////////' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJwAAABgAAAAAAAAAQEjKQflFzkaoQfhFKD8oR' + `\n 'ThCsUEoSAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABgAAgD///////////////' + `\n '8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAoAAAAKgAAAAAAAABASIxE8ERyRGhEN0gAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADgACAP//////////////' + `\n '/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACkAAAAMAAAAAAAAAEBIDUM1QuZFckU8SAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOAAIADgAAAAIAAAD///' + `\n '//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKgAAABIAAAAAAAAAQEgPQuRFeEUoSAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAwAAgD/////////////' + `\n '//8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAArAAAAEAAAAAAAAABASA9C5EV4RSg7MkSzR' + `\n 'DFC8UU2SAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFgACAQcAAAADAAAA//' + `\n '///wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACwAAAAEAAAAAAAAAEBIUkT2ReRDrzs7QiZ' + `\n 'GN0IcQjRGaEQmQgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAaAAIBBQAAAAEAAAD/' + `\n '////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALQAAAHIAAAAAAAAAQEhSRPZF5EOvPxI/K' + `\n 'EU4QrFBKEgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABYAAgH///////////' + `\n '////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAvAAAAMAAAAAAAAABASBVBeETmQoxE8UH' + `\n 'sRaxEMUgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFAACAP//////////' + `\n 
'/////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADAAAAAEAAAAAAAAAEBIWUXyRGhFN0cAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMAAIBDwAAAP////' + `\n '//////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMQAAACQAAAAAAAAAQEgbQipD9kU1RwA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAwAAgEQAAAADQAA' + `\n 'AP////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAyAAAADAAAAAAAAABASN5EakXkQShIA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADAACAP////////' + `\n '///////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADMAAAAgAAAAAAAAAEBIfz9kQS9CNkg' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMAAIBEQAAAAgA' + `\n 'AAD/////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANAAAACAAAAAAAAAAQEg/O/JDOESxR' + `\n 'QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAwAAgD///////' + `\n '////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA1AAAAWAIAAAAAAABASD8/d0VsRGo' + `\n '+skQvSAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAACAP//////' + `\n '/////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD8AAAAYAwAAAAAAAEBIPz93RWxEa' + `\n 'jvkRSRIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAIBBgAAAB' + `\n 'IAAAD/////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABQAAAFAaAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/////' + `\n '//////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP////' + `\n '///////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////' + `\n '////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD///' + `\n '////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//' + `\n '/////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA//' + `\n '//////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/' + `\n '//////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP' + `\n '///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n '////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'D///////////////8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AP///////////////wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AA////////////////AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQA' + `\n 'AAAIAAAADAAAABAAAAAUAAAAGAAAABwAAAAgAAAD+////CgAAAAsAAAAMAAAADQAAAA4AAAAPAAAAEAAAABEA' + `\n 'AAASAAAAEwAAABQAAAAVAAAAFgAAABcAAAAYAAAAGQAAABoAAAAbAAAAHAAAAB0AAAAeAAAAHwAAACAAAAAhA' + `\n 'AAAIgAAACMAAAAkAAAAJQAAAP7////+/////v////7////+/////v////7////+////LgAAAP7////+/////v' + `\n 
'////7////+/////v////7///82AAAANwAAADgAAAA5AAAAOgAAADsAAAA8AAAAPQAAAD4AAAD+////QAAAAEE' + `\n 'AAABCAAAAQwAAAEQAAABFAAAARgAAAEcAAABIAAAASQAAAEoAAABLAAAA/v//////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n 
'/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n 
'/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '/////////////////////////////////////////////////////////////////////////////////////' + `\n '///////////////////7/AAAGAQIAAAAAAAAAAAAAAAAAAAAAAAEAAADghZ/y+U9oEKuRCAArJ7PZMAAAAAwC' + `\n 'AAAOAAAAAQAAAHgAAAACAAAAgAAAAAMAAACgAAAABAAAAMQAAAAFAAAA9AAAAAYAAAAIAQAABwAAAGwBAAAJA' + `\n 
'AAAgAEAAAwAAACwAQAADQAAALwBAAAOAAAAyAEAAA8AAADQAQAAEgAAANgBAAATAAAABAIAAAIAAADkBAAAHg' + `\n 'AAABYAAABJbnN0YWxsYXRpb24gRGF0YWJhc2UAAAAeAAAAGwAAAEEgcGFja2FnZSBmb3IgdW5pdCB0ZXN0aW5' + `\n 'nAAAeAAAAKAAAAE1pY3Jvc29mdCBVbml0IFRlc3RpbmcgR3VpbGQgb2YgQW1lcmljYQAeAAAACgAAAEluc3Rh' + `\n 'bGxlcgAAAB4AAABcAAAAVGhpcyBpbnN0YWxsZXIgZGF0YWJhc2UgY29udGFpbnMgdGhlIGxvZ2ljIGFuZCBkY' + `\n 'XRhIHJlcXVpcmVkIHRvIGluc3RhbGwgRFNDVW5pdFRlc3RQYWNrYWdlLgAeAAAACwAAAEludGVsOzEwMzMAAB' + `\n '4AAAAnAAAAe0YxN0FGREExLUREMEItNDRFNi1CNDczLTlFQkUyREJEOUVBOX0AAEAAAAAAAOO0qh/OAUAAAAA' + `\n 'AAOO0qh/OAQMAAADIAAAAAwAAAAIAAAAeAAAAIwAAAFdpbmRvd3MgSW5zdGFsbGVyIFhNTCAoMy43LjEyMDQu' + `\n 'MCkAAAMAAAACAAAAAAAAAAYABgAGAAYABgAGAAYABgAGAAYACgAKACIAIgAiACkAKQApACoAKgAqACsAKwArA' + `\n 'CsAKwArADEAMQAxAD4APgA+AD4APgA+AD4APgBNAE0AUgBSAFIAUgBSAFIAUgBSAGAAYABgAGEAYQBhAGIAYg' + `\n 'BmAGYAZgBmAGYAZgByAHIAdgB2AHYAdgB2AHYAgACAAIAAgACAAIAAgAACAAUACwAMAA0ADgAPABAAEQASAAc' + `\n 'ACQAjACUAJwAjACUAJwAjACUAJwAlACsALQAwADMANgAxADoAPAALADAAMwA+AEAAQgBFAEcATgBQACcAMwBQ' + `\n 'AFIAVQBYAFoAXAAjACUAJwAjACUAJwALACUAZwBpAGsAbQBvAHEABwByAAEABwBQAHYAeAB6ADMAXACBAIMAh' + `\n 'QCJAIsACAAIABgAGAAYABgAGAAIABgAGAAIAAgACAAYABgACAAYABgACAAYABgAGAAIABgACAAIABgACAAYAA' + `\n 'gAGAAYAAgACAAYABgAGAAIAAgACAAIABgACAAIAAgACAAYABgACAAYABgACAAYABgACAAIAAgACAAYABgAGAA' + `\n 'YAAgACAAYABgACAAIAAgACAAIABgACAAYABgAGAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAgAEAAAAAAAAA' + `\n 'AAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAA/P//fwAAAAAAAAAA/P//fwAAAAAAAAAA/P//fwAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAACAAAAAAAAAAA' + `\n 'ABAACAAAAAgAAAAAAAAAAAAAAAAAAAAIAAAAAAAAAAAAAAAAAAAAAA/P//fwAAAAAAAAAA/P//fwAAAAAAAAA' + `\n 'AAQAAgAAAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA////fwAAAAAAAACAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAACA/////wAAAAAAAAAA/////wAAA' + `\n 'AAAAAAAAAAAAAAAAAD/fwCAAAAAAAAAAAD/fwCAAAAAAAAAAAD/fwCAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/38AgP9/AIAAAAAAAAAAAP//////fwCAAAA' + `\n 'AAAAAAAAAAAAA/////wAAAAAAAAAAAAAAAAAAAAD/fwCAAAAAAAAAAAD/fwCAAAAAAAAAAAD/fwCA/////wAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADAACAAAAAAP////8AAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAxAAAANw' + `\n 'AAADEAAAAAADEAAAAAAD4AAAAAAAAAPgArAAAAAAArAAAAAAAAAFIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAArAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAGAAAABgAAAAAABgAAAAAABgAAAAAAAAAGAAYAAAAAAAYAAAAAAA' + `\n 'AABgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAAAAAAAAAAAAAAAAAAAAAAAAB' + `\n 'MAEwAfAB8AAAAAAAAAAAATAAAAAAAAABMAJQAAABMAJQAAABMAJQAAACUAEwAuABMAAAATABMAEwA8AB8ASQA' + `\n 'AABMAEwAfAAAAAAATABMAAAAAABMAEwBWAAAAWgBcABMAJQAAABMAJQAAAGQAJQAAAAAAHwBtAB8AcgAfABMA' + `\n 'ZABkABMAEwAAAHsAAABcAC4AHwAfAGQASQAAAAAAAAAAAB0AAAAAABYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFAAVACEAIAAeABw' + `\n 'AGgAXABsAGQAAAAAAJAAmACgAJAAmACgAJAAmACgANQAsAC8AMgA0ADgAOQA7AD0ARABKAEwAPwBBAEMARgBI' + `\n 'AE8AUQBfAF4AVABTAFcAWQBbAF0AJAAmACgAJAAmACgAZQBjAGgAagBsAG4AcABzAHUAdAB9AH4AfwB3AHkAf' + `\n 'ACIAIcAggCEAIYAigCMAAAAAAAAAAAAjQCOAI8AkACRAJIAkwCUAAAAAAAAAAAAAAAAAAAAAAAgg4SD6IN4hd' + `\n 'yFPI+gj8iZAAAAAAAAAAAAAAAAAAAAAI0AjgCPAJUAAAAAAAAAAAAgg4SD6IMUhQAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACNAI8AkACRAJQAlgCXAAAAAAAAAAAAAAAAAAAAIIPog3iF3IXImZyY' + `\n 'AJkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmACZAJoABIAAAJsAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJoAnACeAJwAngAAAJ0AnwCgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAChAAAAogAAAAKAAYAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAoQCYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAI0AjgCPAJAAkQCUAJYAlwCjAKQApQCmAKcAqACpAKoAqwCsAK0AA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgg4SD6IN4hdyFyJmcmACZGYBkgLyCsIRAhg' + `\n 'iHKIqIk3CX1Jd5hQAAAAAAAAAAAAAAAAAAjQCOAI8AlQCjAKQApQCmAAAAAAAAAAAAAAAAAAAAAAAgg4SD6IM' + `\n 'UhRmAZIC8grCEAAAAAAAAAAAAAAAAAAAAAK4ArwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACBALAAsgC0ALYAuAC6AL0AvwC8ALEAswC1ALcAuQC7AL4AwAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmwACgMEAwgDDAJgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALwAvAAAALsAuwAAAAAAAAABAACAAgAAgAAAAADEAMUAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGACIAKQAqACsAMQA+AE0AUgBgAGEAYgBmAHIAdgCAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABgAGAAYABgAGAAYABgAGAAYABgAiACIAIgApACkAKQAqACoAK' + `\n 'gArACsAKwArACsAKwAxADEAMQA+AD4APgA+AD4APgA+AD4ATQBNAFIAUgBSAFIAUgBSAFIAUgBgAGAAYABhAG' + `\n 'EAYQBiAGIAZgBmAGYAZgBmAGYAcgByAHYAdgB2AHYAdgB2AIAAgACAAIAAgACAAIAAAYACgAOABIAFgAaAB4A' + `\n 'IgAmACoABgAKAA4ABgAKAA4ABgAKAA4ABgAKAA4AEgAWABoABgAKAA4ABgAKAA4AEgAWABoAHgAiAAYACgAGA' + `\n 'AoADgASABYAGgAeACIABgAKAA4ABgAKAA4ABgAKAAYACgAOABIAFgAaAAYACgAGAAoADgASABYAGgAGAAoADg' + `\n 'ASABYAGgAeAAgAFABAAEgAPABEADgANAAwACwAjACUAJwAjACUAJwAjACUAJwArAC0AMAAzACUANgAxADoAPA' + `\n 'A+AEAAQgALAEUARwAwADMATgBQAFIAUABVAFgAWgBcADMAJwAjACUAJwAjACUAJwAlAAsAZwBpAGsAbQBvAHE' + `\n 'AcgAHAHYAeAB6AAEABwBQAIEAgwCFAFwAMwCJAIsAIK0grQSNBJEEkf+dApUgnf+d/51Irf+dApVIrf+dApVI' + `\n 'rf+dApVIrSadSI0Chf+dSJ1IrUid/48mrSadQJ//nwKVAoVInQKFJq1IrUitSI3/jwSBSJ0UnQKVBIFIrf+dA' + `\n 'pVIrf+dApX/rf+PAqUEgUCf/50gnUidSK0Aj0itAoX/j/+fAJ9IjSatFL0Uvf+9BKH/nUiNAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAIABQACAAAAAAAAAAAABgACAAsAFQAFAAUAAQA' + `\n 'mAAoAAQATAAIACwAGAAMAAgAIAAIACQACAAgAAgBudGVnZXIgdG8gZGV0ZXJtaW5lIHNvcnQgb3JkZXIgZm9y' + `\n 
'IHRhYmxlLkxhc3RTZXF1ZW5jZUZpbGUgc2VxdWVuY2UgbnVtYmVyIGZvciB0aGUgbGFzdCBmaWxlIGZvciB0a' + `\n 'GlzIG1lZGlhLkRpc2tQcm9tcHREaXNrIG5hbWU6IHRoZSB2aXNpYmxlIHRleHQgYWN0dWFsbHkgcHJpbnRlZC' + `\n 'BvbiB0aGUgZGlzay4gIFRoaXMgd2lsbCBiZSB1c2VkIHRvIHByb21wdCB0aGUgdXNlciB3aGVuIHRoaXMgZGl' + `\n 'zayBuZWVkcyB0byBiZSBpbnNlcnRlZC5DYWJpbmV0SWYgc29tZSBvciBhbGwgb2YgdGhlIGZpbGVzIHN0b3Jl' + `\n 'ZCBvbiB0aGUgbWVkaWEgYXJlIGNvbXByZXNzZWQgaW4gYSBjYWJpbmV0LCB0aGUgbmFtZSBvZiB0aGF0IGNhY' + `\n 'mluZXQuVm9sdW1lTGFiZWxUaGUgbGFiZWwgYXR0cmlidXRlZCB0byB0aGUgdm9sdW1lLlNvdXJjZVByb3Blcn' + `\n 'R5VGhlIHByb3BlcnR5IGRlZmluaW5nIHRoZSBsb2NhdGlvbiBvZiB0aGUgY2FiaW5ldCBmaWxlLk5hbWUgb2Y' + `\n 'gcHJvcGVydHksIHVwcGVyY2FzZSBpZiBzZXR0YWJsZSBieSBsYXVuY2hlciBvciBsb2FkZXIuU3RyaW5nIHZh' + `\n 'bHVlIGZvciBwcm9wZXJ0eS4gIE5ldmVyIG51bGwgb3IgZW1wdHkuUmVnaXN0cnlQcmltYXJ5IGtleSwgbm9uL' + `\n 'WxvY2FsaXplZCB0b2tlbi5Sb290VGhlIHByZWRlZmluZWQgcm9vdCBrZXkgZm9yIHRoZSByZWdpc3RyeSB2YW' + `\n 'x1ZSwgb25lIG9mIHJya0VudW0uS2V5UmVnUGF0aFRoZSBrZXkgZm9yIHRoZSByZWdpc3RyeSB2YWx1ZS5UaGU' + `\n 'gcmVnaXN0cnkgdmFsdWUgbmFtZS5UaGUgcmVnaXN0cnkgdmFsdWUuRm9yZWlnbiBrZXkgaW50byB0aGUgQ29t' + `\n 'cG9uZW50IHRhYmxlIHJlZmVyZW5jaW5nIGNvbXBvbmVudCB0aGF0IGNvbnRyb2xzIHRoZSBpbnN0YWxsaW5nI' + `\n 'G9mIHRoZSByZWdpc3RyeSB2YWx1ZS5VcGdyYWRlVXBncmFkZUNvZGVUaGUgVXBncmFkZUNvZGUgR1VJRCBiZW' + `\n 'xvbmdpbmcgdG8gdGhlIHByb2R1Y3RzIGluIHRoaXMgc2V0LlZlcnNpb25NaW5UaGUgbWluaW11bSBQcm9kdWN' + `\n '0VmVyc2lvbiBvZiB0aGUgcHJvZHVjdHMgaW4gdGhpcyBzZXQuICBUaGUgc2V0IG1heSBvciBtYXkgbm90IGlu' + `\n 'Y2x1ZGUgcHJvZHVjdHMgd2l0aCB0aGlzIHBhcnRpY3VsYXIgdmVyc2lvbi5WZXJzaW9uTWF4VGhlIG1heGltd' + `\n 'W0gUHJvZHVjdFZlcnNpb24gb2YgdGhlIHByb2R1Y3RzIGluIHRoaXMgc2V0LiAgVGhlIHNldCBtYXkgb3IgbW' + `\n 'F5IG5vdCBpbmNsdWRlIHByb2R1Y3RzIHdpdGggdGhpcyBwYXJ0aWN1bGFyIHZlcnNpb24uQSBjb21tYS1zZXB' + `\n 'hcmF0ZWQgbGlzdCBvZiBsYW5ndWFnZXMgZm9yIGVpdGhlciBwcm9kdWN0cyBpbiB0aGlzIHNldCBvciBwcm9k' + `\n 'dWN0cyBub3QgaW4gdGhpcyBzZXQuVGhlIGF0dHJpYnV0ZXMgb2YgdGhpcyBwcm9kdWN0IHNldC5SZW1vdmVUa' + `\n 
'GUgbGlzdCBvZiBmZWF0dXJlcyB0byByZW1vdmUgd2hlbiB1bmluc3RhbGxpbmcgYSBwcm9kdWN0IGZyb20gdG' + `\n 'hpcyBzZXQuICBUaGUgZGVmYXVsdCBpcyAiQUxMIi5BY3Rpb25Qcm9wZXJ0eVRoZSBwcm9wZXJ0eSB0byBzZXQ' + `\n 'gd2hlbiBhIHByb2R1Y3QgaW4gdGhpcyBzZXQgaXMgZm91bmQuQ29zdEluaXRpYWxpemVGaWxlQ29zdENvc3RG' + `\n 'aW5hbGl6ZUluc3RhbGxWYWxpZGF0ZUluc3RhbGxJbml0aWFsaXplSW5zdGFsbEFkbWluUGFja2FnZUluc3Rhb' + `\n 'GxGaWxlc0luc3RhbGxGaW5hbGl6ZUV4ZWN1dGVBY3Rpb25QdWJsaXNoRmVhdHVyZXNQdWJsaXNoUHJvZHVjdF' + `\n 'Byb2R1Y3RDb21wb25lbnR7OTg5QjBFRDgtREVBRC01MjhELUI4RTMtN0NBRTQxODYyNEQ1fUlOU1RBTExGT0x' + `\n 'ERVJEdW1teUZsYWdWYWx1ZVByb2dyYW1GaWxlc0ZvbGRlcnE0cGZqNHo3fERTQ1NldHVwUHJvamVjdFRBUkdF' + `\n 'VERJUi5Tb3VyY2VEaXJQcm9kdWN0RmVhdHVyZURTQ1NldHVwUHJvamVjdEZpbmRSZWxhdGVkUHJvZHVjdHNMY' + `\n 'XVuY2hDb25kaXRpb25zVmFsaWRhdGVQcm9kdWN0SURNaWdyYXRlRmVhdHVyZVN0YXRlc1Byb2Nlc3NDb21wb2' + `\n '5lbnRzVW5wdWJsaXNoRmVhdHVyZXNSZW1vdmVSZWdpc3RyeVZhbHVlc1dyaXRlUmVnaXN0cnlWYWx1ZXNSZWd' + `\n 'pc3RlclVzZXJSZWdpc3RlclByb2R1Y3RSZW1vdmVFeGlzdGluZ1Byb2R1Y3RzTk9UIFdJWF9ET1dOR1JBREVf' + `\n 'REVURUNURURBIG5ld2VyIHZlcnNpb24gb2YgW1Byb2R1Y3ROYW1lXSBpcyBhbHJlYWR5IGluc3RhbGxlZC5BT' + `\n 'ExVU0VSUzFNYW51ZmFjdHVyZXJNaWNyb3NvZnQgVW5pdCBUZXN0aW5nIEd1aWxkIG9mIEFtZXJpY2FQcm9kdW' + `\n 'N0Q29kZXtERUFEQkVFRi04MEM2LTQxRTYtQTFCOS04QkRCOEEwNTAyN0Z9UHJvZHVjdExhbmd1YWdlMTAzM1B' + `\n 'yb2R1Y3ROYW1lRFNDVW5pdFRlc3RQYWNrYWdlUHJvZHVjdFZlcnNpb24xLjIuMy40ezgzQkMzNzkyLTgwQzYt' + `\n 'NDFFNi1BMUI5LThCREI4QTA1MDI3Rn1TZWN1cmVDdXN0b21Qcm9wZXJ0aWVzV0lYX0RPV05HUkFERV9ERVRFQ' + `\n '1RFRDtXSVhfVVBHUkFERV9ERVRFQ1RFRFdpeFBkYlBhdGhDOlxVc2Vyc1xiZWNhcnJcRG9jdW1lbnRzXFZpc3' + `\n 'VhbCBTdHVkaW8gMjAxMFxQcm9qZWN0c1xEU0NTZXR1cFByb2plY3RcRFNDU2V0dXBQcm9qZWN0XGJpblxEZWJ' + `\n '1Z1xEU0NTZXR1cFByb2plY3Qud2l4cGRiU29mdHdhcmVcRFNDVGVzdERlYnVnRW50cnlbfl1EVU1NWUZMQUc9' + `\n 'W0RVTU1ZRkxBR11bfl1XSVhfVVBHUkFERV9ERVRFQ1RFRFdJWF9ET1dOR1JBREVfREVURUNURURzZWQgdG8gZ' + `\n 'm9yY2UgYSBzcGVjaWZpYyBkaXNwbGF5IG9yZGVyaW5nLkxldmVsVGhlIGluc3RhbGwgbGV2ZWwgYXQgd2hpY2' + `\n 
'ggcmVjb3JkIHdpbGwgYmUgaW5pdGlhbGx5IHNlbGVjdGVkLiBBbiBpbnN0YWxsIGxldmVsIG9mIDAgd2lsbCB' + `\n 'kaXNhYmxlIGFuIGl0ZW0gYW5kIHByZXZlbnQgaXRzIGRpc3BsYXkuVXBwZXJDYXNlVGhlIG5hbWUgb2YgdGhl' + `\n 'IERpcmVjdG9yeSB0aGF0IGNhbiBiZSBjb25maWd1cmVkIGJ5IHRoZSBVSS4gQSBub24tbnVsbCB2YWx1ZSB3a' + `\n 'WxsIGVuYWJsZSB0aGUgYnJvd3NlIGJ1dHRvbi4wOzE7Mjs0OzU7Njs4Ozk7MTA7MTY7MTc7MTg7MjA7MjE7Mj' + `\n 'I7MjQ7MjU7MjY7MzI7MzM7MzQ7MzY7Mzc7Mzg7NDg7NDk7NTA7NTI7NQAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAATmFtZVRhYmxlQ29sdW1uX1Zh' + `\n 'bGlkYXRpb25WYWx1ZU5Qcm9wZXJ0eUlkX1N1bW1hcnlJbmZvcm1hdGlvbkRlc2NyaXB0aW9uU2V0Q2F0ZWdvc' + `\n 'nlLZXlDb2x1bW5NYXhWYWx1ZU51bGxhYmxlS2V5VGFibGVNaW5WYWx1ZUlkZW50aWZpZXJOYW1lIG9mIHRhYm' + `\n 'xlTmFtZSBvZiBjb2x1bW5ZO05XaGV0aGVyIHRoZSBjb2x1bW4gaXMgbnVsbGFibGVZTWluaW11bSB2YWx1ZSB' + `\n 'hbGxvd2VkTWF4aW11bSB2YWx1ZSBhbGxvd2VkRm9yIGZvcmVpZ24ga2V5LCBOYW1lIG9mIHRhYmxlIHRvIHdo' + `\n 'aWNoIGRhdGEgbXVzdCBsaW5rQ29sdW1uIHRvIHdoaWNoIGZvcmVpZ24ga2V5IGNvbm5lY3RzVGV4dDtGb3JtY' + `\n 'XR0ZWQ7VGVtcGxhdGU7Q29uZGl0aW9uO0d1aWQ7UGF0aDtWZXJzaW9uO0xhbmd1YWdlO0lkZW50aWZpZXI7Qm' + `\n 'luYXJ5O1VwcGVyQ2FzZTtMb3dlckNhc2U7RmlsZW5hbWU7UGF0aHM7QW55UGF0aDtXaWxkQ2FyZEZpbGVuYW1' + `\n 'lO1JlZ1BhdGg7Q3VzdG9tU291cmNlO1Byb3BlcnR5O0NhYmluZXQ7U2hvcnRjdXQ7Rm9ybWF0dGVkU0RETFRl' + `\n 'eHQ7SW50ZWdlcjtEb3VibGVJbnRlZ2VyO1RpbWVEYXRlO0RlZmF1bHREaXJTdHJpbmcgY2F0ZWdvcnlUZXh0U' + `\n '2V0IG9mIHZhbHVlcyB0aGF0IGFyZSBwZXJtaXR0ZWREZXNjcmlwdGlvbiBvZiBjb2x1bW5BZG1pbkV4ZWN1dG' + `\n 'VTZXF1ZW5jZUFjdGlvbk5hbWUgb2YgYWN0aW9uIHRvIGludm9rZSwgZWl0aGVyIGluIHRoZSBlbmdpbmUgb3I' + `\n 'gdGhlIGhhbmRsZXIgRExMLkNvbmRpdGlvbk9wdGlvbmFsIGV4cHJlc3Npb24gd2hpY2ggc2tpcHMgdGhlIGFj' + `\n 'dGlvbiBpZiBldmFsdWF0ZXMgdG8gZXhwRmFsc2UuSWYgdGhlIGV4cHJlc3Npb24gc3ludGF4IGlzIGludmFsa' + `\n 'WQsIHRoZSBlbmdpbmUgd2lsbCB0ZXJtaW5hdGUsIHJldHVybmluZyBpZXNCYWRBY3Rpb25EYXRhLlNlcXVlbm' + `\n 'NlTnVtYmVyIHRoYXQgZGV0ZXJtaW5lcyB0aGUgc29ydCBvcmRlciBpbiB3aGljaCB0aGUgYWN0aW9ucyBhcmU' + `\n 'gdG8gYmUgZXhlY3V0ZWQuICBMZWF2ZSBibGFuayB0byBzdXBwcmVzcyBhY3Rpb24uQWRtaW5VSVNlcXVlbmNl' + `\n 'QWR2dEV4ZWN1dGVTZXF1ZW5jZUNvbXBvbmVudFByaW1hcnkga2V5IHVzZWQgdG8gaWRlbnRpZnkgYSBwYXJ0a' + `\n 'WN1bGFyIGNvbXBvbmVudCByZWNvcmQuQ29tcG9uZW50SWRHdWlkQSBzdHJpbmcgR1VJRCB1bmlxdWUgdG8gdG' + `\n 'hpcyBjb21wb25lbnQsIHZlcnNpb24sIGFuZCBsYW5ndWFnZS5EaXJlY3RvcnlfRGlyZWN0b3J5UmVxdWlyZWQ' + `\n 'ga2V5IG9mIGEgRGlyZWN0b3J5IHRhYmxlIHJlY29yZC4gVGhpcyBpcyBhY3R1YWxseSBhIHByb3BlcnR5IG5h' + `\n 
'bWUgd2hvc2UgdmFsdWUgY29udGFpbnMgdGhlIGFjdHVhbCBwYXRoLCBzZXQgZWl0aGVyIGJ5IHRoZSBBcHBTZ' + `\n 'WFyY2ggYWN0aW9uIG9yIHdpdGggdGhlIGRlZmF1bHQgc2V0dGluZyBvYnRhaW5lZCBmcm9tIHRoZSBEaXJlY3' + `\n 'RvcnkgdGFibGUuQXR0cmlidXRlc1JlbW90ZSBleGVjdXRpb24gb3B0aW9uLCBvbmUgb2YgaXJzRW51bUEgY29' + `\n 'uZGl0aW9uYWwgc3RhdGVtZW50IHRoYXQgd2lsbCBkaXNhYmxlIHRoaXMgY29tcG9uZW50IGlmIHRoZSBzcGVj' + `\n 'aWZpZWQgY29uZGl0aW9uIGV2YWx1YXRlcyB0byB0aGUgJ1RydWUnIHN0YXRlLiBJZiBhIGNvbXBvbmVudCBpc' + `\n 'yBkaXNhYmxlZCwgaXQgd2lsbCBub3QgYmUgaW5zdGFsbGVkLCByZWdhcmRsZXNzIG9mIHRoZSAnQWN0aW9uJy' + `\n 'BzdGF0ZSBhc3NvY2lhdGVkIHdpdGggdGhlIGNvbXBvbmVudC5LZXlQYXRoRmlsZTtSZWdpc3RyeTtPREJDRGF' + `\n '0YVNvdXJjZUVpdGhlciB0aGUgcHJpbWFyeSBrZXkgaW50byB0aGUgRmlsZSB0YWJsZSwgUmVnaXN0cnkgdGFi' + `\n 'bGUsIG9yIE9EQkNEYXRhU291cmNlIHRhYmxlLiBUaGlzIGV4dHJhY3QgcGF0aCBpcyBzdG9yZWQgd2hlbiB0a' + `\n 'GUgY29tcG9uZW50IGlzIGluc3RhbGxlZCwgYW5kIGlzIHVzZWQgdG8gZGV0ZWN0IHRoZSBwcmVzZW5jZSBvZi' + `\n 'B0aGUgY29tcG9uZW50IGFuZCB0byByZXR1cm4gdGhlIHBhdGggdG8gaXQuVW5pcXVlIGlkZW50aWZpZXIgZm9' + `\n 'yIGRpcmVjdG9yeSBlbnRyeSwgcHJpbWFyeSBrZXkuIElmIGEgcHJvcGVydHkgYnkgdGhpcyBuYW1lIGlzIGRl' + `\n 'ZmluZWQsIGl0IGNvbnRhaW5zIHRoZSBmdWxsIHBhdGggdG8gdGhlIGRpcmVjdG9yeS5EaXJlY3RvcnlfUGFyZ' + `\n 'W50UmVmZXJlbmNlIHRvIHRoZSBlbnRyeSBpbiB0aGlzIHRhYmxlIHNwZWNpZnlpbmcgdGhlIGRlZmF1bHQgcG' + `\n 'FyZW50IGRpcmVjdG9yeS4gQSByZWNvcmQgcGFyZW50ZWQgdG8gaXRzZWxmIG9yIHdpdGggYSBOdWxsIHBhcmV' + `\n 'udCByZXByZXNlbnRzIGEgcm9vdCBvZiB0aGUgaW5zdGFsbCB0cmVlLkRlZmF1bHREaXJUaGUgZGVmYXVsdCBz' + `\n 'dWItcGF0aCB1bmRlciBwYXJlbnQncyBwYXRoLkZlYXR1cmVQcmltYXJ5IGtleSB1c2VkIHRvIGlkZW50aWZ5I' + `\n 'GEgcGFydGljdWxhciBmZWF0dXJlIHJlY29yZC5GZWF0dXJlX1BhcmVudE9wdGlvbmFsIGtleSBvZiBhIHBhcm' + `\n 'VudCByZWNvcmQgaW4gdGhlIHNhbWUgdGFibGUuIElmIHRoZSBwYXJlbnQgaXMgbm90IHNlbGVjdGVkLCB0aGV' + `\n 'uIHRoZSByZWNvcmQgd2lsbCBub3QgYmUgaW5zdGFsbGVkLiBOdWxsIGluZGljYXRlcyBhIHJvb3QgaXRlbS5U' + `\n 'aXRsZVNob3J0IHRleHQgaWRlbnRpZnlpbmcgYSB2aXNpYmxlIGZlYXR1cmUgaXRlbS5Mb25nZXIgZGVzY3Jpc' + `\n 
'HRpdmUgdGV4dCBkZXNjcmliaW5nIGEgdmlzaWJsZSBmZWF0dXJlIGl0ZW0uRGlzcGxheU51bWVyaWMgc29ydC' + `\n 'BvcmRlciwgdXNlZCB0byBmb3JjZSBhIHNwZWNpZmljIGRpc3BsYXkgb3JkZXJpbmcuTGV2ZWxUaGUgaW5zdGF' + `\n 'sbCBsZXZlbCBhdCB3aGljaCByZWNvcmQgd2lsbCBiZSBpbml0aWFsbHkgc2VsZWN0ZWQuIEFuIGluc3RhbGwg' + `\n 'bGV2ZWwgb2YgMCB3aWxsIGRpc2FibGUgYW4gaXRlbSBhbmQgcHJldmVudCBpdHMgZGlzcGxheS5VcHBlckNhc' + `\n '2VUaGUgbmFtZSBvZiB0aGUgRGlyZWN0b3J5IHRoYXQgY2FuIGJlIGNvbmZpZ3VyZWQgYnkgdGhlIFVJLiBBIG' + `\n '5vbi1udWxsIHZhbHVlIHdpbGwgZW5hYmxlIHRoZSBicm93c2UgYnV0dG9uLjA7MTsyOzQ7NTs2Ozg7OTsxMDs' + `\n 'xNjsxNzsxODsyMDsyMTsyMjsyNDsyNTsyNjszMjszMzszNDszNjszNzszODs0ODs0OTs1MDs1Mjs1Mzs1NEZl' + `\n 'YXR1cmUgYXR0cmlidXRlc0ZlYXR1cmVDb21wb25lbnRzRmVhdHVyZV9Gb3JlaWduIGtleSBpbnRvIEZlYXR1c' + `\n 'mUgdGFibGUuQ29tcG9uZW50X0ZvcmVpZ24ga2V5IGludG8gQ29tcG9uZW50IHRhYmxlLkZpbGVQcmltYXJ5IG' + `\n 'tleSwgbm9uLWxvY2FsaXplZCB0b2tlbiwgbXVzdCBtYXRjaCBpZGVudGlmaWVyIGluIGNhYmluZXQuICBGb3I' + `\n 'gdW5jb21wcmVzc2VkIGZpbGVzLCB0aGlzIGZpZWxkIGlzIGlnbm9yZWQuRm9yZWlnbiBrZXkgcmVmZXJlbmNp' + `\n 'bmcgQ29tcG9uZW50IHRoYXQgY29udHJvbHMgdGhlIGZpbGUuRmlsZU5hbWVGaWxlbmFtZUZpbGUgbmFtZSB1c' + `\n '2VkIGZvciBpbnN0YWxsYXRpb24sIG1heSBiZSBsb2NhbGl6ZWQuICBUaGlzIG1heSBjb250YWluIGEgInNob3' + `\n 'J0IG5hbWV8bG9uZyBuYW1lIiBwYWlyLkZpbGVTaXplU2l6ZSBvZiBmaWxlIGluIGJ5dGVzIChsb25nIGludGV' + `\n 'nZXIpLlZlcnNpb25WZXJzaW9uIHN0cmluZyBmb3IgdmVyc2lvbmVkIGZpbGVzOyAgQmxhbmsgZm9yIHVudmVy' + `\n 'c2lvbmVkIGZpbGVzLkxhbmd1YWdlTGlzdCBvZiBkZWNpbWFsIGxhbmd1YWdlIElkcywgY29tbWEtc2VwYXJhd' + `\n 'GVkIGlmIG1vcmUgdGhhbiBvbmUuSW50ZWdlciBjb250YWluaW5nIGJpdCBmbGFncyByZXByZXNlbnRpbmcgZm' + `\n 'lsZSBhdHRyaWJ1dGVzICh3aXRoIHRoZSBkZWNpbWFsIHZhbHVlIG9mIGVhY2ggYml0IHBvc2l0aW9uIGluIHB' + `\n 'hcmVudGhlc2VzKVNlcXVlbmNlIHdpdGggcmVzcGVjdCB0byB0aGUgbWVkaWEgaW1hZ2VzOyBvcmRlciBtdXN0' + `\n 'IHRyYWNrIGNhYmluZXQgb3JkZXIuSW5zdGFsbEV4ZWN1dGVTZXF1ZW5jZUluc3RhbGxVSVNlcXVlbmNlTGF1b' + `\n 'mNoQ29uZGl0aW9uRXhwcmVzc2lvbiB3aGljaCBtdXN0IGV2YWx1YXRlIHRvIFRSVUUgaW4gb3JkZXIgZm9yIG' + `\n 
'luc3RhbGwgdG8gY29tbWVuY2UuRm9ybWF0dGVkTG9jYWxpemFibGUgdGV4dCB0byBkaXNwbGF5IHdoZW4gY29' + `\n 'uZGl0aW9uIGZhaWxzIGFuZCBpbnN0YWxsIG11c3QgYWJvcnQuTWVkaWFEaXNrSWRQcmltYXJ5IGtleSwgaQgA' + `\n 'AgAIAAIACAACAAoAFgANAAEADgABAAMAAQAeAAEAAQAnABUAAQAVAAEANgABACQAAQD1AAEADwABAAQACQAgA' + `\n 'AEAFQABABQABwAGAAoAQgAFAAkAFQCfAAUACAAMAG8ABQAPAAcAEwAHAAkAEgA7AAEACwACAAQAAgA+AAEACg' + `\n 'AEAAkADADSAAEACgAIACcAAQDoAAEABwACABwAAQDjAAEAhgABABAAAgCmAAEACgADACkAAQAHABUAOQABAA4' + `\n 'AAgCUAAEABQACAC4AAQA6AAEABwACAD4AAQAFAAIAgQABAAkAAgBrAAEAUQABABIAAQARAAUACAACAB8AAQAK' + `\n 'AAYAIQABAAQAFABzAAEAOQABAAgAAgAIAAEAYwABAAgAAgAlAAEABwADAEEAAQAIAAYAPwABAHYAAQBKAAEAF' + `\n 'gAHABEABwAPAAUASAABAAkABABIAAEABQANAAYAAgA3AAEADAACADYAAQAKAAIAhAABAAcAAwBmAAEACwACAC' + `\n 'MAAQAGAAIACAAIADcAAQA+AAEAMAABAAgADwAhAAEABAACAD8AAQADAAIABwABAB8AAQAYAAEAEwABAG4AAQA' + `\n 'HAA8ACwADADsAAQAKAAIAfgABAAoAAgB+AAEAYAABACMAAQAGAAIAYAABAA4AAgA4AAEADgAFAAgABAAMAAUA' + `\n 'DwADABEAAwATAAEADAABAA8AAwANAAIADwACAA4AAgAQAAMAJgABAA0AAgAOAAIAEgACABgAAQAJAAIAAQABA' + `\n 'AkAAQAOAAIADwABABMAAgAQAAIAEQACABQAAgARAAEAEQABABQAAQATAAEADAABAA8AAQAWAAEAGgABADYAAQ' + `\n 'AIAAEAAQABAAwAAQAnAAEACwABACYAAQAPAAEABAABAAsAAQASAAEADgABAAcAAwAmAAMAFgABACsAAQAKAAE' + `\n 'AdgABABAAAQAKAAEAGwABABQAAQAWAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + `\n 'AAAAAAAAAAAAAAAAAAA='\n #endregion\n\n $msiContentInBytes = [System.Convert]::FromBase64String($msiContentInBase64)\n\n Set-Content -Path $DestinationPath -Value $msiContentInBytes -Encoding 'Byte' | Out-Null\n}\n\n<#\n .SYNOPSIS\n Clears the Package cache where files are downloaded from the file server when applicable.\n#>\nfunction Clear-PackageCache\n{\n [CmdletBinding()]\n param ()\n\n $packageCacheLocation = \"$env:ProgramData\\Microsoft\\Windows\\PowerShell\\Configuration\\\" + `\n 'BuiltinProvCache\\MSFT_xPackageResource'\n\n Remove-Item -Path $packageCacheLocation -ErrorAction 'SilentlyContinue' -Recurse\n}\n\n<#\n .SYNOPSIS\n Tests if the package with the given name is installed.\n\n .PARAMETER Name\n The name of the package to test for.\n#>\nfunction Test-PackageInstalledByName\n{\n [OutputType([System.Boolean])]\n [CmdletBinding()]\n param\n (\n [Parameter()]\n [System.String]\n $Name\n )\n\n $uninstallRegistryKey = 'HKLM:\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall'\n $uninstallRegistryKeyWow64 = 'HKLM:\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows\\CurrentVersion\\Uninstall'\n\n $productEntry = $null\n\n foreach ($registryKeyEntry in (Get-ChildItem -Path @( $uninstallRegistryKey, $uninstallRegistryKeyWow64) -ErrorAction 'Ignore' ))\n {\n if ($Name -eq (Get-LocalizedRegistryKeyValue -RegistryKey $registryKeyEntry -ValueName 'DisplayName'))\n {\n $productEntry = $registryKeyEntry\n break\n }\n }\n\n return ($null -ne $productEntry)\n}\n\n<#\n .SYNOPSIS\n Retrieves a localized registry key value.\n\n .PARAMETER RegistryKey\n The registry key to retrieve the value from.\n\n .PARAMETER ValueName\n The name of the value to retrieve.\n#>\nfunction 
Get-LocalizedRegistryKeyValue\n{\n [CmdletBinding()]\n param\n (\n [Parameter()]\n [System.Object]\n $RegistryKey,\n\n [Parameter(Mandatory = $true)]\n [ValidateNotNullOrEmpty()]\n [System.String]\n $ValueName\n )\n\n $localizedRegistryKeyValue = $RegistryKey.GetValue('{0}_Localized' -f $ValueName)\n\n if ($null -eq $localizedRegistryKeyValue)\n {\n $localizedRegistryKeyValue = $RegistryKey.GetValue($ValueName)\n }\n\n return $localizedRegistryKeyValue\n}\n\n<#\n .SYNOPSIS\n Creates a new test executable.\n\n .PARAMETER DestinationPath\n The path at which to create the test executable.\n#>\nfunction New-TestExecutable\n{\n [CmdletBinding()]\n param\n (\n [Parameter(Mandatory = $true)]\n [ValidateNotNullOrEmpty()]\n [System.String]\n $DestinationPath\n )\n\n if (Test-Path -Path $DestinationPath)\n {\n Write-Verbose -Message \"Removing old executable at $DestinationPath...\"\n Remove-Item -Path $DestinationPath -Force\n }\n\n $testExecutableCode = @'\n using System;\n using System.Collections.Generic;\n using System.Linq;\n using System.Management;\n using System.Text;\n using System.Threading.Tasks;\n using System.Management.Automation;\n using System.Management.Automation.Runspaces;\n using System.Runtime.InteropServices;\n namespace Providers.Package.UnitTests.MySuite\n {\n class ExeTestClass\n {\n public static void Main(string[] args)\n {\n string cmdline = System.Environment.CommandLine;\n Console.WriteLine(\"Cmdline was \" + cmdline);\n int endIndex = cmdline.IndexOf(\"\\\"\", 1);\n string self = cmdline.Substring(0, endIndex);\n string other = cmdline.Substring(self.Length + 1);\n string msiexecpath = System.IO.Path.Combine(System.Environment.SystemDirectory, \"msiexec.exe\");\n\n self = self.Replace(\"\\\"\", \"\");\n string packagePath = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(self), \"DSCSetupProject.msi\");\n\n string msiexecargs = String.Format(\"/i \\\"{0}\\\" {1}\", packagePath, other);\n System.Diagnostics.Process.Start(msiexecpath, 
msiexecargs).WaitForExit();\n }\n }\n }\n'@\n\n Add-Type -TypeDefinition $testExecutableCode -OutputAssembly $DestinationPath -OutputType 'ConsoleApplication'\n}\n\nExport-ModuleMember -Function `\n New-TestMsi, `\n Clear-PackageCache, `\n New-TestExecutable, `\n Start-Server, `\n Stop-Server, `\n Test-PackageInstalledByName, `\n Test-PackageInstalledById, `\n Stop-EveryTestServerInstance\n", "meta": {"content_hash": "85d124a5a34af959d579a0150b1aa61a", "timestamp": "", "source": "github", "line_count": 1324, "max_line_length": 168, "avg_line_length": 58.58459214501511, "alnum_prop": 0.6052265167728128, "repo_name": "PowerShell/xPSDesiredStateConfiguration", "id": "e340a9674f367714b38aef81eb9ad5725d0b06ac", "size": "77566", "binary": false, "copies": "1", "ref": "refs/heads/Issue-729", "path": "tests/TestHelpers/DSC_xPackageResource.TestHelper.psm1", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "7766"}, {"name": "PowerShell", "bytes": "2735024"}]}} {"text": "local GUI = {\n\t-- GUI Survival\n\t{type = 'header', text = 'Survival', align = 'center'},\n\t{type = 'checkbox', text = 'Enable Lay on Hands', key = 'S_LOHE', default = true},\n\t{type = 'spinner', text = '', key = 'S_LOH', default = 10},\n\t{type = 'checkbox', text = 'Enable Flash of Light', key = 'S_FOLE', default = true},\n\t{type = 'spinner', text = '', key = 'S_FOL', default = 40},\n\t{type = 'checkbox', text = 'Enable Shield of Vengeance', key = 'S_SOVE', default = true},\n\t{type = 'spinner', text = '', key = 'S_SOV', default = 75},\n\t{type = 'checkbox', text = 'Enable Eye for an Eye', key = 'S_EFAEE', default = true},\n\t{type = 'spinner', text = '', key = 'S_EFAE', default = 90},\n\t--{type = 'checkbox', text = 'Enable Every Man for Himself (Stun)', key = 'S_EMFHSE', default = true},\n\t--{type = 'checkbox', text = 'Enable Blessing of Freedom (Root)', key = 'S_BOFRE', default = true},\n\t--{type = 'checkbox', text = 'Enable Blessing of Freedom (Snare)', key = 'S_BOFSE', 
default = true},\n\t{type = 'checkbox', text = 'Enable Gift of the Naaru', key = 'S_GOTNE', default = true},\n\t{type = 'spinner', text = '', key = 'S_GOTN', default = 40},\n\t{type = 'checkbox', text = 'Enable Healthstone', key = 'S_HSE', default = true},\n\t{type = 'spinner', text = '', key = 'S_HS', default = 20},\n\t{type = 'checkbox', text = 'Enable Ancient Healing Potion', key = 'S_AHPE', default = true},\n\t{type = 'spinner', text = '', key = 'S_AHP', default = 20},\n\t{type = 'ruler'},{type = 'spacer'},\n\n\t-- GUI Emergency Group Assistance\n\t{type = 'header', text = 'Emergency Group Assistance', align = 'center'},\n\t{type = 'checkbox', text = 'Enable Emergency Group Assistance', key = 'E_GAE', default = false},\n\t{type = 'text', text = 'Flash of Light'},\n\t{type = 'spinner', text = '', key = 'E_FOL', default = 35},\n\t{type = 'text', text = 'Lay on Hands'},\n\t{type = 'spinner', text = '', key = 'E_LOH', default = 10},\n\t{type = 'text', text = 'Blessing of Protection'},\n\t{type = 'spinner', text = '', key = 'E_BOP', default = 10},\n\t{type = 'ruler'},{type = 'spacer'},\n\n\t-- GUI Blessings\n\t{type = 'header', text = 'Blessings', align = 'center'},\n\t{type = 'text', text = 'Check to enable blessings on yourself'},\n\t{type = 'checkbox', text = 'Blessing of Kings', key = 'B_BOKP', default = false},\n\t{type = 'checkbox', text = 'Blessing of Wisdom', key = 'B_BOWP', default = false},\n\t{type = 'ruler'},{type = 'spacer'},\n}\n\nlocal exeOnLoad = function()\n\t-- Rotation loaded message.\n\tprint('|cfff58cba ----------------------------------------------------------------------|r')\n\tprint('|cfff58cba --- |rPaladin: |cfff58cbaRETRIBUTION|r')\n\tprint('|cfff58cba --- |rTalents: 1/2 - 2/2 - 3/1 - 4/2 - 5/2 - 6/1 - 7/2|r')\n\tprint('|cfff58cba --- |rNo Multitarget (AoE) enabled settings|r')\n\tprint('|cfff58cba ----------------------------------------------------------------------|r')\n\tprint('|cffff0000 Configuration: |rRight-click the MasterToggle 
and go to Combat Routines Settings|r')\n\n\tNeP.Interface:AddToggle({\n\t\t-- Cleanse Toxin\n\t\tkey = 'yuCT',\n\t\tname = 'Cleanse Toxin',\n\t\ttext = 'Enable/Disable: Automatic removal of Poison and Diseases',\n\t\ticon = 'Interface\\\\ICONS\\\\spell_holy_renew',\n\t})\n\tNeP.Interface:AddToggle({\n\t\t-- Emergency Group Assistance\n\t\tkey = 'yuEGA',\n\t\tname = 'Emergency Group Assistance',\n\t\ttext = 'Enable/Disable: Automatic LoH/BoP/FoL on group members',\n\t\ticon = 'Interface\\\\ICONS\\\\ability_fiegndead',\n\t})\nend\n\nlocal Survival = {\n\t-- Lay on Hands usage if enabled in UI.\n\t{'&Lay on Hands', 'UI(S_LOHE)&{!player.debuff(Ignite Soul)}&player.health<=UI(S_LOH)'},\n\t-- Shield of Vengeance usage if enabled in UI.\n\t{'&Shield of Vengeance', 'UI(S_SOVE)&player.health<=UI(S_SOV)'},\n\t-- Eye for an Eye usage if enabled in UI.\n\t{'Eye for an Eye', 'talent(5,2)&UI(S_EFAEE)&player.health<=UI(S_EFAE)'},\n\t-- Every Man for Himself usage if enabled in UI.\n\t--{'&Every Man for Himself', 'UI(S_EMFHSE)&player.state(stun)'},\n\t-- Blessing of Freedom usage if enabled in UI.\n\t--{'!Blessing of Freedom', 'UI(S_BOFRE)&player.state(root)'},\n\t-- Blessing of Freedom usage if enabled in UI.\n\t--{'!Blessing of Freedom', 'UI(S_BOFSE)&player.state(snare)'},\n\t-- Gift of the Naaru usage if enabled in UI.\n\t{'&Gift of the Naaru', 'UI(S_GOTNE)&{!player.debuff(Ignite Soul)}&player.health<=UI(S_GOTN)'},\n\t-- Healthstone usage if enabled in UI.\n\t{'#5512', 'UI(S_HSE)&{!player.debuff(Ignite Soul)}&player.health<=UI(S_HS)'},\n\t-- Ancient Healing Potion usage if enabled in UI.\n\t{'#127834', 'UI(S_AHPE)&{!player.debuff(Ignite Soul)}&player.health<=UI(S_AHP)'},\n}\n\nlocal Player = {\n\t-- Flash of Light usage if enabled in UI.\n\t{'!Flash of Light', 'UI(S_FOLE)&{!player.debuff(Ignite Soul)}&player.health<=UI(S_FOL)', 'player'},\n}\n\nlocal Emergency = {\n\t-- Flash of Light usage if enabled in UI.\n\t{'!Flash of Light', 'UI(E_GAE)&{!lowest.debuff(Ignite 
Soul)}&lowest.health<=UI(E_FOL)', 'lowest'},\n\t-- Lay on Hands usage if enabled in UI.\n\t{'!Lay on Hands', 'UI(E_GAE)&{!lowest.debuff(Ignite Soul)}&lowest.health<=UI(E_LOH)', 'lowest'},\n\t-- Blessing of Protection usage if enabled in UI.\n\t{'!Blessing of Protection', 'UI(E_GAE)&{!lowest.debuff(Ignite Soul)}&lowest.health<=UI(E_BOP)', 'lowest'},\n}\n\nlocal Interrupts = {\n\t{'&Rebuke', 'target.range<=5'},\n\t{'Hammer of Justice', '!equipped(Justice Gaze)&target.range<=10&spell(Rebuke).cooldown>gcd&!lastgcd(Rebuke)'},\n\t{'Hammer of Justice', 'equipped(Justice Gaze)&target.health>=75&target.range<=10&spell(Rebuke).cooldown>gcd&!lastgcd(Rebuke)'},\n\t{'Blinding Light', 'talent(3,3)&target.range<=10&spell(Rebuke).cooldown>gcd&!lastgcd(Rebuke)'},\n\t{'&Arcane Torrent', 'target.range<=8&spell(Rebuke).cooldown>gcd&!lastgcd(Rebuke)'},\n}\n\nlocal Dispel = {\n\t{'%dispelself'},\n}\n\nlocal Blessings = {\n\t{'Greater Blessing of Kings', 'UI(B_BOKP)&!player.buff(Greater Blessing of Kings)', 'player'},\n\t{'Greater Blessing of Wisdom', 'UI(B_BOWP)&!player.buff(Greater Blessing of Wisdom)', 'player'},\n}\n\n-- ####################################################################################\n-- Primairly sourced from legion-dev SimC.\n-- Updates to rotations from sources are considered for implementation.\n-- ####################################################################################\n\n-- SimC APL 4/20/2017\n-- https://github.com/simulationcraft/simc/blob/legion-dev/profiles/Tier19M/Paladin_Retribution_T19M.simc\n\nlocal Cooldowns = {\n\t--actions+=/arcane_torrent,if=holy_power<5&(buff.crusade.up|buff.avenging_wrath.up|time<2)\n\t{'&Arcane Torrent', 'holypower<5&{player.buff(Crusade)||player.buff(Avenging Wrath)||combat(player).time<2}'},\n\t--actions+=/holy_wrath\n\t{'Holy Wrath', 'talent(7,3)'},\n\t--actions+=/avenging_wrath\n\t{'&Avenging Wrath', 
'!talent(7,2)'},\n\t--actions+=/crusade,if=holy_power>=5&!equipped.137048|((equipped.137048|race.blood_elf)&holy_power>=2)\n\t{'&Crusade', 'talent(7,2)&{holypower>=5&!equipped(Liadrin\\'s Fury Unleashed)||{equipped(Liadrin\\'s Fury Unleashed)&holypower>=2}}'},\n}\n\nlocal Combat = {\n\t{'/startattack', '!isattacking'},\n\t--actions+=/judgment,if=time<2\n\t{'Judgment', 'combat(player).time<2'},\n\t--actions+=/blade_of_justice,if=time<2&(equipped.137048|race.blood_elf)\n\t{'Blade of Justice', 'combat(player).time<2&equipped(Liadrin\\'s Fury Unleashed)'},\n\t--actions+=/divine_hammer,if=time<2&(equipped.137048|race.blood_elf)\n\t{'Divine Hammer', 'talent(4,3)&combat(player).time<2&equipped(Liadrin\\'s Fury Unleashed)'},\n\t--actions+=/wake_of_ashes,if=holy_power<=1&time<2\n\t{'Wake of Ashes', 'holypower<=1&combat(player).time<2'},\n\t--actions+=/execution_sentence,if=spell_targets.divine_storm<=3&(cooldown.judgment.remainsgcd*4.5)&(!talent.crusade.enabled|cooldown.crusade.remains>gcd*2)\n\t{'Execution Sentence','talent(1,2)&player.area(8).enemies<=3&{spell(Judgment).cooldowngcd*4.5}&{!talent(7,2)||talent(7,2)&!toggle(cooldowns)||spell(Crusade).cooldown>gcd*2}'},\n\t--actions+=/divine_storm,if=debuff.judgment.up&spell_targets.divine_storm>=2&buff.divine_purpose.up&buff.divine_purpose.remains=2&player.buff(Divine Purpose).duration=2&holy_power>=5&buff.divine_purpose.react\n\t{'Divine Storm', 'target.debuff(Judgment)&player.area(8).enemies>=2&holypower>=5&player.buff(Divine Purpose)'},\n\t--actions+=/divine_storm,if=debuff.judgment.up&spell_targets.divine_storm>=2&holy_power>=3&(buff.crusade.up&(buff.crusade.stack<15|buff.bloodlust.up)|buff.liadrins_fury_unleashed.up)\n\t{'Divine Storm', 'target.debuff(Judgment)&player.area(8).enemies>=2&holypower>=3&{player.buff(Crusade)&{player.buff(Crusade).count<15||hashero}||player.buff(Liadrin\\'s Fury 
Unleashed)}'},\n\t--actions+=/divine_storm,if=debuff.judgment.up&spell_targets.divine_storm>=2&holy_power>=5&(!talent.crusade.enabled|cooldown.crusade.remains>gcd*3)\n\t{'Divine Storm', 'target.debuff(Judgment)&player.area(8).enemies>=2&holypower>=5&{!talent(7,2)||talent(7,2)&!toggle(cooldowns)||spell(Crusade).cooldown>gcd*3}'},\n\t--actions+=/templars_verdict,if=debuff.judgment.up&buff.divine_purpose.up&buff.divine_purpose.remains=5&buff.divine_purpose.react\n\t{'Templar\\'s Verdict', 'target.debuff(Judgment)&holypower>=5&player.buff(Divine Purpose)'},\n\t--actions+=/templars_verdict,if=debuff.judgment.up&holy_power>=3&(buff.crusade.up&(buff.crusade.stack<15|buff.bloodlust.up)|buff.liadrins_fury_unleashed.up)\n\t{'Templar\\'s Verdict', 'target.debuff(Judgment)&holypower>=3&{player.buff(Crusade)&{player.buff(Crusade).count<15||hashero}||player.buff(Liadrin\\'s Fury Unleashed)}'},\n\t--actions+=/templars_verdict,if=debuff.judgment.up&holy_power>=5&(!talent.crusade.enabled|cooldown.crusade.remains>gcd*3)&(!talent.execution_sentence.enabled|cooldown.execution_sentence.remains>gcd)\n\t{'Templar\\'s Verdict', 'target.debuff(Judgment)&holypower>=5&{!talent(7,2)||talent(7,2)&!toggle(cooldowns)||spell(Crusade).cooldown>gcd*3}&{!talent(1,2)||spell(Execution Sentence).cooldown>gcd}'},\n\t--actions+=/divine_storm,if=debuff.judgment.up&holy_power>=3&spell_targets.divine_storm>=2&(cooldown.wake_of_ashes.remainsgcd*4)\n\t{'Divine Storm', 'target.debuff(Judgment)&holypower>=3&player.area(8).enemies>=2&{spell(Wake of Ashes).cooldowngcd*4}'},\n\t--actions+=/templars_verdict,if=debuff.judgment.up&holy_power>=3&(cooldown.wake_of_ashes.remainsgcd*4)\n\t{'Templar\\'s Verdict', 'target.debuff(Judgment)&holypower>=3&{spell(Wake of 
Ashes).cooldowngcd*4}'},\n\t--actions+=/judgment,if=dot.execution_sentence.ticking&dot.execution_sentence.remains15)&(holy_power=0|holy_power=1&(cooldown.blade_of_justice.remains>gcd|cooldown.divine_hammer.remains>gcd)|holy_power=2&(cooldown.zeal.charges_fractional<=0.65|cooldown.crusader_strike.charges_fractional<=0.65))\n\t{'Wake of Ashes', 'holypower=0||holypower=1&{spell(Blade of Justice).cooldown>gcd||spell(Divine Hammer).cooldown>gcd}||holypower=2&{spell(Zeal).charges<=0.65||spell(Crusader Strike).charges<=0.65}'},\n\t--actions+=/blade_of_justice,if=(holy_power<=2&set_bonus.tier20_2pc=1|holy_power<=3&set_bonus.tier20_2pc=0)\n\t{'Blade of Justice', 'holypower<=2&set_bonus(T20)>=2||holypower<=3&set_bonus(T20)=0'},\n\t--actions+=/divine_hammer,if=(holy_power<=2&set_bonus.tier20_2pc=1|holy_power<=3&set_bonus.tier20_2pc=0)\n\t{'Divine Hammer', 'holypower<=2&set_bonus(T20)>=2||holypower<=3&set_bonus(T20)=0'},\n\t--actions+=/hammer_of_justice,if=equipped.137065&target.health.pct>=75&holy_power<=4\n\t{'Hammer of Justice', 'equipped(Justice Gaze)&target.health>=75&holypower<=4'},\n\t--actions+=/judgment\n\t{'Judgment'},\n\t--actions+=/zeal,if=charges=2&(set_bonus.tier20_2pc=0&holy_power<=2|(holy_power<=4&(cooldown.divine_hammer.remains>gcd*2|cooldown.blade_of_justice.remains>gcd*2)&cooldown.judgment.remains>gcd*2))|(set_bonus.tier20_2pc=1&holy_power<=1|(holy_power<=4&(cooldown.divine_hammer.remains>gcd*2|cooldown.blade_of_justice.remains>gcd*2)&cooldown.judgment.remains>gcd*2))\n\t{'Zeal', 'talent(2,2)&spell(Zeal).charges=2&{set_bonus(T20)=0&holypower<=2||{holypower<=4&{spell(Divine Hammer).cooldown>gcd*2||spell(Blade of Justice).cooldown>gcd*2}&spell(Judgment).cooldown>gcd*2}}||{set_bonus(T20)>=2&holypower<=1||{holypower<=4&{spell(Divine Hammer).cooldown>gcd*2||spell(Blade of 
Justice).cooldown>gcd*2}&spell(Judgment).cooldown>gcd*2}}'},\n\t--actions+=/crusader_strike,if=charges=2&(set_bonus.tier20_2pc=0&holy_power<=2|(holy_power<=4&(cooldown.divine_hammer.remains>gcd*2|cooldown.blade_of_justice.remains>gcd*2)&cooldown.judgment.remains>gcd*2))|(set_bonus.tier20_2pc=1&holy_power<=1|(holy_power<=4&(cooldown.divine_hammer.remains>gcd*2|cooldown.blade_of_justice.remains>gcd*2)&cooldown.judgment.remains>gcd*2))\n\t{'Crusader Strike', '!talent(2,2)&spell(Crusader Strike).charges=2&{set_bonus(T20)=0&holypower<=2||{holypower<=4&{spell(Divine Hammer).cooldown>gcd*2||spell(Blade of Justice).cooldown>gcd*2}&spell(Judgment).cooldown>gcd*2}}||{set_bonus(T20)>=2&holypower<=1||{holypower<=4&{spell(Divine Hammer).cooldown>gcd*2||spell(Blade of Justice).cooldown>gcd*2}&spell(Judgment).cooldown>gcd*2}}'},\n\t--actions+=/consecration\n\t{'Consecration', 'talent(1,3)'},\n\t--actions+=/divine_storm,if=debuff.judgment.up&spell_targets.divine_storm>=2&buff.divine_purpose.react\n\t{'Divine Storm', 'target.debuff(Judgment)&player.area(8).enemies>=2&player.buff(Divine Purpose)'},\n\t--actions+=/divine_storm,if=debuff.judgment.up&spell_targets.divine_storm>=2&buff.the_fires_of_justice.react&(!talent.crusade.enabled|cooldown.crusade.remains>gcd*3)\n\t{'Divine Storm', 'target.debuff(Judgment)&player.area(8).enemies>=2&player.buff(The Fires of Justice)&{!talent(7,2)||talent(7,2)&!toggle(cooldowns)||spell(Crusade).cooldown>gcd*3}'},\n\t--actions+=/divine_storm,if=debuff.judgment.up&spell_targets.divine_storm>=2&holy_power>=4&(!talent.crusade.enabled|cooldown.crusade.remains>gcd*4)\n\t{'Divine Storm', 'target.debuff(Judgment)&player.area(8).enemies>=2&holypower>=4&{!talent(7,2)||talent(7,2)&!toggle(cooldowns)||spell(Crusade).cooldown>gcd*4}'},\n\t--actions+=/templars_verdict,if=debuff.judgment.up&buff.divine_purpose.react\n\t{'Templar\\'s Verdict', 'target.debuff(Judgment)&player.buff(Divine 
Purpose)'},\n\t--actions+=/templars_verdict,if=debuff.judgment.up&buff.the_fires_of_justice.react&(!talent.crusade.enabled|cooldown.crusade.remains>gcd*3)\n\t{'Templar\\'s Verdict', 'target.debuff(Judgment)&player.buff(The Fires of Justice)&{!talent(7,2)||talent(7,2)&!toggle(cooldowns)||spell(Crusade).cooldown>gcd*3}'},\n\t--actions+=/templars_verdict,if=debuff.judgment.up&holy_power>=4&(!talent.crusade.enabled|cooldown.crusade.remains>gcd*4)&(!talent.execution_sentence.enabled|cooldown.execution_sentence.remains>gcd*2)\n\t{'Templar\\'s Verdict', 'target.debuff(Judgment)&holypower>=4&{!talent(7,2)||talent(7,2)&!toggle(cooldowns)||spell(Crusade).cooldown>gcd*4}&{!talent(1,2)||spell(Execution Sentence).cooldown>gcd*2}'},\n\t--actions+=/zeal,if=holy_power<=4\n\t{'Zeal', 'talent(2,2)&holypower<=4'},\n\t--actions+=/crusader_strike,if=holy_power<=4\n\t{'Crusader Strike', '!talent(2,2)&holypower<=4'},\n\t--actions+=/divine_storm,if=debuff.judgment.up&holy_power>=3&spell_targets.divine_storm>=2&(!talent.crusade.enabled|cooldown.crusade.remains>gcd*5)\n\t{'Divine Storm', 'target.debuff(Judgment)&holypower>=3&player.area(8).enemies>=2&{!talent(7,2)||talent(7,2)&!toggle(cooldowns)||spell(Crusade).cooldown>gcd*5}'},\n\t--actions+=/templars_verdict,if=debuff.judgment.up&holy_power>=3&(!talent.crusade.enabled|cooldown.crusade.remains>gcd*5)\n\t{'Templar\\'s Verdict', 'target.debuff(Judgment)&holypower>=3&{!talent(7,2)||talent(7,2)&!toggle(cooldowns)||spell(Crusade).cooldown>gcd*5}'},\n}\n\nlocal inCombat = {\n\t{Dispel, '{!moving||moving}&toggle(yuCT)&spell(Cleanse Toxins).cooldown=0'},\n\t{Survival, '{!moving||moving}'},\n\t{Blessings, '{!moving||moving}'},\n\t{Player, '!moving&{!ingroup||ingroup}'},\n\t{Emergency, '!moving&ingroup&toggle(yuEGA)'},\n\t{Interrupts, '{!moving||moving}&toggle(interrupts)&target.interruptAt(70)&target.infront'},\n\t{Cooldowns, '{!moving||moving}&toggle(cooldowns)'},\n\t{Combat, '{!moving||moving}&target.infront&target.range<=8'},\n}\n\nlocal 
outCombat = {\n\t{Dispel, '{!moving||moving}&toggle(yuCT)&spell(Cleanse Toxins).cooldown=0'},\n\t{Interrupts, '{!moving||moving}&toggle(interrupts)&target.interruptAt(70)&target.infront'},\n\t{Blessings, '{!moving||moving}'},\n\t{Emergency, '!moving&ingroup&toggle(yuEGA)'},\n\t{'Flash of Light', '!moving&player.health<=70', 'player'},\n}\n\nNeP.CR:Add(70, {\n\tname = '|r[|cff00fff0Yumad|r] |cfff58cbaPaladin|r - |cfff58cbaRETRIBUTION|r',\n\tic = inCombat,\n\tooc = outCombat,\n\tgui = GUI,\n\tload = exeOnLoad\n})\n", "meta": {"content_hash": "c7616b79a3240f3cbe1e3df5e9f507c4", "timestamp": "", "source": "github", "line_count": 235, "max_line_length": 386, "avg_line_length": 74.63404255319149, "alnum_prop": 0.6929699526768915, "repo_name": "damuY/NerdPack-Yumad", "id": "98f0d77faae8519aed20647b8c750d214e2ff153", "size": "17539", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rotations/paladin/retribution.lua", "mode": "33188", "license": "mit", "language": [{"name": "Lua", "bytes": "105029"}]}} {"text": "import time\n\nimport game\nimport instances\nimport palette\nimport registry\n\nfrom entities import entity\nfrom entities.creatures import player\nfrom statuses import hastestatus\n\n\nclass TwitchChatManager(entity.Entity):\n special_viewers = []\n\n def __init__(self):\n super().__init__(' ')\n self.hidden = True\n self.time_since_last_help_message = 0\n TwitchChatManager.special_viewers = [viewer.lower() for viewer in game.Game.config['TWITCH']['SpecialViewers'].split('\\n') if viewer]\n\n def get_config_color(self, key):\n return tuple([int(d) for d in game.Game.config['TWITCH'][key].strip(' ').split(',')])\n\n def handle_events(self, event):\n current_scene = instances.scene_root\n\n if event.type == 'TWITCHCHATMESSAGE':\n if event.message:\n if event.message.upper() == '!JOIN':\n player_names = [e.name for e in current_scene.children if hasattr(e, 'name')]\n\n bonus = None\n\n if not event.nickname in player_names:\n # Set player color\n 
if 'broadcaster' in event.tags['badges']:\n try:\n player_color = self.get_config_color('BroadcasterColor')\n\n except:\n player_color = palette.get_nearest((255, 163, 0))\n\n elif event.tags['subscriber'] != '0':\n try:\n player_color = self.get_config_color('SubscriberColor')\n\n except:\n player_color = palette.BRIGHT_BLUE\n\n bonus = registry.Registry.get('weapon')()\n\n elif event.nickname.lower() in TwitchChatManager.special_viewers:\n try:\n player_color = self.get_config_color('SpecialViewerColor')\n\n except:\n player_color = palette.BRIGHT_RED\n\n else:\n try:\n player_color = self.get_config_color('ViewerColor')\n\n except:\n player_color = palette.get_nearest((255, 163, 0))\n\n # Add player\n pos = current_scene.get_location_near_stairs()\n p = player.Player(event.nickname[0], pos, fg=player_color)\n p.name = event.nickname\n\n if bonus:\n p.equip_weapon(bonus)\n\n current_scene.append(p)\n instances.console.print('{} has joined!'.format(p.display_string))\n\n elif event.message.upper() == '!LEAVE':\n for e in current_scene.children:\n if not e.isinstance('Player'):\n continue\n\n if e.name == event.nickname:\n e.die()\n instances.console.print('{} has left.'.format(e.display_string))\n\n elif event.message.upper().startswith('!CHEER'):\n s = event.message.split(' ')\n if len(s) <= 1:\n return\n\n player_names = [p.name for p in instances.scene_root.players if p.state != 'PlayerExitedState']\n if event.nickname in player_names:\n return\n\n player_name = s[1].lower()\n if player_name[0] == '@':\n player_name = player_name[1:]\n\n target_player = [p for p in instances.scene_root.players if p.state != 'PlayerExitedState' and p.name == player_name]\n target_player = target_player[0] if target_player else None\n if target_player:\n target_player.add_status(hastestatus.HasteStatus(target_player))\n\n elif event.message.upper() == '!HELP':\n current_time = time.time()\n if current_time - self.time_since_last_help_message > 30:\n help_message = 'Available 
commands: !join !leave !move [uldr] !move @username !stop !attack [uldr] !throw [uldr] !drop !cheer @username'\n instances.game.observer.send_message(help_message, instances.game.channel)\n self.time_since_last_help_message = current_time\n\n", "meta": {"content_hash": "eca08351fb6487a47772453eacbb8a2c", "timestamp": "", "source": "github", "line_count": 111, "max_line_length": 160, "avg_line_length": 41.2972972972973, "alnum_prop": 0.48276614310645727, "repo_name": "JoshuaSkelly/lunch-break-rl", "id": "ef49018a54b21a5b06d0e69a64e788b92e4ecddc", "size": "4584", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "twitchchatmanager.py", "mode": "33188", "license": "mit", "language": [{"name": "Python", "bytes": "69311"}]}} {"text": "namespace {\n\nusing google_breakpad::Minidump;\nusing google_breakpad::MinidumpContext;\nusing google_breakpad::MinidumpException;\nusing google_breakpad::MinidumpMemoryInfo;\nusing google_breakpad::MinidumpMemoryInfoList;\nusing google_breakpad::MinidumpMemoryList;\nusing google_breakpad::MinidumpMemoryRegion;\nusing google_breakpad::MinidumpModule;\nusing google_breakpad::MinidumpModuleList;\nusing google_breakpad::MinidumpSystemInfo;\nusing google_breakpad::MinidumpThread;\nusing google_breakpad::MinidumpThreadList;\nusing google_breakpad::SynthMinidump::Context;\nusing google_breakpad::SynthMinidump::Dump;\nusing google_breakpad::SynthMinidump::Exception;\nusing google_breakpad::SynthMinidump::Memory;\nusing google_breakpad::SynthMinidump::Module;\nusing google_breakpad::SynthMinidump::Stream;\nusing google_breakpad::SynthMinidump::String;\nusing google_breakpad::SynthMinidump::SystemInfo;\nusing google_breakpad::SynthMinidump::Thread;\nusing google_breakpad::test_assembler::kBigEndian;\nusing google_breakpad::test_assembler::kLittleEndian;\nusing std::ifstream;\nusing std::istringstream;\nusing std::vector;\nusing ::testing::Return;\n\nclass MinidumpTest : public ::testing::Test {\npublic:\n void SetUp() {\n 
minidump_file_ = string(getenv(\"srcdir\") ? getenv(\"srcdir\") : \".\") +\n \"/src/processor/testdata/minidump2.dmp\";\n }\n string minidump_file_;\n};\n\nTEST_F(MinidumpTest, TestMinidumpFromFile) {\n Minidump minidump(minidump_file_);\n ASSERT_EQ(minidump.path(), minidump_file_);\n ASSERT_TRUE(minidump.Read());\n const MDRawHeader* header = minidump.header();\n ASSERT_NE(header, (MDRawHeader*)NULL);\n ASSERT_EQ(header->signature, u_int32_t(MD_HEADER_SIGNATURE));\n //TODO: add more checks here\n}\n\nTEST_F(MinidumpTest, TestMinidumpFromStream) {\n // read minidump contents into memory, construct a stringstream around them\n ifstream file_stream(minidump_file_.c_str(), std::ios::in);\n ASSERT_TRUE(file_stream.good());\n vector bytes;\n file_stream.seekg(0, std::ios_base::end);\n ASSERT_TRUE(file_stream.good());\n bytes.resize(file_stream.tellg());\n file_stream.seekg(0, std::ios_base::beg);\n ASSERT_TRUE(file_stream.good());\n file_stream.read(&bytes[0], bytes.size());\n ASSERT_TRUE(file_stream.good());\n string str(&bytes[0], bytes.size());\n istringstream stream(str);\n ASSERT_TRUE(stream.good());\n\n // now read minidump from stringstream\n Minidump minidump(stream);\n ASSERT_EQ(minidump.path(), \"\");\n ASSERT_TRUE(minidump.Read());\n const MDRawHeader* header = minidump.header();\n ASSERT_NE(header, (MDRawHeader*)NULL);\n ASSERT_EQ(header->signature, u_int32_t(MD_HEADER_SIGNATURE));\n //TODO: add more checks here\n}\n\nTEST(Dump, ReadBackEmpty) {\n Dump dump(0);\n dump.Finish();\n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n istringstream stream(contents);\n Minidump minidump(stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(0U, minidump.GetDirectoryEntryCount());\n}\n\nTEST(Dump, ReadBackEmptyBigEndian) {\n Dump big_minidump(0, kBigEndian);\n big_minidump.Finish();\n string contents;\n ASSERT_TRUE(big_minidump.GetContents(&contents));\n istringstream stream(contents);\n Minidump minidump(stream);\n ASSERT_TRUE(minidump.Read());\n 
ASSERT_EQ(0U, minidump.GetDirectoryEntryCount());\n}\n\nTEST(Dump, OneStream) {\n Dump dump(0, kBigEndian);\n Stream stream(dump, 0xfbb7fa2bU);\n stream.Append(\"stream contents\");\n dump.Add(&stream);\n dump.Finish();\n \n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n istringstream minidump_stream(contents);\n Minidump minidump(minidump_stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(1U, minidump.GetDirectoryEntryCount());\n\n const MDRawDirectory *dir = minidump.GetDirectoryEntryAtIndex(0);\n ASSERT_TRUE(dir != NULL);\n EXPECT_EQ(0xfbb7fa2bU, dir->stream_type);\n\n u_int32_t stream_length;\n ASSERT_TRUE(minidump.SeekToStreamType(0xfbb7fa2bU, &stream_length));\n ASSERT_EQ(15U, stream_length);\n char stream_contents[15];\n ASSERT_TRUE(minidump.ReadBytes(stream_contents, sizeof(stream_contents)));\n EXPECT_EQ(string(\"stream contents\"),\n string(stream_contents, sizeof(stream_contents)));\n\n EXPECT_FALSE(minidump.GetThreadList());\n EXPECT_FALSE(minidump.GetModuleList());\n EXPECT_FALSE(minidump.GetMemoryList());\n EXPECT_FALSE(minidump.GetException());\n EXPECT_FALSE(minidump.GetAssertion());\n EXPECT_FALSE(minidump.GetSystemInfo());\n EXPECT_FALSE(minidump.GetMiscInfo());\n EXPECT_FALSE(minidump.GetBreakpadInfo());\n}\n\nTEST(Dump, OneMemory) {\n Dump dump(0, kBigEndian);\n Memory memory(dump, 0x309d68010bd21b2cULL);\n memory.Append(\"memory contents\");\n dump.Add(&memory);\n dump.Finish();\n\n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n istringstream minidump_stream(contents);\n Minidump minidump(minidump_stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(1U, minidump.GetDirectoryEntryCount());\n\n const MDRawDirectory *dir = minidump.GetDirectoryEntryAtIndex(0);\n ASSERT_TRUE(dir != NULL);\n EXPECT_EQ((u_int32_t) MD_MEMORY_LIST_STREAM, dir->stream_type);\n\n MinidumpMemoryList *memory_list = minidump.GetMemoryList();\n ASSERT_TRUE(memory_list != NULL);\n ASSERT_EQ(1U, memory_list->region_count());\n\n 
MinidumpMemoryRegion *region1 = memory_list->GetMemoryRegionAtIndex(0);\n ASSERT_EQ(0x309d68010bd21b2cULL, region1->GetBase());\n ASSERT_EQ(15U, region1->GetSize());\n const u_int8_t *region1_bytes = region1->GetMemory();\n ASSERT_TRUE(memcmp(\"memory contents\", region1_bytes, 15) == 0);\n}\n\n// One thread --- and its requisite entourage.\nTEST(Dump, OneThread) {\n Dump dump(0, kLittleEndian);\n Memory stack(dump, 0x2326a0fa);\n stack.Append(\"stack for thread\");\n\n MDRawContextX86 raw_context;\n const u_int32_t kExpectedEIP = 0x6913f540;\n raw_context.context_flags = MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL;\n raw_context.edi = 0x3ecba80d;\n raw_context.esi = 0x382583b9;\n raw_context.ebx = 0x7fccc03f;\n raw_context.edx = 0xf62f8ec2;\n raw_context.ecx = 0x46a6a6a8;\n raw_context.eax = 0x6a5025e2;\n raw_context.ebp = 0xd9fabb4a;\n raw_context.eip = kExpectedEIP;\n raw_context.cs = 0xbffe6eda;\n raw_context.eflags = 0xb2ce1e2d;\n raw_context.esp = 0x659caaa4;\n raw_context.ss = 0x2e951ef7;\n Context context(dump, raw_context);\n \n Thread thread(dump, 0xa898f11b, stack, context,\n 0x9e39439f, 0x4abfc15f, 0xe499898a, 0x0d43e939dcfd0372ULL);\n \n dump.Add(&stack);\n dump.Add(&context);\n dump.Add(&thread);\n dump.Finish();\n\n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n\n istringstream minidump_stream(contents);\n Minidump minidump(minidump_stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(2U, minidump.GetDirectoryEntryCount());\n\n MinidumpMemoryList *md_memory_list = minidump.GetMemoryList();\n ASSERT_TRUE(md_memory_list != NULL);\n ASSERT_EQ(1U, md_memory_list->region_count());\n\n MinidumpMemoryRegion *md_region = md_memory_list->GetMemoryRegionAtIndex(0);\n ASSERT_EQ(0x2326a0faU, md_region->GetBase());\n ASSERT_EQ(16U, md_region->GetSize());\n const u_int8_t *region_bytes = md_region->GetMemory();\n ASSERT_TRUE(memcmp(\"stack for thread\", region_bytes, 16) == 0);\n\n MinidumpThreadList *thread_list = minidump.GetThreadList();\n 
ASSERT_TRUE(thread_list != NULL);\n ASSERT_EQ(1U, thread_list->thread_count());\n\n MinidumpThread *md_thread = thread_list->GetThreadAtIndex(0);\n ASSERT_TRUE(md_thread != NULL);\n u_int32_t thread_id;\n ASSERT_TRUE(md_thread->GetThreadID(&thread_id));\n ASSERT_EQ(0xa898f11bU, thread_id);\n MinidumpMemoryRegion *md_stack = md_thread->GetMemory();\n ASSERT_TRUE(md_stack != NULL);\n ASSERT_EQ(0x2326a0faU, md_stack->GetBase());\n ASSERT_EQ(16U, md_stack->GetSize());\n const u_int8_t *md_stack_bytes = md_stack->GetMemory();\n ASSERT_TRUE(memcmp(\"stack for thread\", md_stack_bytes, 16) == 0);\n\n MinidumpContext *md_context = md_thread->GetContext();\n ASSERT_TRUE(md_context != NULL);\n ASSERT_EQ((u_int32_t) MD_CONTEXT_X86, md_context->GetContextCPU());\n\n u_int64_t eip;\n ASSERT_TRUE(md_context->GetInstructionPointer(&eip));\n EXPECT_EQ(kExpectedEIP, eip);\n\n const MDRawContextX86 *md_raw_context = md_context->GetContextX86();\n ASSERT_TRUE(md_raw_context != NULL);\n ASSERT_EQ((u_int32_t) (MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL),\n (md_raw_context->context_flags\n & (MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL)));\n EXPECT_EQ(0x3ecba80dU, raw_context.edi);\n EXPECT_EQ(0x382583b9U, raw_context.esi);\n EXPECT_EQ(0x7fccc03fU, raw_context.ebx);\n EXPECT_EQ(0xf62f8ec2U, raw_context.edx);\n EXPECT_EQ(0x46a6a6a8U, raw_context.ecx);\n EXPECT_EQ(0x6a5025e2U, raw_context.eax);\n EXPECT_EQ(0xd9fabb4aU, raw_context.ebp);\n EXPECT_EQ(kExpectedEIP, raw_context.eip);\n EXPECT_EQ(0xbffe6edaU, raw_context.cs);\n EXPECT_EQ(0xb2ce1e2dU, raw_context.eflags);\n EXPECT_EQ(0x659caaa4U, raw_context.esp);\n EXPECT_EQ(0x2e951ef7U, raw_context.ss);\n}\n\nTEST(Dump, ThreadMissingMemory) {\n Dump dump(0, kLittleEndian);\n Memory stack(dump, 0x2326a0fa);\n // Stack has no contents.\n\n MDRawContextX86 raw_context;\n memset(&raw_context, 0, sizeof(raw_context));\n raw_context.context_flags = MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL;\n Context context(dump, raw_context);\n\n 
Thread thread(dump, 0xa898f11b, stack, context,\n 0x9e39439f, 0x4abfc15f, 0xe499898a, 0x0d43e939dcfd0372ULL);\n\n dump.Add(&stack);\n dump.Add(&context);\n dump.Add(&thread);\n dump.Finish();\n\n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n\n istringstream minidump_stream(contents);\n Minidump minidump(minidump_stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(2U, minidump.GetDirectoryEntryCount());\n\n // This should succeed even though the thread has no stack memory.\n MinidumpThreadList* thread_list = minidump.GetThreadList();\n ASSERT_TRUE(thread_list != NULL);\n ASSERT_EQ(1U, thread_list->thread_count());\n\n MinidumpThread* md_thread = thread_list->GetThreadAtIndex(0);\n ASSERT_TRUE(md_thread != NULL);\n\n u_int32_t thread_id;\n ASSERT_TRUE(md_thread->GetThreadID(&thread_id));\n ASSERT_EQ(0xa898f11bU, thread_id);\n\n MinidumpContext* md_context = md_thread->GetContext();\n ASSERT_NE(reinterpret_cast(NULL), md_context);\n\n MinidumpMemoryRegion* md_stack = md_thread->GetMemory();\n ASSERT_EQ(reinterpret_cast(NULL), md_stack);\n}\n\nTEST(Dump, ThreadMissingContext) {\n Dump dump(0, kLittleEndian);\n Memory stack(dump, 0x2326a0fa);\n stack.Append(\"stack for thread\");\n\n // Context is empty.\n Context context(dump);\n\n Thread thread(dump, 0xa898f11b, stack, context,\n 0x9e39439f, 0x4abfc15f, 0xe499898a, 0x0d43e939dcfd0372ULL);\n\n dump.Add(&stack);\n dump.Add(&context);\n dump.Add(&thread);\n dump.Finish();\n\n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n\n istringstream minidump_stream(contents);\n Minidump minidump(minidump_stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(2U, minidump.GetDirectoryEntryCount());\n\n // This should succeed even though the thread has no stack memory.\n MinidumpThreadList* thread_list = minidump.GetThreadList();\n ASSERT_TRUE(thread_list != NULL);\n ASSERT_EQ(1U, thread_list->thread_count());\n\n MinidumpThread* md_thread = thread_list->GetThreadAtIndex(0);\n ASSERT_TRUE(md_thread != 
NULL);\n\n u_int32_t thread_id;\n ASSERT_TRUE(md_thread->GetThreadID(&thread_id));\n ASSERT_EQ(0xa898f11bU, thread_id);\n MinidumpMemoryRegion* md_stack = md_thread->GetMemory();\n ASSERT_NE(reinterpret_cast(NULL), md_stack);\n\n MinidumpContext* md_context = md_thread->GetContext();\n ASSERT_EQ(reinterpret_cast(NULL), md_context);\n}\n\nTEST(Dump, OneModule) {\n static const MDVSFixedFileInfo fixed_file_info = {\n 0xb2fba33a, // signature\n 0x33d7a728, // struct_version\n 0x31afcb20, // file_version_hi\n 0xe51cdab1, // file_version_lo\n 0xd1ea6907, // product_version_hi\n 0x03032857, // product_version_lo\n 0x11bf71d7, // file_flags_mask\n 0x5fb8cdbf, // file_flags\n 0xe45d0d5d, // file_os\n 0x107d9562, // file_type\n 0x5a8844d4, // file_subtype\n 0xa8d30b20, // file_date_hi\n 0x651c3e4e // file_date_lo\n };\n\n Dump dump(0, kBigEndian);\n String module_name(dump, \"single module\");\n Module module(dump, 0xa90206ca83eb2852ULL, 0xada542bd,\n module_name,\n 0xb1054d2a,\n 0x34571371,\n fixed_file_info, // from synth_minidump_unittest_data.h\n NULL, NULL);\n\n dump.Add(&module);\n dump.Add(&module_name);\n dump.Finish();\n \n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n istringstream minidump_stream(contents);\n Minidump minidump(minidump_stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(1U, minidump.GetDirectoryEntryCount());\n\n const MDRawDirectory *dir = minidump.GetDirectoryEntryAtIndex(0);\n ASSERT_TRUE(dir != NULL);\n EXPECT_EQ((u_int32_t) MD_MODULE_LIST_STREAM, dir->stream_type);\n\n MinidumpModuleList *md_module_list = minidump.GetModuleList();\n ASSERT_TRUE(md_module_list != NULL);\n ASSERT_EQ(1U, md_module_list->module_count());\n\n const MinidumpModule *md_module = md_module_list->GetModuleAtIndex(0);\n ASSERT_TRUE(md_module != NULL);\n ASSERT_EQ(0xa90206ca83eb2852ULL, md_module->base_address());\n ASSERT_EQ(0xada542bd, md_module->size());\n ASSERT_EQ(\"single module\", md_module->code_file());\n\n const MDRawModule *md_raw_module = 
md_module->module();\n ASSERT_TRUE(md_raw_module != NULL);\n ASSERT_EQ(0xb1054d2aU, md_raw_module->time_date_stamp);\n ASSERT_EQ(0x34571371U, md_raw_module->checksum);\n ASSERT_TRUE(memcmp(&md_raw_module->version_info, &fixed_file_info,\n sizeof(fixed_file_info)) == 0);\n}\n\nTEST(Dump, OneSystemInfo) {\n Dump dump(0, kLittleEndian);\n String csd_version(dump, \"Petulant Pierogi\");\n SystemInfo system_info(dump, SystemInfo::windows_x86, csd_version);\n\n dump.Add(&system_info);\n dump.Add(&csd_version);\n dump.Finish();\n \n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n istringstream minidump_stream(contents);\n Minidump minidump(minidump_stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(1U, minidump.GetDirectoryEntryCount());\n\n const MDRawDirectory *dir = minidump.GetDirectoryEntryAtIndex(0);\n ASSERT_TRUE(dir != NULL);\n EXPECT_EQ((u_int32_t) MD_SYSTEM_INFO_STREAM, dir->stream_type);\n\n MinidumpSystemInfo *md_system_info = minidump.GetSystemInfo();\n ASSERT_TRUE(md_system_info != NULL);\n ASSERT_EQ(\"windows\", md_system_info->GetOS());\n ASSERT_EQ(\"x86\", md_system_info->GetCPU());\n ASSERT_EQ(\"Petulant Pierogi\", *md_system_info->GetCSDVersion());\n ASSERT_EQ(\"GenuineIntel\", *md_system_info->GetCPUVendor());\n}\n\nTEST(Dump, BigDump) {\n Dump dump(0, kLittleEndian);\n\n // A SystemInfo stream.\n String csd_version(dump, \"Munificent Macaque\");\n SystemInfo system_info(dump, SystemInfo::windows_x86, csd_version);\n dump.Add(&csd_version);\n dump.Add(&system_info);\n\n // Five threads!\n Memory stack0(dump, 0x70b9ebfc);\n stack0.Append(\"stack for thread zero\");\n MDRawContextX86 raw_context0;\n raw_context0.context_flags = MD_CONTEXT_X86_INTEGER;\n raw_context0.eip = 0xaf0709e4;\n Context context0(dump, raw_context0);\n Thread thread0(dump, 0xbbef4432, stack0, context0,\n 0xd0377e7b, 0xdb8eb0cf, 0xd73bc314, 0x09d357bac7f9a163ULL);\n dump.Add(&stack0);\n dump.Add(&context0);\n dump.Add(&thread0);\n\n Memory stack1(dump, 
0xf988cc45);\n stack1.Append(\"stack for thread one\");\n MDRawContextX86 raw_context1;\n raw_context1.context_flags = MD_CONTEXT_X86_INTEGER;\n raw_context1.eip = 0xe4f56f81;\n Context context1(dump, raw_context1);\n Thread thread1(dump, 0x657c3f58, stack1, context1,\n 0xa68fa182, 0x6f3cf8dd, 0xe3a78ccf, 0x78cc84775e4534bbULL);\n dump.Add(&stack1);\n dump.Add(&context1);\n dump.Add(&thread1);\n\n Memory stack2(dump, 0xc8a92e7c);\n stack2.Append(\"stack for thread two\");\n MDRawContextX86 raw_context2;\n raw_context2.context_flags = MD_CONTEXT_X86_INTEGER;\n raw_context2.eip = 0xb336a438;\n Context context2(dump, raw_context2);\n Thread thread2(dump, 0xdf4b8a71, stack2, context2,\n 0x674c26b6, 0x445d7120, 0x7e700c56, 0xd89bf778e7793e17ULL);\n dump.Add(&stack2);\n dump.Add(&context2);\n dump.Add(&thread2);\n\n Memory stack3(dump, 0x36d08e08);\n stack3.Append(\"stack for thread three\");\n MDRawContextX86 raw_context3;\n raw_context3.context_flags = MD_CONTEXT_X86_INTEGER;\n raw_context3.eip = 0xdf99a60c;\n Context context3(dump, raw_context3);\n Thread thread3(dump, 0x86e6c341, stack3, context3,\n 0x32dc5c55, 0x17a2aba8, 0xe0cc75e7, 0xa46393994dae83aeULL);\n dump.Add(&stack3);\n dump.Add(&context3);\n dump.Add(&thread3);\n\n Memory stack4(dump, 0x1e0ab4fa);\n stack4.Append(\"stack for thread four\");\n MDRawContextX86 raw_context4;\n raw_context4.context_flags = MD_CONTEXT_X86_INTEGER;\n raw_context4.eip = 0xaa646267;\n Context context4(dump, raw_context4);\n Thread thread4(dump, 0x261a28d4, stack4, context4,\n 0x6ebd389e, 0xa0cd4759, 0x30168846, 0x164f650a0cf39d35ULL);\n dump.Add(&stack4);\n dump.Add(&context4);\n dump.Add(&thread4);\n\n // Three modules!\n String module1_name(dump, \"module one\");\n Module module1(dump, 0xeb77da57b5d4cbdaULL, 0x83cd5a37, module1_name);\n dump.Add(&module1_name);\n dump.Add(&module1);\n\n String module2_name(dump, \"module two\");\n Module module2(dump, 0x8675884adfe5ac90ULL, 0xb11e4ea3, module2_name);\n 
dump.Add(&module2_name);\n dump.Add(&module2);\n\n String module3_name(dump, \"module three\");\n Module module3(dump, 0x95fc1544da321b6cULL, 0x7c2bf081, module3_name);\n dump.Add(&module3_name);\n dump.Add(&module3);\n\n // Add one more memory region, on top of the five stacks.\n Memory memory5(dump, 0x61979e828040e564ULL);\n memory5.Append(\"contents of memory 5\");\n dump.Add(&memory5);\n\n dump.Finish();\n\n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n istringstream minidump_stream(contents);\n Minidump minidump(minidump_stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(4U, minidump.GetDirectoryEntryCount());\n\n // Check the threads.\n MinidumpThreadList *thread_list = minidump.GetThreadList();\n ASSERT_TRUE(thread_list != NULL);\n ASSERT_EQ(5U, thread_list->thread_count());\n u_int32_t thread_id;\n ASSERT_TRUE(thread_list->GetThreadAtIndex(0)->GetThreadID(&thread_id));\n ASSERT_EQ(0xbbef4432U, thread_id);\n ASSERT_EQ(0x70b9ebfcU,\n thread_list->GetThreadAtIndex(0)->GetMemory()->GetBase());\n ASSERT_EQ(0xaf0709e4U,\n thread_list->GetThreadAtIndex(0)->GetContext()->GetContextX86()\n ->eip);\n\n ASSERT_TRUE(thread_list->GetThreadAtIndex(1)->GetThreadID(&thread_id));\n ASSERT_EQ(0x657c3f58U, thread_id);\n ASSERT_EQ(0xf988cc45U,\n thread_list->GetThreadAtIndex(1)->GetMemory()->GetBase());\n ASSERT_EQ(0xe4f56f81U,\n thread_list->GetThreadAtIndex(1)->GetContext()->GetContextX86()\n ->eip);\n\n ASSERT_TRUE(thread_list->GetThreadAtIndex(2)->GetThreadID(&thread_id));\n ASSERT_EQ(0xdf4b8a71U, thread_id);\n ASSERT_EQ(0xc8a92e7cU,\n thread_list->GetThreadAtIndex(2)->GetMemory()->GetBase());\n ASSERT_EQ(0xb336a438U,\n thread_list->GetThreadAtIndex(2)->GetContext()->GetContextX86()\n ->eip);\n\n ASSERT_TRUE(thread_list->GetThreadAtIndex(3)->GetThreadID(&thread_id));\n ASSERT_EQ(0x86e6c341U, thread_id);\n ASSERT_EQ(0x36d08e08U,\n thread_list->GetThreadAtIndex(3)->GetMemory()->GetBase());\n ASSERT_EQ(0xdf99a60cU,\n 
thread_list->GetThreadAtIndex(3)->GetContext()->GetContextX86()\n ->eip);\n\n ASSERT_TRUE(thread_list->GetThreadAtIndex(4)->GetThreadID(&thread_id));\n ASSERT_EQ(0x261a28d4U, thread_id);\n ASSERT_EQ(0x1e0ab4faU,\n thread_list->GetThreadAtIndex(4)->GetMemory()->GetBase());\n ASSERT_EQ(0xaa646267U,\n thread_list->GetThreadAtIndex(4)->GetContext()->GetContextX86()\n ->eip);\n\n // Check the modules.\n MinidumpModuleList *md_module_list = minidump.GetModuleList();\n ASSERT_TRUE(md_module_list != NULL);\n ASSERT_EQ(3U, md_module_list->module_count());\n EXPECT_EQ(0xeb77da57b5d4cbdaULL,\n md_module_list->GetModuleAtIndex(0)->base_address());\n EXPECT_EQ(0x8675884adfe5ac90ULL,\n md_module_list->GetModuleAtIndex(1)->base_address());\n EXPECT_EQ(0x95fc1544da321b6cULL,\n md_module_list->GetModuleAtIndex(2)->base_address());\n}\n\nTEST(Dump, OneMemoryInfo) {\n Dump dump(0, kBigEndian);\n Stream stream(dump, MD_MEMORY_INFO_LIST_STREAM);\n\n // Add the MDRawMemoryInfoList header.\n const u_int64_t kNumberOfEntries = 1;\n stream.D32(sizeof(MDRawMemoryInfoList)) // size_of_header\n .D32(sizeof(MDRawMemoryInfo)) // size_of_entry\n .D64(kNumberOfEntries); // number_of_entries\n\n \n // Now add a MDRawMemoryInfo entry.\n const u_int64_t kBaseAddress = 0x1000;\n const u_int64_t kRegionSize = 0x2000;\n stream.D64(kBaseAddress) // base_address\n .D64(kBaseAddress) // allocation_base\n .D32(MD_MEMORY_PROTECT_EXECUTE_READWRITE) // allocation_protection\n .D32(0) // __alignment1\n .D64(kRegionSize) // region_size\n .D32(MD_MEMORY_STATE_COMMIT) // state\n .D32(MD_MEMORY_PROTECT_EXECUTE_READWRITE) // protection\n .D32(MD_MEMORY_TYPE_PRIVATE) // type\n .D32(0); // __alignment2\n\n dump.Add(&stream);\n dump.Finish();\n\n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n istringstream minidump_stream(contents);\n Minidump minidump(minidump_stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(1U, minidump.GetDirectoryEntryCount());\n\n const MDRawDirectory *dir = 
minidump.GetDirectoryEntryAtIndex(0);\n ASSERT_TRUE(dir != NULL);\n EXPECT_EQ((u_int32_t) MD_MEMORY_INFO_LIST_STREAM, dir->stream_type);\n\n MinidumpMemoryInfoList *info_list = minidump.GetMemoryInfoList();\n ASSERT_TRUE(info_list != NULL);\n ASSERT_EQ(1U, info_list->info_count());\n\n const MinidumpMemoryInfo *info1 = info_list->GetMemoryInfoAtIndex(0);\n ASSERT_EQ(kBaseAddress, info1->GetBase());\n ASSERT_EQ(kRegionSize, info1->GetSize());\n ASSERT_TRUE(info1->IsExecutable());\n ASSERT_TRUE(info1->IsWritable());\n\n // Should get back the same memory region here.\n const MinidumpMemoryInfo *info2 =\n info_list->GetMemoryInfoForAddress(kBaseAddress + kRegionSize / 2);\n ASSERT_EQ(kBaseAddress, info2->GetBase());\n ASSERT_EQ(kRegionSize, info2->GetSize());\n}\n\nTEST(Dump, OneExceptionX86) {\n Dump dump(0, kLittleEndian);\n\n MDRawContextX86 raw_context;\n raw_context.context_flags = MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL;\n raw_context.edi = 0x3ecba80d;\n raw_context.esi = 0x382583b9;\n raw_context.ebx = 0x7fccc03f;\n raw_context.edx = 0xf62f8ec2;\n raw_context.ecx = 0x46a6a6a8;\n raw_context.eax = 0x6a5025e2;\n raw_context.ebp = 0xd9fabb4a;\n raw_context.eip = 0x6913f540;\n raw_context.cs = 0xbffe6eda;\n raw_context.eflags = 0xb2ce1e2d;\n raw_context.esp = 0x659caaa4;\n raw_context.ss = 0x2e951ef7;\n Context context(dump, raw_context);\n\n Exception exception(dump, context,\n 0x1234abcd, // thread id\n 0xdcba4321, // exception code\n 0xf0e0d0c0, // exception flags\n 0x0919a9b9c9d9e9f9ULL); // exception address\n \n dump.Add(&context);\n dump.Add(&exception);\n dump.Finish();\n\n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n\n istringstream minidump_stream(contents);\n Minidump minidump(minidump_stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(1U, minidump.GetDirectoryEntryCount());\n\n MinidumpException *md_exception = minidump.GetException();\n ASSERT_TRUE(md_exception != NULL);\n\n u_int32_t thread_id;\n 
ASSERT_TRUE(md_exception->GetThreadID(&thread_id));\n ASSERT_EQ(0x1234abcdU, thread_id);\n\n const MDRawExceptionStream* raw_exception = md_exception->exception();\n ASSERT_TRUE(raw_exception != NULL);\n EXPECT_EQ(0xdcba4321, raw_exception->exception_record.exception_code);\n EXPECT_EQ(0xf0e0d0c0, raw_exception->exception_record.exception_flags);\n EXPECT_EQ(0x0919a9b9c9d9e9f9ULL,\n raw_exception->exception_record.exception_address);\n\n MinidumpContext *md_context = md_exception->GetContext();\n ASSERT_TRUE(md_context != NULL);\n ASSERT_EQ((u_int32_t) MD_CONTEXT_X86, md_context->GetContextCPU());\n const MDRawContextX86 *md_raw_context = md_context->GetContextX86();\n ASSERT_TRUE(md_raw_context != NULL);\n ASSERT_EQ((u_int32_t) (MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL),\n (md_raw_context->context_flags\n & (MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL)));\n EXPECT_EQ(0x3ecba80dU, raw_context.edi);\n EXPECT_EQ(0x382583b9U, raw_context.esi);\n EXPECT_EQ(0x7fccc03fU, raw_context.ebx);\n EXPECT_EQ(0xf62f8ec2U, raw_context.edx);\n EXPECT_EQ(0x46a6a6a8U, raw_context.ecx);\n EXPECT_EQ(0x6a5025e2U, raw_context.eax);\n EXPECT_EQ(0xd9fabb4aU, raw_context.ebp);\n EXPECT_EQ(0x6913f540U, raw_context.eip);\n EXPECT_EQ(0xbffe6edaU, raw_context.cs);\n EXPECT_EQ(0xb2ce1e2dU, raw_context.eflags);\n EXPECT_EQ(0x659caaa4U, raw_context.esp);\n EXPECT_EQ(0x2e951ef7U, raw_context.ss);\n}\n\nTEST(Dump, OneExceptionX86XState) {\n Dump dump(0, kLittleEndian);\n\n MDRawContextX86 raw_context;\n raw_context.context_flags = MD_CONTEXT_X86_INTEGER |\n MD_CONTEXT_X86_CONTROL | MD_CONTEXT_X86_XSTATE;\n raw_context.edi = 0x3ecba80d;\n raw_context.esi = 0x382583b9;\n raw_context.ebx = 0x7fccc03f;\n raw_context.edx = 0xf62f8ec2;\n raw_context.ecx = 0x46a6a6a8;\n raw_context.eax = 0x6a5025e2;\n raw_context.ebp = 0xd9fabb4a;\n raw_context.eip = 0x6913f540;\n raw_context.cs = 0xbffe6eda;\n raw_context.eflags = 0xb2ce1e2d;\n raw_context.esp = 0x659caaa4;\n raw_context.ss = 0x2e951ef7;\n 
Context context(dump, raw_context);\n\n Exception exception(dump, context,\n 0x1234abcd, // thread id\n 0xdcba4321, // exception code\n 0xf0e0d0c0, // exception flags\n 0x0919a9b9c9d9e9f9ULL); // exception address\n \n dump.Add(&context);\n dump.Add(&exception);\n dump.Finish();\n\n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n\n istringstream minidump_stream(contents);\n Minidump minidump(minidump_stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(1U, minidump.GetDirectoryEntryCount());\n\n MinidumpException *md_exception = minidump.GetException();\n ASSERT_TRUE(md_exception != NULL);\n\n u_int32_t thread_id;\n ASSERT_TRUE(md_exception->GetThreadID(&thread_id));\n ASSERT_EQ(0x1234abcdU, thread_id);\n\n const MDRawExceptionStream* raw_exception = md_exception->exception();\n ASSERT_TRUE(raw_exception != NULL);\n EXPECT_EQ(0xdcba4321, raw_exception->exception_record.exception_code);\n EXPECT_EQ(0xf0e0d0c0, raw_exception->exception_record.exception_flags);\n EXPECT_EQ(0x0919a9b9c9d9e9f9ULL,\n raw_exception->exception_record.exception_address);\n\n MinidumpContext *md_context = md_exception->GetContext();\n ASSERT_TRUE(md_context != NULL);\n ASSERT_EQ((u_int32_t) MD_CONTEXT_X86, md_context->GetContextCPU());\n const MDRawContextX86 *md_raw_context = md_context->GetContextX86();\n ASSERT_TRUE(md_raw_context != NULL);\n ASSERT_EQ((u_int32_t) (MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL),\n (md_raw_context->context_flags\n & (MD_CONTEXT_X86_INTEGER | MD_CONTEXT_X86_CONTROL)));\n EXPECT_EQ(0x3ecba80dU, raw_context.edi);\n EXPECT_EQ(0x382583b9U, raw_context.esi);\n EXPECT_EQ(0x7fccc03fU, raw_context.ebx);\n EXPECT_EQ(0xf62f8ec2U, raw_context.edx);\n EXPECT_EQ(0x46a6a6a8U, raw_context.ecx);\n EXPECT_EQ(0x6a5025e2U, raw_context.eax);\n EXPECT_EQ(0xd9fabb4aU, raw_context.ebp);\n EXPECT_EQ(0x6913f540U, raw_context.eip);\n EXPECT_EQ(0xbffe6edaU, raw_context.cs);\n EXPECT_EQ(0xb2ce1e2dU, raw_context.eflags);\n EXPECT_EQ(0x659caaa4U, raw_context.esp);\n 
EXPECT_EQ(0x2e951ef7U, raw_context.ss);\n}\n\nTEST(Dump, OneExceptionARM) {\n Dump dump(0, kLittleEndian);\n\n MDRawContextARM raw_context;\n raw_context.context_flags = MD_CONTEXT_ARM_INTEGER;\n raw_context.iregs[0] = 0x3ecba80d;\n raw_context.iregs[1] = 0x382583b9;\n raw_context.iregs[2] = 0x7fccc03f;\n raw_context.iregs[3] = 0xf62f8ec2;\n raw_context.iregs[4] = 0x46a6a6a8;\n raw_context.iregs[5] = 0x6a5025e2;\n raw_context.iregs[6] = 0xd9fabb4a;\n raw_context.iregs[7] = 0x6913f540;\n raw_context.iregs[8] = 0xbffe6eda;\n raw_context.iregs[9] = 0xb2ce1e2d;\n raw_context.iregs[10] = 0x659caaa4;\n raw_context.iregs[11] = 0xf0e0d0c0;\n raw_context.iregs[12] = 0xa9b8c7d6;\n raw_context.iregs[13] = 0x12345678;\n raw_context.iregs[14] = 0xabcd1234;\n raw_context.iregs[15] = 0x10203040;\n raw_context.cpsr = 0x2e951ef7;\n Context context(dump, raw_context);\n\n Exception exception(dump, context,\n 0x1234abcd, // thread id\n 0xdcba4321, // exception code\n 0xf0e0d0c0, // exception flags\n 0x0919a9b9c9d9e9f9ULL); // exception address\n \n dump.Add(&context);\n dump.Add(&exception);\n dump.Finish();\n\n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n\n istringstream minidump_stream(contents);\n Minidump minidump(minidump_stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(1U, minidump.GetDirectoryEntryCount());\n\n MinidumpException *md_exception = minidump.GetException();\n ASSERT_TRUE(md_exception != NULL);\n\n u_int32_t thread_id;\n ASSERT_TRUE(md_exception->GetThreadID(&thread_id));\n ASSERT_EQ(0x1234abcdU, thread_id);\n\n const MDRawExceptionStream* raw_exception = md_exception->exception();\n ASSERT_TRUE(raw_exception != NULL);\n EXPECT_EQ(0xdcba4321, raw_exception->exception_record.exception_code);\n EXPECT_EQ(0xf0e0d0c0, raw_exception->exception_record.exception_flags);\n EXPECT_EQ(0x0919a9b9c9d9e9f9ULL,\n raw_exception->exception_record.exception_address);\n\n MinidumpContext *md_context = md_exception->GetContext();\n ASSERT_TRUE(md_context != 
NULL);\n ASSERT_EQ((u_int32_t) MD_CONTEXT_ARM, md_context->GetContextCPU());\n const MDRawContextARM *md_raw_context = md_context->GetContextARM();\n ASSERT_TRUE(md_raw_context != NULL);\n ASSERT_EQ((u_int32_t) MD_CONTEXT_ARM_INTEGER,\n (md_raw_context->context_flags\n & MD_CONTEXT_ARM_INTEGER));\n EXPECT_EQ(0x3ecba80dU, raw_context.iregs[0]);\n EXPECT_EQ(0x382583b9U, raw_context.iregs[1]);\n EXPECT_EQ(0x7fccc03fU, raw_context.iregs[2]);\n EXPECT_EQ(0xf62f8ec2U, raw_context.iregs[3]);\n EXPECT_EQ(0x46a6a6a8U, raw_context.iregs[4]);\n EXPECT_EQ(0x6a5025e2U, raw_context.iregs[5]);\n EXPECT_EQ(0xd9fabb4aU, raw_context.iregs[6]);\n EXPECT_EQ(0x6913f540U, raw_context.iregs[7]);\n EXPECT_EQ(0xbffe6edaU, raw_context.iregs[8]);\n EXPECT_EQ(0xb2ce1e2dU, raw_context.iregs[9]);\n EXPECT_EQ(0x659caaa4U, raw_context.iregs[10]);\n EXPECT_EQ(0xf0e0d0c0U, raw_context.iregs[11]);\n EXPECT_EQ(0xa9b8c7d6U, raw_context.iregs[12]);\n EXPECT_EQ(0x12345678U, raw_context.iregs[13]);\n EXPECT_EQ(0xabcd1234U, raw_context.iregs[14]);\n EXPECT_EQ(0x10203040U, raw_context.iregs[15]);\n EXPECT_EQ(0x2e951ef7U, raw_context.cpsr);\n}\n\nTEST(Dump, OneExceptionARMOldFlags) {\n Dump dump(0, kLittleEndian);\n\n MDRawContextARM raw_context;\n // MD_CONTEXT_ARM_INTEGER, but with _OLD\n raw_context.context_flags = MD_CONTEXT_ARM_OLD | 0x00000002;\n raw_context.iregs[0] = 0x3ecba80d;\n raw_context.iregs[1] = 0x382583b9;\n raw_context.iregs[2] = 0x7fccc03f;\n raw_context.iregs[3] = 0xf62f8ec2;\n raw_context.iregs[4] = 0x46a6a6a8;\n raw_context.iregs[5] = 0x6a5025e2;\n raw_context.iregs[6] = 0xd9fabb4a;\n raw_context.iregs[7] = 0x6913f540;\n raw_context.iregs[8] = 0xbffe6eda;\n raw_context.iregs[9] = 0xb2ce1e2d;\n raw_context.iregs[10] = 0x659caaa4;\n raw_context.iregs[11] = 0xf0e0d0c0;\n raw_context.iregs[12] = 0xa9b8c7d6;\n raw_context.iregs[13] = 0x12345678;\n raw_context.iregs[14] = 0xabcd1234;\n raw_context.iregs[15] = 0x10203040;\n raw_context.cpsr = 0x2e951ef7;\n Context context(dump, 
raw_context);\n\n Exception exception(dump, context,\n 0x1234abcd, // thread id\n 0xdcba4321, // exception code\n 0xf0e0d0c0, // exception flags\n 0x0919a9b9c9d9e9f9ULL); // exception address\n \n dump.Add(&context);\n dump.Add(&exception);\n dump.Finish();\n\n string contents;\n ASSERT_TRUE(dump.GetContents(&contents));\n\n istringstream minidump_stream(contents);\n Minidump minidump(minidump_stream);\n ASSERT_TRUE(minidump.Read());\n ASSERT_EQ(1U, minidump.GetDirectoryEntryCount());\n\n MinidumpException *md_exception = minidump.GetException();\n ASSERT_TRUE(md_exception != NULL);\n\n u_int32_t thread_id;\n ASSERT_TRUE(md_exception->GetThreadID(&thread_id));\n ASSERT_EQ(0x1234abcdU, thread_id);\n\n const MDRawExceptionStream* raw_exception = md_exception->exception();\n ASSERT_TRUE(raw_exception != NULL);\n EXPECT_EQ(0xdcba4321, raw_exception->exception_record.exception_code);\n EXPECT_EQ(0xf0e0d0c0, raw_exception->exception_record.exception_flags);\n EXPECT_EQ(0x0919a9b9c9d9e9f9ULL,\n raw_exception->exception_record.exception_address);\n\n MinidumpContext *md_context = md_exception->GetContext();\n ASSERT_TRUE(md_context != NULL);\n ASSERT_EQ((u_int32_t) MD_CONTEXT_ARM, md_context->GetContextCPU());\n const MDRawContextARM *md_raw_context = md_context->GetContextARM();\n ASSERT_TRUE(md_raw_context != NULL);\n ASSERT_EQ((u_int32_t) MD_CONTEXT_ARM_INTEGER,\n (md_raw_context->context_flags\n & MD_CONTEXT_ARM_INTEGER));\n EXPECT_EQ(0x3ecba80dU, raw_context.iregs[0]);\n EXPECT_EQ(0x382583b9U, raw_context.iregs[1]);\n EXPECT_EQ(0x7fccc03fU, raw_context.iregs[2]);\n EXPECT_EQ(0xf62f8ec2U, raw_context.iregs[3]);\n EXPECT_EQ(0x46a6a6a8U, raw_context.iregs[4]);\n EXPECT_EQ(0x6a5025e2U, raw_context.iregs[5]);\n EXPECT_EQ(0xd9fabb4aU, raw_context.iregs[6]);\n EXPECT_EQ(0x6913f540U, raw_context.iregs[7]);\n EXPECT_EQ(0xbffe6edaU, raw_context.iregs[8]);\n EXPECT_EQ(0xb2ce1e2dU, raw_context.iregs[9]);\n EXPECT_EQ(0x659caaa4U, raw_context.iregs[10]);\n EXPECT_EQ(0xf0e0d0c0U, 
raw_context.iregs[11]);\n EXPECT_EQ(0xa9b8c7d6U, raw_context.iregs[12]);\n EXPECT_EQ(0x12345678U, raw_context.iregs[13]);\n EXPECT_EQ(0xabcd1234U, raw_context.iregs[14]);\n EXPECT_EQ(0x10203040U, raw_context.iregs[15]);\n EXPECT_EQ(0x2e951ef7U, raw_context.cpsr);\n}\n\n} // namespace\n", "meta": {"content_hash": "f94f54cb89eb0e4ea499942621181f06", "timestamp": "", "source": "github", "line_count": 955, "max_line_length": 78, "avg_line_length": 36.30994764397906, "alnum_prop": 0.6985234744491867, "repo_name": "mhoran/google-breakpad", "id": "1faf169d5c2a5128982dd28aecbfcbf57613fd52", "size": "36710", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/processor/minidump_unittest.cc", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Assembly", "bytes": "7921"}, {"name": "C", "bytes": "1701689"}, {"name": "C++", "bytes": "6143738"}, {"name": "Objective-C", "bytes": "455134"}, {"name": "Perl", "bytes": "125"}, {"name": "Prolog", "bytes": "3262"}, {"name": "Python", "bytes": "447592"}, {"name": "Ruby", "bytes": "107"}, {"name": "Shell", "bytes": "565789"}]}} {"text": "//====================================================================================================================//\n// File: qcan_server_memeory.hpp //\n// Description: QCAN classes - CAN server //\n// //\n// Copyright (C) MicroControl GmbH & Co. KG //\n// 53844 Troisdorf - Germany //\n// www.microcontrol.net //\n// //\n//--------------------------------------------------------------------------------------------------------------------//\n// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the //\n// following conditions are met: //\n// 1. Redistributions of source code must retain the above copyright notice, this list of conditions, the following //\n// disclaimer and the referenced file 'LICENSE'. //\n// 2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the //\n// following disclaimer in the documentation and/or other materials provided with the distribution. //\n// 3. Neither the name of MicroControl nor the names of its contributors may be used to endorse or promote products //\n// derived from this software without specific prior written permission. //\n// //\n// Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance //\n// with the License. You may obtain a copy of the License at //\n// //\n// http://www.apache.org/licenses/LICENSE-2.0 //\n// //\n// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed //\n// on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for //\n// the specific language governing permissions and limitations under the License. //\n// //\n//====================================================================================================================//\n\n\n/*--------------------------------------------------------------------------------------------------------------------*\\\n** Include files **\n** **\n\\*--------------------------------------------------------------------------------------------------------------------*/\n\n#include \"qcan_defs.hpp\"\n\n/*--------------------------------------------------------------------------------------------------------------------*\\\n** Definitions **\n** **\n\\*--------------------------------------------------------------------------------------------------------------------*/\n\n#define QCAN_MEMORY_KEY \"QCAN_SERVER_SHARED_KEY\"\n\n/*--------------------------------------------------------------------------------------------------------------------*\\\n** Structures **\n** 
**\n\\*--------------------------------------------------------------------------------------------------------------------*/\n\ntypedef struct Server_s {\n uint32_t ulVersionMajor;\n uint32_t ulVersionMinor;\n} Server_ts;\n\ntypedef struct Network_s {\n uint32_t ulVersionMajor;\n uint32_t ulVersionMinor;\n} Network_ts;\n\ntypedef struct ServerSettings_s {\n Server_ts tsServer;\n Network_ts atsNetwork[QCAN_NETWORK_MAX];\n} ServerSettings_ts;\n\n", "meta": {"content_hash": "788a0242d048df14435187c11527b5aa", "timestamp": "", "source": "github", "line_count": 64, "max_line_length": 120, "avg_line_length": 82.03125, "alnum_prop": 0.27390476190476193, "repo_name": "JoTid/CANpie", "id": "4f6e40cb83b578d82ad00dca3ad4767a7bcfa101", "size": "5250", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "source/qcan/qcan_server_memory.hpp", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "1816"}, {"name": "C", "bytes": "580757"}, {"name": "C++", "bytes": "1054376"}, {"name": "Makefile", "bytes": "16403"}, {"name": "Objective-C", "bytes": "47189"}, {"name": "Prolog", "bytes": "1153"}, {"name": "QMake", "bytes": "55893"}, {"name": "Shell", "bytes": "848"}]}} {"text": "[react-babylonjs](../README.md) / [Exports](../modules.md) / loaders\n\n# Module: loaders\n\n## Table of contents\n\n### Enumerations\n\n- [LoaderStatus](../enums/loaders.loaderstatus.md)\n- [TaskType](../enums/loaders.tasktype.md)\n\n### Classes\n\n- [LoadedModel](../classes/loaders.loadedmodel.md)\n\n### Interfaces\n\n- [ILoadedModel](../interfaces/loaders.iloadedmodel.md)\n\n### Type aliases\n\n- [AssetManagerContextProviderProps](loaders.md#assetmanagercontextproviderprops)\n- [AssetManagerContextType](loaders.md#assetmanagercontexttype)\n- [AssetManagerOptions](loaders.md#assetmanageroptions)\n- [AssetManagerProgressType](loaders.md#assetmanagerprogresstype)\n- [BinaryTask](loaders.md#binarytask)\n- [MeshTask](loaders.md#meshtask)\n- 
[SceneLoaderContextProviderProps](loaders.md#sceneloadercontextproviderprops)\n- [SceneLoaderContextType](loaders.md#sceneloadercontexttype)\n- [SceneLoaderOptions](loaders.md#sceneloaderoptions)\n- [Task](loaders.md#task)\n- [TextureTask](loaders.md#texturetask)\n\n### Variables\n\n- [AssetManagerContext](loaders.md#assetmanagercontext)\n- [AssetManagerContextProvider](loaders.md#assetmanagercontextprovider)\n- [SceneLoaderContext](loaders.md#sceneloadercontext)\n- [SceneLoaderContextProvider](loaders.md#sceneloadercontextprovider)\n\n### Functions\n\n- [useAssetManager](loaders.md#useassetmanager)\n- [useSceneLoader](loaders.md#usesceneloader)\n\n## Type aliases\n\n### AssetManagerContextProviderProps\n\n\u01ac **AssetManagerContextProviderProps**: { `children`: React.ReactNode ; `startProgress?`: [*AssetManagerProgressType*](loaders_useassetmanager.md#assetmanagerprogresstype) }\n\n#### Type declaration:\n\nName | Type |\n------ | ------ |\n`children` | React.ReactNode |\n`startProgress?` | [*AssetManagerProgressType*](loaders_useassetmanager.md#assetmanagerprogresstype) |\n\nDefined in: [loaders/useAssetManager.tsx:48](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useAssetManager.tsx#L48)\n\n___\n\n### AssetManagerContextType\n\n\u01ac **AssetManagerContextType**: { `lastProgress?`: [*AssetManagerProgressType*](loaders_useassetmanager.md#assetmanagerprogresstype) ; `updateProgress`: (`progress`: [*AssetManagerProgressType*](loaders_useassetmanager.md#assetmanagerprogresstype)) => *void* } \\| *undefined*\n\nDefined in: [loaders/useAssetManager.tsx:36](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useAssetManager.tsx#L36)\n\n___\n\n### AssetManagerOptions\n\n\u01ac **AssetManagerOptions**: { `reportProgress?`: *boolean* ; `scene?`: Scene ; `useDefaultLoadingScreen?`: *boolean* }\n\n#### Type declaration:\n\nName | Type |\n------ | ------ |\n`reportProgress?` | *boolean* |\n`scene?` | Scene 
|\n`useDefaultLoadingScreen?` | *boolean* |\n\nDefined in: [loaders/useAssetManager.tsx:61](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useAssetManager.tsx#L61)\n\n___\n\n### AssetManagerProgressType\n\n\u01ac **AssetManagerProgressType**: { `eventData`: IAssetsProgressEvent ; `eventState`: EventState } \\| *undefined*\n\nDefined in: [loaders/useAssetManager.tsx:43](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useAssetManager.tsx#L43)\n\n___\n\n### BinaryTask\n\n\u01ac **BinaryTask**: { `name`: *string* ; `taskType`: [*Binary*](../enums/loaders/useassetmanager.tasktype.md#binary) ; `url`: *string* }\n\n#### Type declaration:\n\nName | Type |\n------ | ------ |\n`name` | *string* |\n`taskType` | [*Binary*](../enums/loaders/useassetmanager.tasktype.md#binary) |\n`url` | *string* |\n\nDefined in: [loaders/useAssetManager.tsx:11](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useAssetManager.tsx#L11)\n\n___\n\n### MeshTask\n\n\u01ac **MeshTask**: { `meshesNames?`: *any* ; `name`: *string* ; `rootUrl`: *string* ; `sceneFilename`: *string* ; `taskType`: [*Mesh*](../enums/loaders/useassetmanager.tasktype.md#mesh) }\n\n#### Type declaration:\n\nName | Type |\n------ | ------ |\n`meshesNames?` | *any* |\n`name` | *string* |\n`rootUrl` | *string* |\n`sceneFilename` | *string* |\n`taskType` | [*Mesh*](../enums/loaders/useassetmanager.tasktype.md#mesh) |\n\nDefined in: [loaders/useAssetManager.tsx:17](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useAssetManager.tsx#L17)\n\n___\n\n### SceneLoaderContextProviderProps\n\n\u01ac **SceneLoaderContextProviderProps**: { `children`: React.ReactNode ; `startProgress?`: ISceneLoaderProgressEvent }\n\n#### Type declaration:\n\nName | Type |\n------ | ------ |\n`children` | React.ReactNode |\n`startProgress?` | ISceneLoaderProgressEvent |\n\nDefined in: 
[loaders/useSceneLoader.tsx:14](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useSceneLoader.tsx#L14)\n\n___\n\n### SceneLoaderContextType\n\n\u01ac **SceneLoaderContextType**: { `lastProgress?`: *Nullable* ; `updateProgress`: (`progress`: ISceneLoaderProgressEvent) => *void* } \\| *undefined*\n\nDefined in: [loaders/useSceneLoader.tsx:7](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useSceneLoader.tsx#L7)\n\n___\n\n### SceneLoaderOptions\n\n\u01ac **SceneLoaderOptions**: { `alwaysSelectAsActiveMesh?`: *boolean* ; `onModelLoaded?`: (`loadedModel`: [*ILoadedModel*](../interfaces/loaders/loadedmodel.iloadedmodel.md)) => *void* ; `receiveShadows?`: *boolean* ; `reportProgress?`: *boolean* ; `scaleToDimension?`: *number* ; `scene?`: Scene }\n\n#### Type declaration:\n\nName | Type | Description |\n------ | ------ | ------ |\n`alwaysSelectAsActiveMesh?` | *boolean* | Always select root mesh as active. Defaults to false. |\n`onModelLoaded?` | (`loadedModel`: [*ILoadedModel*](../interfaces/loaders/loadedmodel.iloadedmodel.md)) => *void* | Access to loaded model as soon as it is loaded, so it provides a way to hide or scale the meshes before the first render. |\n`receiveShadows?` | *boolean* | set that all meshes receive shadows. Defaults to false. |\n`reportProgress?` | *boolean* | SceneLoader progress events are set on context provider (when available). Defaults to false. |\n`scaleToDimension?` | *number* | Scale entire model within these square bounds Defaults to no scaling. 
|\n`scene?` | Scene | - |\n\nDefined in: [loaders/useSceneLoader.tsx:27](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useSceneLoader.tsx#L27)\n\n___\n\n### Task\n\n\u01ac **Task**: [*BinaryTask*](loaders_useassetmanager.md#binarytask) \\| [*MeshTask*](loaders_useassetmanager.md#meshtask) \\| [*TextureTask*](loaders_useassetmanager.md#texturetask)\n\nDefined in: [loaders/useAssetManager.tsx:34](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useAssetManager.tsx#L34)\n\n___\n\n### TextureTask\n\n\u01ac **TextureTask**: { `invertY?`: *boolean* ; `name`: *string* ; `noMipmap?`: *boolean* ; `samplingMode?`: *number* ; `taskType`: [*Texture*](../enums/loaders/useassetmanager.tasktype.md#texture) ; `url`: *string* }\n\n#### Type declaration:\n\nName | Type |\n------ | ------ |\n`invertY?` | *boolean* |\n`name` | *string* |\n`noMipmap?` | *boolean* |\n`samplingMode?` | *number* |\n`taskType` | [*Texture*](../enums/loaders/useassetmanager.tasktype.md#texture) |\n`url` | *string* |\n\nDefined in: [loaders/useAssetManager.tsx:25](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useAssetManager.tsx#L25)\n\n## Variables\n\n### AssetManagerContext\n\n\u2022 `Const` **AssetManagerContext**: *Context*<[*AssetManagerContextType*](loaders_useassetmanager.md#assetmanagercontexttype)\\>\n\nDefined in: [loaders/useAssetManager.tsx:41](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useAssetManager.tsx#L41)\n\n___\n\n### AssetManagerContextProvider\n\n\u2022 `Const` **AssetManagerContextProvider**: *React.FC*<[*AssetManagerContextProviderProps*](loaders_useassetmanager.md#assetmanagercontextproviderprops)\\>\n\nDefined in: [loaders/useAssetManager.tsx:53](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useAssetManager.tsx#L53)\n\n___\n\n### SceneLoaderContext\n\n\u2022 `Const` **SceneLoaderContext**: 
*Context*<[*SceneLoaderContextType*](loaders_usesceneloader.md#sceneloadercontexttype)\\>\n\nDefined in: [loaders/useSceneLoader.tsx:12](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useSceneLoader.tsx#L12)\n\n___\n\n### SceneLoaderContextProvider\n\n\u2022 `Const` **SceneLoaderContextProvider**: *React.FC*<[*SceneLoaderContextProviderProps*](loaders_usesceneloader.md#sceneloadercontextproviderprops)\\>\n\nDefined in: [loaders/useSceneLoader.tsx:19](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useSceneLoader.tsx#L19)\n\n## Functions\n\n### useAssetManager\n\n\u25b8 `Const`**useAssetManager**(`tasks`: [*Task*](loaders_useassetmanager.md#task)[], `options?`: [*AssetManagerOptions*](loaders_useassetmanager.md#assetmanageroptions)): AssetManagerResult\n\n#### Parameters:\n\nName | Type |\n------ | ------ |\n`tasks` | [*Task*](loaders_useassetmanager.md#task)[] |\n`options?` | [*AssetManagerOptions*](loaders_useassetmanager.md#assetmanageroptions) |\n\n**Returns:** AssetManagerResult\n\nDefined in: [loaders/useAssetManager.tsx:249](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useAssetManager.tsx#L249)\n\n___\n\n### useSceneLoader\n\n\u25b8 `Const`**useSceneLoader**(`rootUrl`: *string*, `sceneFilename`: *string*, `pluginExtension?`: *string*, `options?`: [*SceneLoaderOptions*](loaders_usesceneloader.md#sceneloaderoptions)): [*LoadedModel*](../classes/loaders/loadedmodel.loadedmodel.md)\n\n#### Parameters:\n\nName | Type |\n------ | ------ |\n`rootUrl` | *string* |\n`sceneFilename` | *string* |\n`pluginExtension?` | *string* |\n`options?` | [*SceneLoaderOptions*](loaders_usesceneloader.md#sceneloaderoptions) |\n\n**Returns:** [*LoadedModel*](../classes/loaders/loadedmodel.loadedmodel.md)\n\nDefined in: [loaders/useSceneLoader.tsx:202](https://github.com/brianzinn/react-babylonjs/blob/eba7b00/src/hooks/loaders/useSceneLoader.tsx#L202)\n", "meta": {"content_hash": 
"efc4b738c896959586b0276690520833", "timestamp": "", "source": "github", "line_count": 262, "max_line_length": 296, "avg_line_length": 37.48091603053435, "alnum_prop": 0.7344195519348269, "repo_name": "brianzinn/react-babylonJS", "id": "dec1387e403e9efd81620c5642b04ed3df2913ea", "size": "9843", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/hooks/modules/loaders.md", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "1014"}, {"name": "TypeScript", "bytes": "24537"}]}} {"text": "function SendChatMessage(...)\nend;\n\nfunction SendAddonMessage(...)\nend;\n\nfunction GetDefaultLanguage()\n\treturn \"Common\";\nend;\n", "meta": {"content_hash": "b7f92922f981fb3896a0311358c8fdcc", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 30, "avg_line_length": 14.0, "alnum_prop": 0.746031746031746, "repo_name": "Thonik/FritoMod", "id": "752bbbd9051336f91963032e6e76ff3befa68dc4", "size": "126", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "wow/api/Chat.lua", "mode": "33188", "license": "mit", "language": []}} {"text": "package com.intellij.java.codeInsight.daemon;\n\nimport com.intellij.JavaTestUtil;\nimport com.intellij.codeInsight.daemon.DaemonAnalyzerTestCase;\nimport com.intellij.codeInsight.daemon.impl.HighlightInfo;\nimport com.intellij.codeInsight.intention.IntentionAction;\nimport com.intellij.codeInspection.redundantCast.RedundantCastInspection;\nimport com.intellij.lang.annotation.HighlightSeverity;\nimport com.intellij.lang.java.JavaDocumentationProvider;\nimport com.intellij.openapi.editor.Document;\nimport com.intellij.openapi.editor.Editor;\nimport com.intellij.openapi.editor.EditorFactory;\nimport com.intellij.openapi.progress.EmptyProgressIndicator;\nimport com.intellij.openapi.progress.ProgressManager;\nimport com.intellij.openapi.project.Project;\nimport com.intellij.openapi.util.Disposer;\nimport com.intellij.psi.*;\nimport 
com.intellij.psi.infos.CandidateInfo;\nimport com.intellij.psi.search.searches.ReferencesSearch;\nimport com.intellij.psi.util.PsiTreeUtil;\nimport com.intellij.psi.util.TypeConversionUtil;\nimport com.intellij.testFramework.fixtures.LightJavaCodeInsightFixtureTestCase;\nimport com.intellij.testFramework.fixtures.impl.CodeInsightTestFixtureImpl;\nimport com.intellij.util.ArrayUtilRt;\n\nimport java.util.List;\n\nimport static org.assertj.core.api.Assertions.assertThat;\n\npublic class LightAdvHighlightingFixtureTest extends LightJavaCodeInsightFixtureTestCase {\n \n\n @Override\n protected String getBasePath() {\n return JavaTestUtil.getRelativeJavaTestDataPath() + \"/codeInsight/daemonCodeAnalyzer/advFixture\";\n }\n\n public void testHidingOnDemandImports() {\n //noinspection StaticNonFinalField\n myFixture.addClass(\"package foo; public class Foo {\" +\n \" public static String foo;\" +\n \"}\");\n myFixture.addClass(\"package foo; public class Bar {\" +\n \" public static void foo(String s) {}\" +\n \"}\");\n myFixture.configureByFile(getTestName(false) + \".java\");\n myFixture.checkHighlighting(false, false, false);\n }\n\n public void testFilteredCandidates() {\n PsiFile file = myFixture.configureByText(\"a.java\", \"class a {{new StringBuilder().append();}}\");\n PsiCallExpression callExpression =\n PsiTreeUtil.getParentOfType(file.findElementAt(myFixture.getEditor().getCaretModel().getOffset()), PsiCallExpression.class);\n assertNotNull(callExpression);\n CandidateInfo[] candidates =\n PsiResolveHelper.getInstance(myFixture.getProject()).getReferencedMethodCandidates(callExpression, false);\n assertSize(27, candidates);\n String generateDoc = new JavaDocumentationProvider().generateDoc(callExpression, callExpression);\n assertEquals(\"Candidates for method call new StringBuilder().append() are:

  \" +\n \"StringBuilder append(Object)
  \" +\n \"StringBuilder append(String)
  \" +\n \"StringBuilder append(StringBuilder)
  \" +\n \"StringBuilder append(StringBuffer)
  \" +\n \"StringBuilder append(CharSequence)
  \" +\n \"StringBuilder append(CharSequence, int, int)
  \" +\n \"StringBuilder append(char[])
  \" +\n \"StringBuilder append(char[], int, int)
  \" +\n \"StringBuilder append(boolean)
  \" +\n \"StringBuilder append(char)
  \" +\n \"StringBuilder append(int)
  \" +\n \"StringBuilder append(long)
  \" +\n \"StringBuilder append(float)
  \" +\n \"StringBuilder append(double)
\", generateDoc);\n }\n\n public void testPackageNamedAsClassInDefaultPackage() {\n myFixture.addClass(\"package test; public class A {}\");\n PsiClass aClass = myFixture.addClass(\"public class test {}\");\n doTest();\n assertNull(ProgressManager.getInstance().runProcess(() -> ReferencesSearch.search(aClass).findFirst(), new EmptyProgressIndicator()));\n }\n\n public void testPackageNameAsClassFQName() {\n myFixture.addClass(\"package foo.Bar; class A {}\");\n myFixture.addClass(\"package foo; public class Bar { public static class Inner {}}\");\n doTest();\n }\n\n public void testInaccessibleFunctionalTypeParameter() {\n myFixture.addClass(\"package test; class A {}\");\n myFixture.addClass(\"package test; public interface I { void m(A a);}\");\n myFixture.addClass(\"package test; public interface J { A m();}\");\n doTest();\n }\n\n public void testBoundsPromotionWithCapturedWildcards() {\n myFixture.addClass(\"package a; public interface Provider {}\");\n myFixture.addClass(\"package b; public interface Provider {}\");\n doTest();\n }\n\n public void testStaticImportCompoundWithInheritance() {\n myFixture.addClass(\"package a; public interface A { static void foo(Object o){} static void foo(String str) {}}\");\n doTest();\n }\n\n public void testSuppressedInGenerated() {\n myFixture.enableInspections(new RedundantCastInspection());\n myFixture.addClass(\"package javax.annotation; public @interface Generated {}\");\n doTest();\n }\n\n public void testReferenceThroughInheritance() {\n myFixture.addClass(\"package test;\\n\" +\n \"public class A {\\n\" +\n \" public static class B {}\\n\" +\n \"}\");\n doTest();\n }\n\n public void testReferenceThroughInheritance1() {\n //noinspection UnnecessaryInterfaceModifier\n myFixture.addClass(\"package me;\\n\" +\n \"import me.Serializer.Format;\\n\" +\n \"public interface Serializer {\\n\" +\n \" public static interface Format {}\\n\" +\n \"}\\n\");\n doTest();\n }\n\n public void 
testUsageOfProtectedAnnotationOutsideAPackage() {\n myFixture.addClass(\"package a;\\n\" +\n \"import java.lang.annotation.ElementType;\\n\" +\n \"import java.lang.annotation.Target;\\n\" +\n \"\\n\" +\n \"public class A {\\n\" +\n \" @Target( { ElementType.METHOD, ElementType.TYPE } )\\n\" +\n \" protected @interface Test{\\n\" +\n \" }\\n\" +\n \"}\");\n doTest();\n }\n\n public void testPackageLocalClassUsedInArrayTypeOutsidePackage() {\n myFixture.addClass(\"package a; class A {}\");\n myFixture.addClass(\"package a; public class B {public static A[] getAs() {return null;}}\");\n doTest();\n }\n\n public void testProtectedFieldUsedInAnnotationParameterOfInheritor() {\n myFixture.addClass(\"package a; public class A {protected final static String A_FOO = \\\"A\\\";}\");\n doTest();\n }\n\n public void testStaticImportClassConflictingWithPackageName() {\n myFixture.addClass(\"package p.P1; class Unrelated {}\");\n myFixture.addClass(\"package p; public class P1 {public static final int FOO = 1;}\");\n doTest();\n }\n\n public void testAmbiguousMethodCallWhenStaticImported() {\n myFixture.addClass(\"package p;\" +\n \"class A {\\n\" +\n \" static A of(T t) {\\n\" +\n \" return null;\\n\" +\n \" }\\n\" +\n \"}\\n\" +\n \"class B {\\n\" +\n \" static B of(T t) {\\n\" +\n \" return null;\\n\" +\n \" }\\n\" +\n \" static B of(T... 
t) {\\n\" +\n \" return null;\\n\" +\n \" }\\n\" +\n \"}\\n\");\n doTest();\n }\n\n public void testClassPackageConflict() {\n myFixture.addClass(\"package a; public class b {}\");\n myFixture.addClass(\"package c; public class a {}\");\n doTest();\n }\n\n public void testClassPackageConflict1() {\n myFixture.addClass(\"package a; public class b {}\");\n myFixture.addClass(\"package c.d; public class a {}\");\n doTest();\n }\n\n public void testTypeAnnotations() {\n myFixture.addClass(\"import java.lang.annotation.ElementType;\\n\" +\n \"import java.lang.annotation.Target;\\n\" +\n \"@Target({ElementType.TYPE_USE})\\n\" +\n \"@interface Nullable {}\\n\");\n myFixture.addClass(\"class Middle extends Base<@Nullable R, String>{}\");\n myFixture.addClass(\"class Child extends Middle{}\");\n PsiClass baseClass = myFixture.addClass(\"class Base {}\");\n PsiClass fooClass = myFixture.addClass(\"class Foo {\\n\" +\n \" Child field;\\n\" +\n \"}\");\n PsiField fooField = fooClass.findFieldByName(\"field\", false);\n PsiType substituted =\n TypeConversionUtil.getSuperClassSubstitutor(baseClass, (PsiClassType)fooField.getType()).substitute(baseClass.getTypeParameters()[0]);\n assertEquals(1, substituted.getAnnotations().length);\n }\n\n public void testCodeFragmentMayAccessDefaultPackage() {\n myFixture.addClass(\"public class MainClass { }\");\n\n Project project = getProject();\n PsiElement context = JavaPsiFacade.getInstance(project).findPackage(\"\");\n JavaCodeFragment fragment = JavaCodeFragmentFactory.getInstance(project).createReferenceCodeFragment(\"MainClass\", context, true, true);\n Document document = PsiDocumentManager.getInstance(project).getDocument(fragment);\n Editor editor = EditorFactory.getInstance().createViewer(document, project);\n Disposer.register(myFixture.getTestRootDisposable(), () -> EditorFactory.getInstance().releaseEditor(editor));\n\n List highlights = CodeInsightTestFixtureImpl.instantiateAndRun(fragment, editor, ArrayUtilRt.EMPTY_INT_ARRAY, 
false);\n List problems = DaemonAnalyzerTestCase.filter(highlights, HighlightSeverity.WARNING);\n assertThat(problems).isEmpty();\n }\n\n public void testImplicitConstructorAccessibility() {\n myFixture.addClass(\"package a; public class Base {\" +\n \"private Base() {}\\n\" +\n \"protected Base(int... i) {}\\n\" +\n \"}\");\n doTest();\n }\n\n public void testDiamondsWithAnonymousProtectedConstructor() {\n myFixture.addClass(\"package a; public class Base { protected Base() {}}\");\n doTest();\n }\n \n public void testDiamondsWithProtectedCallInConstruction() {\n myFixture.addClass(\"package a; public class Base { \" +\n \" protected String createString() {return null;}\" +\n \"}\");\n doTest();\n }\n\n public void testDefaultAnnotationsApplicability() {\n myFixture.addClass(\"package foo; public @interface A {}\");\n myFixture.configureByFile(\"module-info.java\");\n myFixture.checkHighlighting();\n }\n\n public void testAlwaysFalseForLoop() {\n doTest();\n IntentionAction action = myFixture.findSingleIntention(\"Remove 'for' statement\");\n myFixture.launchAction(action);\n myFixture.checkResultByFile(getTestName(false) + \"_after.java\");\n }\n\n public void testProtectedInnerClass() {\n myFixture.addClass(\"package a;\\n\" +\n \"public class Outer {\\n\" +\n \" public Object get(Inner key) {\\n\" +\n \" return null;\\n\" +\n \" }\\n\" +\n \" public Inner get1() {return null;} \\n\" +\n \" public Inner f; \\n\" +\n \" protected class Inner {}\\n\" +\n \"}\");\n doTest();\n }\n \n public void testProtectedInnerClass1() {\n myFixture.addClass(\"package a;\\n\" +\n \"public class A {\\n\" +\n \" public T getData() {return null;}\\n\" +\n \"}\");\n myFixture.addClass(\"package a;\\n\" +\n \"public class Outer extends A {\\n\" +\n \" protected class Inner {}\\n\" +\n \"}\");\n doTest();\n }\n\n private void doTest() {\n myFixture.configureByFile(getTestName(false) + \".java\");\n myFixture.checkHighlighting();\n }\n}", "meta": {"content_hash": 
"ef7d91961df677bfcbca9006a8fcdde5", "timestamp": "", "source": "github", "line_count": 285, "max_line_length": 176, "avg_line_length": 46.435087719298245, "alnum_prop": 0.6320840259936528, "repo_name": "google/intellij-community", "id": "913fb7ace30ea2c7bbdb34897f4ae4c15628b887", "size": "13355", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "java/java-tests/testSrc/com/intellij/java/codeInsight/daemon/LightAdvHighlightingFixtureTest.java", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "// Copyright 2012 The Chromium Authors. All rights reserved.\n// Use of this source code is governed by a BSD-style license that can be\n// found in the LICENSE file.\n\n// Unit tests for the SyncApi. Note that a lot of the underlying\n// functionality is provided by the Syncable layer, which has its own\n// unit tests. We'll test SyncApi specific things in this harness.\n\n#include \n#include \n\n#include \"base/basictypes.h\"\n#include \"base/callback.h\"\n#include \"base/compiler_specific.h\"\n#include \"base/files/scoped_temp_dir.h\"\n#include \"base/format_macros.h\"\n#include \"base/location.h\"\n#include \"base/memory/scoped_ptr.h\"\n#include \"base/message_loop.h\"\n#include \"base/message_loop/message_loop_proxy.h\"\n#include \"base/strings/string_number_conversions.h\"\n#include \"base/strings/stringprintf.h\"\n#include \"base/strings/utf_string_conversions.h\"\n#include \"base/test/values_test_util.h\"\n#include \"base/values.h\"\n#include \"sync/engine/sync_scheduler.h\"\n#include \"sync/internal_api/public/base/model_type_test_util.h\"\n#include \"sync/internal_api/public/change_record.h\"\n#include \"sync/internal_api/public/engine/model_safe_worker.h\"\n#include \"sync/internal_api/public/engine/polling_constants.h\"\n#include \"sync/internal_api/public/http_post_provider_factory.h\"\n#include \"sync/internal_api/public/http_post_provider_interface.h\"\n#include \"sync/internal_api/public/read_node.h\"\n#include 
\"sync/internal_api/public/read_transaction.h\"\n#include \"sync/internal_api/public/test/test_entry_factory.h\"\n#include \"sync/internal_api/public/test/test_internal_components_factory.h\"\n#include \"sync/internal_api/public/test/test_user_share.h\"\n#include \"sync/internal_api/public/write_node.h\"\n#include \"sync/internal_api/public/write_transaction.h\"\n#include \"sync/internal_api/sync_encryption_handler_impl.h\"\n#include \"sync/internal_api/sync_manager_impl.h\"\n#include \"sync/internal_api/syncapi_internal.h\"\n#include \"sync/js/js_arg_list.h\"\n#include \"sync/js/js_backend.h\"\n#include \"sync/js/js_event_handler.h\"\n#include \"sync/js/js_reply_handler.h\"\n#include \"sync/js/js_test_util.h\"\n#include \"sync/notifier/fake_invalidation_handler.h\"\n#include \"sync/notifier/fake_invalidator.h\"\n#include \"sync/notifier/invalidation_handler.h\"\n#include \"sync/notifier/invalidator.h\"\n#include \"sync/protocol/bookmark_specifics.pb.h\"\n#include \"sync/protocol/encryption.pb.h\"\n#include \"sync/protocol/extension_specifics.pb.h\"\n#include \"sync/protocol/password_specifics.pb.h\"\n#include \"sync/protocol/preference_specifics.pb.h\"\n#include \"sync/protocol/proto_value_conversions.h\"\n#include \"sync/protocol/sync.pb.h\"\n#include \"sync/sessions/sync_session.h\"\n#include \"sync/syncable/directory.h\"\n#include \"sync/syncable/entry.h\"\n#include \"sync/syncable/mutable_entry.h\"\n#include \"sync/syncable/nigori_util.h\"\n#include \"sync/syncable/syncable_id.h\"\n#include \"sync/syncable/syncable_read_transaction.h\"\n#include \"sync/syncable/syncable_util.h\"\n#include \"sync/syncable/syncable_write_transaction.h\"\n#include \"sync/test/callback_counter.h\"\n#include \"sync/test/engine/fake_sync_scheduler.h\"\n#include \"sync/test/engine/test_id_factory.h\"\n#include \"sync/test/fake_encryptor.h\"\n#include \"sync/test/fake_extensions_activity_monitor.h\"\n#include \"sync/util/cryptographer.h\"\n#include 
\"sync/util/extensions_activity_monitor.h\"\n#include \"sync/util/test_unrecoverable_error_handler.h\"\n#include \"sync/util/time.h\"\n#include \"testing/gmock/include/gmock/gmock.h\"\n#include \"testing/gtest/include/gtest/gtest.h\"\n\nusing base::ExpectDictStringValue;\nusing testing::_;\nusing testing::DoAll;\nusing testing::InSequence;\nusing testing::Return;\nusing testing::SaveArg;\nusing testing::StrictMock;\n\nnamespace syncer {\n\nusing sessions::SyncSessionSnapshot;\nusing syncable::GET_BY_HANDLE;\nusing syncable::IS_DEL;\nusing syncable::IS_UNSYNCED;\nusing syncable::NON_UNIQUE_NAME;\nusing syncable::SPECIFICS;\nusing syncable::kEncryptedString;\n\nnamespace {\n\nconst char kTestChromeVersion[] = \"test chrome version\";\n\nvoid ExpectInt64Value(int64 expected_value,\n const base::DictionaryValue& value,\n const std::string& key) {\n std::string int64_str;\n EXPECT_TRUE(value.GetString(key, &int64_str));\n int64 val = 0;\n EXPECT_TRUE(base::StringToInt64(int64_str, &val));\n EXPECT_EQ(expected_value, val);\n}\n\nvoid ExpectTimeValue(const base::Time& expected_value,\n const base::DictionaryValue& value,\n const std::string& key) {\n std::string time_str;\n EXPECT_TRUE(value.GetString(key, &time_str));\n EXPECT_EQ(GetTimeDebugString(expected_value), time_str);\n}\n\n// Makes a non-folder child of the root node. Returns the id of the\n// newly-created node.\nint64 MakeNode(UserShare* share,\n ModelType model_type,\n const std::string& client_tag) {\n WriteTransaction trans(FROM_HERE, share);\n ReadNode root_node(&trans);\n root_node.InitByRootLookup();\n WriteNode node(&trans);\n WriteNode::InitUniqueByCreationResult result =\n node.InitUniqueByCreation(model_type, root_node, client_tag);\n EXPECT_EQ(WriteNode::INIT_SUCCESS, result);\n node.SetIsFolder(false);\n return node.GetId();\n}\n\n// Makes a folder child of a non-root node. 
Returns the id of the\n// newly-created node.\nint64 MakeFolderWithParent(UserShare* share,\n ModelType model_type,\n int64 parent_id,\n BaseNode* predecessor) {\n WriteTransaction trans(FROM_HERE, share);\n ReadNode parent_node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, parent_node.InitByIdLookup(parent_id));\n WriteNode node(&trans);\n EXPECT_TRUE(node.InitBookmarkByCreation(parent_node, predecessor));\n node.SetIsFolder(true);\n return node.GetId();\n}\n\nint64 MakeBookmarkWithParent(UserShare* share,\n int64 parent_id,\n BaseNode* predecessor) {\n WriteTransaction trans(FROM_HERE, share);\n ReadNode parent_node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, parent_node.InitByIdLookup(parent_id));\n WriteNode node(&trans);\n EXPECT_TRUE(node.InitBookmarkByCreation(parent_node, predecessor));\n return node.GetId();\n}\n\n// Creates the \"synced\" root node for a particular datatype. We use the syncable\n// methods here so that the syncer treats these nodes as if they were already\n// received from the server.\nint64 MakeServerNodeForType(UserShare* share,\n ModelType model_type) {\n sync_pb::EntitySpecifics specifics;\n AddDefaultFieldValue(model_type, &specifics);\n syncable::WriteTransaction trans(\n FROM_HERE, syncable::UNITTEST, share->directory.get());\n // Attempt to lookup by nigori tag.\n std::string type_tag = ModelTypeToRootTag(model_type);\n syncable::Id node_id = syncable::Id::CreateFromServerId(type_tag);\n syncable::MutableEntry entry(&trans, syncable::CREATE_NEW_UPDATE_ITEM,\n node_id);\n EXPECT_TRUE(entry.good());\n entry.Put(syncable::BASE_VERSION, 1);\n entry.Put(syncable::SERVER_VERSION, 1);\n entry.Put(syncable::IS_UNAPPLIED_UPDATE, false);\n entry.Put(syncable::SERVER_PARENT_ID, syncable::GetNullId());\n entry.Put(syncable::SERVER_IS_DIR, true);\n entry.Put(syncable::IS_DIR, true);\n entry.Put(syncable::SERVER_SPECIFICS, specifics);\n entry.Put(syncable::UNIQUE_SERVER_TAG, type_tag);\n entry.Put(syncable::NON_UNIQUE_NAME, type_tag);\n 
entry.Put(syncable::IS_DEL, false);\n entry.Put(syncable::SPECIFICS, specifics);\n return entry.Get(syncable::META_HANDLE);\n}\n\n// Simulates creating a \"synced\" node as a child of the root datatype node.\nint64 MakeServerNode(UserShare* share, ModelType model_type,\n const std::string& client_tag,\n const std::string& hashed_tag,\n const sync_pb::EntitySpecifics& specifics) {\n syncable::WriteTransaction trans(\n FROM_HERE, syncable::UNITTEST, share->directory.get());\n syncable::Entry root_entry(&trans, syncable::GET_BY_SERVER_TAG,\n ModelTypeToRootTag(model_type));\n EXPECT_TRUE(root_entry.good());\n syncable::Id root_id = root_entry.Get(syncable::ID);\n syncable::Id node_id = syncable::Id::CreateFromServerId(client_tag);\n syncable::MutableEntry entry(&trans, syncable::CREATE_NEW_UPDATE_ITEM,\n node_id);\n EXPECT_TRUE(entry.good());\n entry.Put(syncable::BASE_VERSION, 1);\n entry.Put(syncable::SERVER_VERSION, 1);\n entry.Put(syncable::IS_UNAPPLIED_UPDATE, false);\n entry.Put(syncable::SERVER_PARENT_ID, root_id);\n entry.Put(syncable::PARENT_ID, root_id);\n entry.Put(syncable::SERVER_IS_DIR, false);\n entry.Put(syncable::IS_DIR, false);\n entry.Put(syncable::SERVER_SPECIFICS, specifics);\n entry.Put(syncable::NON_UNIQUE_NAME, client_tag);\n entry.Put(syncable::UNIQUE_CLIENT_TAG, hashed_tag);\n entry.Put(syncable::IS_DEL, false);\n entry.Put(syncable::SPECIFICS, specifics);\n return entry.Get(syncable::META_HANDLE);\n}\n\n} // namespace\n\nclass SyncApiTest : public testing::Test {\n public:\n virtual void SetUp() {\n test_user_share_.SetUp();\n }\n\n virtual void TearDown() {\n test_user_share_.TearDown();\n }\n\n protected:\n base::MessageLoop message_loop_;\n TestUserShare test_user_share_;\n};\n\nTEST_F(SyncApiTest, SanityCheckTest) {\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n EXPECT_TRUE(trans.GetWrappedTrans());\n }\n {\n WriteTransaction trans(FROM_HERE, test_user_share_.user_share());\n 
EXPECT_TRUE(trans.GetWrappedTrans());\n }\n {\n // No entries but root should exist\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode node(&trans);\n // Metahandle 1 can be root, sanity check 2\n EXPECT_EQ(BaseNode::INIT_FAILED_ENTRY_NOT_GOOD, node.InitByIdLookup(2));\n }\n}\n\nTEST_F(SyncApiTest, BasicTagWrite) {\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode root_node(&trans);\n root_node.InitByRootLookup();\n EXPECT_EQ(root_node.GetFirstChildId(), 0);\n }\n\n ignore_result(MakeNode(test_user_share_.user_share(),\n BOOKMARKS, \"testtag\"));\n\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(BOOKMARKS, \"testtag\"));\n\n ReadNode root_node(&trans);\n root_node.InitByRootLookup();\n EXPECT_NE(node.GetId(), 0);\n EXPECT_EQ(node.GetId(), root_node.GetFirstChildId());\n }\n}\n\nTEST_F(SyncApiTest, ModelTypesSiloed) {\n {\n WriteTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode root_node(&trans);\n root_node.InitByRootLookup();\n EXPECT_EQ(root_node.GetFirstChildId(), 0);\n }\n\n ignore_result(MakeNode(test_user_share_.user_share(),\n BOOKMARKS, \"collideme\"));\n ignore_result(MakeNode(test_user_share_.user_share(),\n PREFERENCES, \"collideme\"));\n ignore_result(MakeNode(test_user_share_.user_share(),\n AUTOFILL, \"collideme\"));\n\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n\n ReadNode bookmarknode(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n bookmarknode.InitByClientTagLookup(BOOKMARKS,\n \"collideme\"));\n\n ReadNode prefnode(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n prefnode.InitByClientTagLookup(PREFERENCES,\n \"collideme\"));\n\n ReadNode autofillnode(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n autofillnode.InitByClientTagLookup(AUTOFILL,\n \"collideme\"));\n\n EXPECT_NE(bookmarknode.GetId(), prefnode.GetId());\n EXPECT_NE(autofillnode.GetId(), 
prefnode.GetId());\n EXPECT_NE(bookmarknode.GetId(), autofillnode.GetId());\n }\n}\n\nTEST_F(SyncApiTest, ReadMissingTagsFails) {\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_FAILED_ENTRY_NOT_GOOD,\n node.InitByClientTagLookup(BOOKMARKS,\n \"testtag\"));\n }\n {\n WriteTransaction trans(FROM_HERE, test_user_share_.user_share());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_FAILED_ENTRY_NOT_GOOD,\n node.InitByClientTagLookup(BOOKMARKS,\n \"testtag\"));\n }\n}\n\n// TODO(chron): Hook this all up to the server and write full integration tests\n// for update->undelete behavior.\nTEST_F(SyncApiTest, TestDeleteBehavior) {\n int64 node_id;\n int64 folder_id;\n std::string test_title(\"test1\");\n\n {\n WriteTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode root_node(&trans);\n root_node.InitByRootLookup();\n\n // we'll use this spare folder later\n WriteNode folder_node(&trans);\n EXPECT_TRUE(folder_node.InitBookmarkByCreation(root_node, NULL));\n folder_id = folder_node.GetId();\n\n WriteNode wnode(&trans);\n WriteNode::InitUniqueByCreationResult result =\n wnode.InitUniqueByCreation(BOOKMARKS, root_node, \"testtag\");\n EXPECT_EQ(WriteNode::INIT_SUCCESS, result);\n wnode.SetIsFolder(false);\n wnode.SetTitle(UTF8ToWide(test_title));\n\n node_id = wnode.GetId();\n }\n\n // Ensure we can delete something with a tag.\n {\n WriteTransaction trans(FROM_HERE, test_user_share_.user_share());\n WriteNode wnode(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n wnode.InitByClientTagLookup(BOOKMARKS,\n \"testtag\"));\n EXPECT_FALSE(wnode.GetIsFolder());\n EXPECT_EQ(wnode.GetTitle(), test_title);\n\n wnode.Tombstone();\n }\n\n // Lookup of a node which was deleted should return failure,\n // but have found some data about the node.\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_FAILED_ENTRY_IS_DEL,\n 
node.InitByClientTagLookup(BOOKMARKS,\n \"testtag\"));\n // Note that for proper function of this API this doesn't need to be\n // filled, we're checking just to make sure the DB worked in this test.\n EXPECT_EQ(node.GetTitle(), test_title);\n }\n\n {\n WriteTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode folder_node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, folder_node.InitByIdLookup(folder_id));\n\n WriteNode wnode(&trans);\n // This will undelete the tag.\n WriteNode::InitUniqueByCreationResult result =\n wnode.InitUniqueByCreation(BOOKMARKS, folder_node, \"testtag\");\n EXPECT_EQ(WriteNode::INIT_SUCCESS, result);\n EXPECT_EQ(wnode.GetIsFolder(), false);\n EXPECT_EQ(wnode.GetParentId(), folder_node.GetId());\n EXPECT_EQ(wnode.GetId(), node_id);\n EXPECT_NE(wnode.GetTitle(), test_title); // Title should be cleared\n wnode.SetTitle(UTF8ToWide(test_title));\n }\n\n // Now look up should work.\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(BOOKMARKS,\n \"testtag\"));\n EXPECT_EQ(node.GetTitle(), test_title);\n EXPECT_EQ(node.GetModelType(), BOOKMARKS);\n }\n}\n\nTEST_F(SyncApiTest, WriteAndReadPassword) {\n KeyParams params = {\"localhost\", \"username\", \"passphrase\"};\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n trans.GetCryptographer()->AddKey(params);\n }\n {\n WriteTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode root_node(&trans);\n root_node.InitByRootLookup();\n\n WriteNode password_node(&trans);\n WriteNode::InitUniqueByCreationResult result =\n password_node.InitUniqueByCreation(PASSWORDS,\n root_node, \"foo\");\n EXPECT_EQ(WriteNode::INIT_SUCCESS, result);\n sync_pb::PasswordSpecificsData data;\n data.set_password_value(\"secret\");\n password_node.SetPasswordSpecifics(data);\n }\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode 
root_node(&trans);\n root_node.InitByRootLookup();\n\n ReadNode password_node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n password_node.InitByClientTagLookup(PASSWORDS, \"foo\"));\n const sync_pb::PasswordSpecificsData& data =\n password_node.GetPasswordSpecifics();\n EXPECT_EQ(\"secret\", data.password_value());\n }\n}\n\nTEST_F(SyncApiTest, WriteEncryptedTitle) {\n KeyParams params = {\"localhost\", \"username\", \"passphrase\"};\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n trans.GetCryptographer()->AddKey(params);\n }\n test_user_share_.encryption_handler()->EnableEncryptEverything();\n int bookmark_id;\n {\n WriteTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode root_node(&trans);\n root_node.InitByRootLookup();\n\n WriteNode bookmark_node(&trans);\n ASSERT_TRUE(bookmark_node.InitBookmarkByCreation(root_node, NULL));\n bookmark_id = bookmark_node.GetId();\n bookmark_node.SetTitle(UTF8ToWide(\"foo\"));\n\n WriteNode pref_node(&trans);\n WriteNode::InitUniqueByCreationResult result =\n pref_node.InitUniqueByCreation(PREFERENCES, root_node, \"bar\");\n ASSERT_EQ(WriteNode::INIT_SUCCESS, result);\n pref_node.SetTitle(UTF8ToWide(\"bar\"));\n }\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode root_node(&trans);\n root_node.InitByRootLookup();\n\n ReadNode bookmark_node(&trans);\n ASSERT_EQ(BaseNode::INIT_OK, bookmark_node.InitByIdLookup(bookmark_id));\n EXPECT_EQ(\"foo\", bookmark_node.GetTitle());\n EXPECT_EQ(kEncryptedString,\n bookmark_node.GetEntry()->Get(syncable::NON_UNIQUE_NAME));\n\n ReadNode pref_node(&trans);\n ASSERT_EQ(BaseNode::INIT_OK,\n pref_node.InitByClientTagLookup(PREFERENCES,\n \"bar\"));\n EXPECT_EQ(kEncryptedString, pref_node.GetTitle());\n }\n}\n\nTEST_F(SyncApiTest, BaseNodeSetSpecifics) {\n int64 child_id = MakeNode(test_user_share_.user_share(),\n BOOKMARKS, \"testtag\");\n WriteTransaction trans(FROM_HERE, test_user_share_.user_share());\n WriteNode 
node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, node.InitByIdLookup(child_id));\n\n sync_pb::EntitySpecifics entity_specifics;\n entity_specifics.mutable_bookmark()->set_url(\"http://www.google.com\");\n\n EXPECT_NE(entity_specifics.SerializeAsString(),\n node.GetEntitySpecifics().SerializeAsString());\n node.SetEntitySpecifics(entity_specifics);\n EXPECT_EQ(entity_specifics.SerializeAsString(),\n node.GetEntitySpecifics().SerializeAsString());\n}\n\nTEST_F(SyncApiTest, BaseNodeSetSpecificsPreservesUnknownFields) {\n int64 child_id = MakeNode(test_user_share_.user_share(),\n BOOKMARKS, \"testtag\");\n WriteTransaction trans(FROM_HERE, test_user_share_.user_share());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, node.InitByIdLookup(child_id));\n EXPECT_TRUE(node.GetEntitySpecifics().unknown_fields().empty());\n\n sync_pb::EntitySpecifics entity_specifics;\n entity_specifics.mutable_bookmark()->set_url(\"http://www.google.com\");\n entity_specifics.mutable_unknown_fields()->AddFixed32(5, 100);\n node.SetEntitySpecifics(entity_specifics);\n EXPECT_FALSE(node.GetEntitySpecifics().unknown_fields().empty());\n\n entity_specifics.mutable_unknown_fields()->Clear();\n node.SetEntitySpecifics(entity_specifics);\n EXPECT_FALSE(node.GetEntitySpecifics().unknown_fields().empty());\n}\n\nnamespace {\n\nvoid CheckNodeValue(const BaseNode& node, const base::DictionaryValue& value,\n bool is_detailed) {\n size_t expected_field_count = 4;\n\n ExpectInt64Value(node.GetId(), value, \"id\");\n {\n bool is_folder = false;\n EXPECT_TRUE(value.GetBoolean(\"isFolder\", &is_folder));\n EXPECT_EQ(node.GetIsFolder(), is_folder);\n }\n ExpectDictStringValue(node.GetTitle(), value, \"title\");\n\n ModelType expected_model_type = node.GetModelType();\n std::string type_str;\n EXPECT_TRUE(value.GetString(\"type\", &type_str));\n if (expected_model_type >= FIRST_REAL_MODEL_TYPE) {\n ModelType model_type = ModelTypeFromString(type_str);\n EXPECT_EQ(expected_model_type, model_type);\n } else 
if (expected_model_type == TOP_LEVEL_FOLDER) {\n EXPECT_EQ(\"Top-level folder\", type_str);\n } else if (expected_model_type == UNSPECIFIED) {\n EXPECT_EQ(\"Unspecified\", type_str);\n } else {\n ADD_FAILURE();\n }\n\n if (is_detailed) {\n {\n scoped_ptr expected_entry(\n node.GetEntry()->ToValue(NULL));\n const base::Value* entry = NULL;\n EXPECT_TRUE(value.Get(\"entry\", &entry));\n EXPECT_TRUE(base::Value::Equals(entry, expected_entry.get()));\n }\n\n ExpectInt64Value(node.GetParentId(), value, \"parentId\");\n ExpectTimeValue(node.GetModificationTime(), value, \"modificationTime\");\n ExpectInt64Value(node.GetExternalId(), value, \"externalId\");\n expected_field_count += 4;\n\n if (value.HasKey(\"predecessorId\")) {\n ExpectInt64Value(node.GetPredecessorId(), value, \"predecessorId\");\n expected_field_count++;\n }\n if (value.HasKey(\"successorId\")) {\n ExpectInt64Value(node.GetSuccessorId(), value, \"successorId\");\n expected_field_count++;\n }\n if (value.HasKey(\"firstChildId\")) {\n ExpectInt64Value(node.GetFirstChildId(), value, \"firstChildId\");\n expected_field_count++;\n }\n }\n\n EXPECT_EQ(expected_field_count, value.size());\n}\n\n} // namespace\n\nTEST_F(SyncApiTest, BaseNodeGetSummaryAsValue) {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode node(&trans);\n node.InitByRootLookup();\n scoped_ptr details(node.GetSummaryAsValue());\n if (details) {\n CheckNodeValue(node, *details, false);\n } else {\n ADD_FAILURE();\n }\n}\n\nTEST_F(SyncApiTest, BaseNodeGetDetailsAsValue) {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode node(&trans);\n node.InitByRootLookup();\n scoped_ptr details(node.GetDetailsAsValue());\n if (details) {\n CheckNodeValue(node, *details, true);\n } else {\n ADD_FAILURE();\n }\n}\n\nTEST_F(SyncApiTest, EmptyTags) {\n WriteTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode root_node(&trans);\n root_node.InitByRootLookup();\n WriteNode node(&trans);\n 
std::string empty_tag;\n WriteNode::InitUniqueByCreationResult result =\n node.InitUniqueByCreation(TYPED_URLS, root_node, empty_tag);\n EXPECT_NE(WriteNode::INIT_SUCCESS, result);\n EXPECT_EQ(BaseNode::INIT_FAILED_PRECONDITION,\n node.InitByTagLookup(empty_tag));\n}\n\n// Test counting nodes when the type's root node has no children.\nTEST_F(SyncApiTest, GetTotalNodeCountEmpty) {\n int64 type_root = MakeServerNodeForType(test_user_share_.user_share(),\n BOOKMARKS);\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode type_root_node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n type_root_node.InitByIdLookup(type_root));\n EXPECT_EQ(1, type_root_node.GetTotalNodeCount());\n }\n}\n\n// Test counting nodes when there is one child beneath the type's root.\nTEST_F(SyncApiTest, GetTotalNodeCountOneChild) {\n int64 type_root = MakeServerNodeForType(test_user_share_.user_share(),\n BOOKMARKS);\n int64 parent = MakeFolderWithParent(test_user_share_.user_share(),\n BOOKMARKS,\n type_root,\n NULL);\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode type_root_node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n type_root_node.InitByIdLookup(type_root));\n EXPECT_EQ(2, type_root_node.GetTotalNodeCount());\n ReadNode parent_node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n parent_node.InitByIdLookup(parent));\n EXPECT_EQ(1, parent_node.GetTotalNodeCount());\n }\n}\n\n// Test counting nodes when there are multiple children beneath the type root,\n// and one of those children has children of its own.\nTEST_F(SyncApiTest, GetTotalNodeCountMultipleChildren) {\n int64 type_root = MakeServerNodeForType(test_user_share_.user_share(),\n BOOKMARKS);\n int64 parent = MakeFolderWithParent(test_user_share_.user_share(),\n BOOKMARKS,\n type_root,\n NULL);\n ignore_result(MakeFolderWithParent(test_user_share_.user_share(),\n BOOKMARKS,\n type_root,\n NULL));\n int64 child1 = MakeFolderWithParent(\n test_user_share_.user_share(),\n BOOKMARKS,\n 
parent,\n NULL);\n ignore_result(MakeBookmarkWithParent(\n test_user_share_.user_share(),\n parent,\n NULL));\n ignore_result(MakeBookmarkWithParent(\n test_user_share_.user_share(),\n child1,\n NULL));\n\n {\n ReadTransaction trans(FROM_HERE, test_user_share_.user_share());\n ReadNode type_root_node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n type_root_node.InitByIdLookup(type_root));\n EXPECT_EQ(6, type_root_node.GetTotalNodeCount());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByIdLookup(parent));\n EXPECT_EQ(4, node.GetTotalNodeCount());\n }\n}\n\nnamespace {\n\nclass TestHttpPostProviderInterface : public HttpPostProviderInterface {\n public:\n virtual ~TestHttpPostProviderInterface() {}\n\n virtual void SetExtraRequestHeaders(const char* headers) OVERRIDE {}\n virtual void SetURL(const char* url, int port) OVERRIDE {}\n virtual void SetPostPayload(const char* content_type,\n int content_length,\n const char* content) OVERRIDE {}\n virtual bool MakeSynchronousPost(int* error_code, int* response_code)\n OVERRIDE {\n return false;\n }\n virtual int GetResponseContentLength() const OVERRIDE {\n return 0;\n }\n virtual const char* GetResponseContent() const OVERRIDE {\n return \"\";\n }\n virtual const std::string GetResponseHeaderValue(\n const std::string& name) const OVERRIDE {\n return std::string();\n }\n virtual void Abort() OVERRIDE {}\n};\n\nclass TestHttpPostProviderFactory : public HttpPostProviderFactory {\n public:\n virtual ~TestHttpPostProviderFactory() {}\n virtual HttpPostProviderInterface* Create() OVERRIDE {\n return new TestHttpPostProviderInterface();\n }\n virtual void Destroy(HttpPostProviderInterface* http) OVERRIDE {\n delete static_cast(http);\n }\n};\n\nclass SyncManagerObserverMock : public SyncManager::Observer {\n public:\n MOCK_METHOD1(OnSyncCycleCompleted,\n void(const SyncSessionSnapshot&)); // NOLINT\n MOCK_METHOD4(OnInitializationComplete,\n void(const WeakHandle&,\n const WeakHandle&,\n bool,\n 
syncer::ModelTypeSet)); // NOLINT\n MOCK_METHOD1(OnConnectionStatusChange, void(ConnectionStatus)); // NOLINT\n MOCK_METHOD0(OnStopSyncingPermanently, void()); // NOLINT\n MOCK_METHOD1(OnUpdatedToken, void(const std::string&)); // NOLINT\n MOCK_METHOD1(OnActionableError,\n void(const SyncProtocolError&)); // NOLINT\n};\n\nclass SyncEncryptionHandlerObserverMock\n : public SyncEncryptionHandler::Observer {\n public:\n MOCK_METHOD2(OnPassphraseRequired,\n void(PassphraseRequiredReason,\n const sync_pb::EncryptedData&)); // NOLINT\n MOCK_METHOD0(OnPassphraseAccepted, void()); // NOLINT\n MOCK_METHOD2(OnBootstrapTokenUpdated,\n void(const std::string&, BootstrapTokenType type)); // NOLINT\n MOCK_METHOD2(OnEncryptedTypesChanged,\n void(ModelTypeSet, bool)); // NOLINT\n MOCK_METHOD0(OnEncryptionComplete, void()); // NOLINT\n MOCK_METHOD1(OnCryptographerStateChanged, void(Cryptographer*)); // NOLINT\n MOCK_METHOD2(OnPassphraseTypeChanged, void(PassphraseType,\n base::Time)); // NOLINT\n};\n\n} // namespace\n\nclass SyncManagerTest : public testing::Test,\n public SyncManager::ChangeDelegate {\n protected:\n enum NigoriStatus {\n DONT_WRITE_NIGORI,\n WRITE_TO_NIGORI\n };\n\n enum EncryptionStatus {\n UNINITIALIZED,\n DEFAULT_ENCRYPTION,\n FULL_ENCRYPTION\n };\n\n SyncManagerTest()\n : fake_invalidator_(NULL),\n sync_manager_(\"Test sync manager\") {\n switches_.encryption_method =\n InternalComponentsFactory::ENCRYPTION_KEYSTORE;\n }\n\n virtual ~SyncManagerTest() {\n EXPECT_FALSE(fake_invalidator_);\n }\n\n // Test implementation.\n void SetUp() {\n ASSERT_TRUE(temp_dir_.CreateUniqueTempDir());\n\n SyncCredentials credentials;\n credentials.email = \"foo@bar.com\";\n credentials.sync_token = \"sometoken\";\n\n fake_invalidator_ = new FakeInvalidator();\n\n sync_manager_.AddObserver(&manager_observer_);\n EXPECT_CALL(manager_observer_, OnInitializationComplete(_, _, _, _)).\n WillOnce(SaveArg<0>(&js_backend_));\n\n EXPECT_FALSE(js_backend_.IsInitialized());\n\n std::vector 
workers;\n ModelSafeRoutingInfo routing_info;\n GetModelSafeRoutingInfo(&routing_info);\n\n // Takes ownership of |fake_invalidator_|.\n sync_manager_.Init(\n temp_dir_.path(),\n WeakHandle(),\n \"bogus\",\n 0,\n false,\n scoped_ptr(new TestHttpPostProviderFactory()),\n workers,\n &extensions_activity_monitor_,\n this,\n credentials,\n scoped_ptr(fake_invalidator_),\n \"fake_invalidator_client_id\",\n std::string(),\n std::string(), // bootstrap tokens\n scoped_ptr(GetFactory()),\n &encryptor_,\n &handler_,\n NULL,\n false);\n\n sync_manager_.GetEncryptionHandler()->AddObserver(&encryption_observer_);\n\n EXPECT_TRUE(js_backend_.IsInitialized());\n\n for (ModelSafeRoutingInfo::iterator i = routing_info.begin();\n i != routing_info.end(); ++i) {\n type_roots_[i->first] = MakeServerNodeForType(\n sync_manager_.GetUserShare(), i->first);\n }\n PumpLoop();\n\n EXPECT_TRUE(fake_invalidator_->IsHandlerRegistered(&sync_manager_));\n }\n\n void TearDown() {\n sync_manager_.RemoveObserver(&manager_observer_);\n sync_manager_.ShutdownOnSyncThread();\n // We can't assert that |sync_manager_| isn't registered with\n // |fake_invalidator_| anymore because |fake_invalidator_| is now\n // destroyed.\n fake_invalidator_ = NULL;\n PumpLoop();\n }\n\n void GetModelSafeRoutingInfo(ModelSafeRoutingInfo* out) {\n (*out)[NIGORI] = GROUP_PASSIVE;\n (*out)[DEVICE_INFO] = GROUP_PASSIVE;\n (*out)[EXPERIMENTS] = GROUP_PASSIVE;\n (*out)[BOOKMARKS] = GROUP_PASSIVE;\n (*out)[THEMES] = GROUP_PASSIVE;\n (*out)[SESSIONS] = GROUP_PASSIVE;\n (*out)[PASSWORDS] = GROUP_PASSIVE;\n (*out)[PREFERENCES] = GROUP_PASSIVE;\n (*out)[PRIORITY_PREFERENCES] = GROUP_PASSIVE;\n }\n\n virtual void OnChangesApplied(\n ModelType model_type,\n int64 model_version,\n const BaseTransaction* trans,\n const ImmutableChangeRecordList& changes) OVERRIDE {}\n\n virtual void OnChangesComplete(ModelType model_type) OVERRIDE {}\n\n // Helper methods.\n bool SetUpEncryption(NigoriStatus nigori_status,\n EncryptionStatus 
encryption_status) {\n UserShare* share = sync_manager_.GetUserShare();\n\n // We need to create the nigori node as if it were an applied server update.\n int64 nigori_id = GetIdForDataType(NIGORI);\n if (nigori_id == kInvalidId)\n return false;\n\n // Set the nigori cryptographer information.\n if (encryption_status == FULL_ENCRYPTION)\n sync_manager_.GetEncryptionHandler()->EnableEncryptEverything();\n\n WriteTransaction trans(FROM_HERE, share);\n Cryptographer* cryptographer = trans.GetCryptographer();\n if (!cryptographer)\n return false;\n if (encryption_status != UNINITIALIZED) {\n KeyParams params = {\"localhost\", \"dummy\", \"foobar\"};\n cryptographer->AddKey(params);\n } else {\n DCHECK_NE(nigori_status, WRITE_TO_NIGORI);\n }\n if (nigori_status == WRITE_TO_NIGORI) {\n sync_pb::NigoriSpecifics nigori;\n cryptographer->GetKeys(nigori.mutable_encryption_keybag());\n share->directory->GetNigoriHandler()->UpdateNigoriFromEncryptedTypes(\n &nigori,\n trans.GetWrappedTrans());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, node.InitByIdLookup(nigori_id));\n node.SetNigoriSpecifics(nigori);\n }\n return cryptographer->is_ready();\n }\n\n int64 GetIdForDataType(ModelType type) {\n if (type_roots_.count(type) == 0)\n return 0;\n return type_roots_[type];\n }\n\n void PumpLoop() {\n message_loop_.RunUntilIdle();\n }\n\n void SendJsMessage(const std::string& name, const JsArgList& args,\n const WeakHandle& reply_handler) {\n js_backend_.Call(FROM_HERE, &JsBackend::ProcessJsMessage,\n name, args, reply_handler);\n PumpLoop();\n }\n\n void SetJsEventHandler(const WeakHandle& event_handler) {\n js_backend_.Call(FROM_HERE, &JsBackend::SetJsEventHandler,\n event_handler);\n PumpLoop();\n }\n\n // Looks up an entry by client tag and resets IS_UNSYNCED value to false.\n // Returns true if entry was previously unsynced, false if IS_UNSYNCED was\n // already false.\n bool ResetUnsyncedEntry(ModelType type,\n const std::string& client_tag) {\n UserShare* share = 
sync_manager_.GetUserShare();\n syncable::WriteTransaction trans(\n FROM_HERE, syncable::UNITTEST, share->directory.get());\n const std::string hash = syncable::GenerateSyncableHash(type, client_tag);\n syncable::MutableEntry entry(&trans, syncable::GET_BY_CLIENT_TAG,\n hash);\n EXPECT_TRUE(entry.good());\n if (!entry.Get(IS_UNSYNCED))\n return false;\n entry.Put(IS_UNSYNCED, false);\n return true;\n }\n\n virtual InternalComponentsFactory* GetFactory() {\n return new TestInternalComponentsFactory(GetSwitches(), STORAGE_IN_MEMORY);\n }\n\n // Returns true if we are currently encrypting all sync data. May\n // be called on any thread.\n bool EncryptEverythingEnabledForTest() {\n return sync_manager_.GetEncryptionHandler()->EncryptEverythingEnabled();\n }\n\n // Gets the set of encrypted types from the cryptographer\n // Note: opens a transaction. May be called from any thread.\n ModelTypeSet GetEncryptedTypes() {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n return GetEncryptedTypesWithTrans(&trans);\n }\n\n ModelTypeSet GetEncryptedTypesWithTrans(BaseTransaction* trans) {\n return trans->GetDirectory()->GetNigoriHandler()->\n GetEncryptedTypes(trans->GetWrappedTrans());\n }\n\n void SimulateInvalidatorStateChangeForTest(InvalidatorState state) {\n DCHECK(sync_manager_.thread_checker_.CalledOnValidThread());\n sync_manager_.OnInvalidatorStateChange(state);\n }\n\n void TriggerOnIncomingNotificationForTest(ModelTypeSet model_types) {\n DCHECK(sync_manager_.thread_checker_.CalledOnValidThread());\n ModelTypeInvalidationMap invalidation_map =\n ModelTypeSetToInvalidationMap(model_types, std::string());\n sync_manager_.OnIncomingInvalidation(\n ModelTypeInvalidationMapToObjectIdInvalidationMap(\n invalidation_map));\n }\n\n void SetProgressMarkerForType(ModelType type, bool set) {\n if (set) {\n sync_pb::DataTypeProgressMarker marker;\n marker.set_token(\"token\");\n marker.set_data_type_id(GetSpecificsFieldNumberFromModelType(type));\n 
sync_manager_.directory()->SetDownloadProgress(type, marker);\n } else {\n sync_pb::DataTypeProgressMarker marker;\n sync_manager_.directory()->SetDownloadProgress(type, marker);\n }\n }\n\n InternalComponentsFactory::Switches GetSwitches() const {\n return switches_;\n }\n\n private:\n // Needed by |sync_manager_|.\n base::MessageLoop message_loop_;\n // Needed by |sync_manager_|.\n base::ScopedTempDir temp_dir_;\n // Sync Id's for the roots of the enabled datatypes.\n std::map type_roots_;\n FakeExtensionsActivityMonitor extensions_activity_monitor_;\n\n protected:\n FakeEncryptor encryptor_;\n TestUnrecoverableErrorHandler handler_;\n FakeInvalidator* fake_invalidator_;\n SyncManagerImpl sync_manager_;\n WeakHandle js_backend_;\n StrictMock manager_observer_;\n StrictMock encryption_observer_;\n InternalComponentsFactory::Switches switches_;\n};\n\nTEST_F(SyncManagerTest, UpdateEnabledTypes) {\n ModelSafeRoutingInfo routes;\n GetModelSafeRoutingInfo(&routes);\n const ModelTypeSet enabled_types = GetRoutingInfoTypes(routes);\n sync_manager_.UpdateEnabledTypes(enabled_types);\n EXPECT_EQ(ModelTypeSetToObjectIdSet(enabled_types),\n fake_invalidator_->GetRegisteredIds(&sync_manager_));\n}\n\nTEST_F(SyncManagerTest, RegisterInvalidationHandler) {\n FakeInvalidationHandler fake_handler;\n sync_manager_.RegisterInvalidationHandler(&fake_handler);\n EXPECT_TRUE(fake_invalidator_->IsHandlerRegistered(&fake_handler));\n\n const ObjectIdSet& ids =\n ModelTypeSetToObjectIdSet(ModelTypeSet(BOOKMARKS, PREFERENCES));\n sync_manager_.UpdateRegisteredInvalidationIds(&fake_handler, ids);\n EXPECT_EQ(ids, fake_invalidator_->GetRegisteredIds(&fake_handler));\n\n sync_manager_.UnregisterInvalidationHandler(&fake_handler);\n EXPECT_FALSE(fake_invalidator_->IsHandlerRegistered(&fake_handler));\n}\n\nTEST_F(SyncManagerTest, ProcessJsMessage) {\n const JsArgList kNoArgs;\n\n StrictMock reply_handler;\n\n base::ListValue disabled_args;\n disabled_args.Append(new 
base::StringValue(\"TRANSIENT_INVALIDATION_ERROR\"));\n\n EXPECT_CALL(reply_handler,\n HandleJsReply(\"getNotificationState\",\n HasArgsAsList(disabled_args)));\n\n // This message should be dropped.\n SendJsMessage(\"unknownMessage\", kNoArgs, reply_handler.AsWeakHandle());\n\n SendJsMessage(\"getNotificationState\", kNoArgs, reply_handler.AsWeakHandle());\n}\n\nTEST_F(SyncManagerTest, ProcessJsMessageGetRootNodeDetails) {\n const JsArgList kNoArgs;\n\n StrictMock reply_handler;\n\n JsArgList return_args;\n\n EXPECT_CALL(reply_handler,\n HandleJsReply(\"getRootNodeDetails\", _))\n .WillOnce(SaveArg<1>(&return_args));\n\n SendJsMessage(\"getRootNodeDetails\", kNoArgs, reply_handler.AsWeakHandle());\n\n EXPECT_EQ(1u, return_args.Get().GetSize());\n const base::DictionaryValue* node_info = NULL;\n EXPECT_TRUE(return_args.Get().GetDictionary(0, &node_info));\n if (node_info) {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode node(&trans);\n node.InitByRootLookup();\n CheckNodeValue(node, *node_info, true);\n } else {\n ADD_FAILURE();\n }\n}\n\nvoid CheckGetNodesByIdReturnArgs(SyncManager* sync_manager,\n const JsArgList& return_args,\n int64 id,\n bool is_detailed) {\n EXPECT_EQ(1u, return_args.Get().GetSize());\n const base::ListValue* nodes = NULL;\n ASSERT_TRUE(return_args.Get().GetList(0, &nodes));\n ASSERT_TRUE(nodes);\n EXPECT_EQ(1u, nodes->GetSize());\n const base::DictionaryValue* node_info = NULL;\n EXPECT_TRUE(nodes->GetDictionary(0, &node_info));\n ASSERT_TRUE(node_info);\n ReadTransaction trans(FROM_HERE, sync_manager->GetUserShare());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, node.InitByIdLookup(id));\n CheckNodeValue(node, *node_info, is_detailed);\n}\n\nclass SyncManagerGetNodesByIdTest : public SyncManagerTest {\n protected:\n virtual ~SyncManagerGetNodesByIdTest() {}\n\n void RunGetNodesByIdTest(const char* message_name, bool is_detailed) {\n int64 root_id = kInvalidId;\n {\n ReadTransaction trans(FROM_HERE, 
sync_manager_.GetUserShare());\n ReadNode root_node(&trans);\n root_node.InitByRootLookup();\n root_id = root_node.GetId();\n }\n\n int64 child_id =\n MakeNode(sync_manager_.GetUserShare(), BOOKMARKS, \"testtag\");\n\n StrictMock reply_handler;\n\n JsArgList return_args;\n\n const int64 ids[] = { root_id, child_id };\n\n EXPECT_CALL(reply_handler,\n HandleJsReply(message_name, _))\n .Times(arraysize(ids)).WillRepeatedly(SaveArg<1>(&return_args));\n\n for (size_t i = 0; i < arraysize(ids); ++i) {\n base::ListValue args;\n base::ListValue* id_values = new base::ListValue();\n args.Append(id_values);\n id_values->Append(new base::StringValue(base::Int64ToString(ids[i])));\n SendJsMessage(message_name,\n JsArgList(&args), reply_handler.AsWeakHandle());\n\n CheckGetNodesByIdReturnArgs(&sync_manager_, return_args,\n ids[i], is_detailed);\n }\n }\n\n void RunGetNodesByIdFailureTest(const char* message_name) {\n StrictMock reply_handler;\n\n base::ListValue empty_list_args;\n empty_list_args.Append(new base::ListValue());\n\n EXPECT_CALL(reply_handler,\n HandleJsReply(message_name,\n HasArgsAsList(empty_list_args)))\n .Times(6);\n\n {\n base::ListValue args;\n SendJsMessage(message_name,\n JsArgList(&args), reply_handler.AsWeakHandle());\n }\n\n {\n base::ListValue args;\n args.Append(new base::ListValue());\n SendJsMessage(message_name,\n JsArgList(&args), reply_handler.AsWeakHandle());\n }\n\n {\n base::ListValue args;\n base::ListValue* ids = new base::ListValue();\n args.Append(ids);\n ids->Append(new base::StringValue(std::string()));\n SendJsMessage(\n message_name, JsArgList(&args), reply_handler.AsWeakHandle());\n }\n\n {\n base::ListValue args;\n base::ListValue* ids = new base::ListValue();\n args.Append(ids);\n ids->Append(new base::StringValue(\"nonsense\"));\n SendJsMessage(message_name,\n JsArgList(&args), reply_handler.AsWeakHandle());\n }\n\n {\n base::ListValue args;\n base::ListValue* ids = new base::ListValue();\n args.Append(ids);\n ids->Append(new 
base::StringValue(\"0\"));\n SendJsMessage(message_name,\n JsArgList(&args), reply_handler.AsWeakHandle());\n }\n\n {\n base::ListValue args;\n base::ListValue* ids = new base::ListValue();\n args.Append(ids);\n ids->Append(new base::StringValue(\"9999\"));\n SendJsMessage(message_name,\n JsArgList(&args), reply_handler.AsWeakHandle());\n }\n }\n};\n\nTEST_F(SyncManagerGetNodesByIdTest, GetNodeSummariesById) {\n RunGetNodesByIdTest(\"getNodeSummariesById\", false);\n}\n\nTEST_F(SyncManagerGetNodesByIdTest, GetNodeDetailsById) {\n RunGetNodesByIdTest(\"getNodeDetailsById\", true);\n}\n\nTEST_F(SyncManagerGetNodesByIdTest, GetNodeSummariesByIdFailure) {\n RunGetNodesByIdFailureTest(\"getNodeSummariesById\");\n}\n\nTEST_F(SyncManagerGetNodesByIdTest, GetNodeDetailsByIdFailure) {\n RunGetNodesByIdFailureTest(\"getNodeDetailsById\");\n}\n\nTEST_F(SyncManagerTest, GetChildNodeIds) {\n StrictMock reply_handler;\n\n JsArgList return_args;\n\n EXPECT_CALL(reply_handler,\n HandleJsReply(\"getChildNodeIds\", _))\n .Times(1).WillRepeatedly(SaveArg<1>(&return_args));\n\n {\n base::ListValue args;\n args.Append(new base::StringValue(\"1\"));\n SendJsMessage(\"getChildNodeIds\",\n JsArgList(&args), reply_handler.AsWeakHandle());\n }\n\n EXPECT_EQ(1u, return_args.Get().GetSize());\n const base::ListValue* nodes = NULL;\n ASSERT_TRUE(return_args.Get().GetList(0, &nodes));\n ASSERT_TRUE(nodes);\n EXPECT_EQ(9u, nodes->GetSize());\n}\n\nTEST_F(SyncManagerTest, GetChildNodeIdsFailure) {\n StrictMock reply_handler;\n\n base::ListValue empty_list_args;\n empty_list_args.Append(new base::ListValue());\n\n EXPECT_CALL(reply_handler,\n HandleJsReply(\"getChildNodeIds\",\n HasArgsAsList(empty_list_args)))\n .Times(5);\n\n {\n base::ListValue args;\n SendJsMessage(\"getChildNodeIds\",\n JsArgList(&args), reply_handler.AsWeakHandle());\n }\n\n {\n base::ListValue args;\n args.Append(new base::StringValue(std::string()));\n SendJsMessage(\n \"getChildNodeIds\", JsArgList(&args), 
reply_handler.AsWeakHandle());\n }\n\n {\n base::ListValue args;\n args.Append(new base::StringValue(\"nonsense\"));\n SendJsMessage(\"getChildNodeIds\",\n JsArgList(&args), reply_handler.AsWeakHandle());\n }\n\n {\n base::ListValue args;\n args.Append(new base::StringValue(\"0\"));\n SendJsMessage(\"getChildNodeIds\",\n JsArgList(&args), reply_handler.AsWeakHandle());\n }\n\n {\n base::ListValue args;\n args.Append(new base::StringValue(\"9999\"));\n SendJsMessage(\"getChildNodeIds\",\n JsArgList(&args), reply_handler.AsWeakHandle());\n }\n}\n\nTEST_F(SyncManagerTest, GetAllNodesTest) {\n StrictMock reply_handler;\n JsArgList return_args;\n\n EXPECT_CALL(reply_handler,\n HandleJsReply(\"getAllNodes\", _))\n .Times(1).WillRepeatedly(SaveArg<1>(&return_args));\n\n {\n base::ListValue args;\n SendJsMessage(\"getAllNodes\",\n JsArgList(&args), reply_handler.AsWeakHandle());\n }\n\n // There's not much value in verifying every attribute on every node here.\n // Most of the value of this test has already been achieved: we've verified we\n // can call the above function without crashing or leaking memory.\n //\n // Let's just check the list size and a few of its elements. 
Anything more\n // would make this test brittle without greatly increasing our chances of\n // catching real bugs.\n\n const base::ListValue* node_list;\n const base::DictionaryValue* first_result;\n\n // The resulting argument list should have one argument, a list of nodes.\n ASSERT_EQ(1U, return_args.Get().GetSize());\n ASSERT_TRUE(return_args.Get().GetList(0, &node_list));\n\n // The database creation logic depends on the routing info.\n // Refer to setup methods for more information.\n ModelSafeRoutingInfo routes;\n GetModelSafeRoutingInfo(&routes);\n size_t directory_size = routes.size() + 1;\n\n ASSERT_EQ(directory_size, node_list->GetSize());\n ASSERT_TRUE(node_list->GetDictionary(0, &first_result));\n EXPECT_TRUE(first_result->HasKey(\"ID\"));\n EXPECT_TRUE(first_result->HasKey(\"NON_UNIQUE_NAME\"));\n}\n\n// Simulate various invalidator state changes. Those should propagate\n// JS events.\nTEST_F(SyncManagerTest, OnInvalidatorStateChangeJsEvents) {\n StrictMock event_handler;\n\n base::DictionaryValue enabled_details;\n enabled_details.SetString(\"state\", \"INVALIDATIONS_ENABLED\");\n base::DictionaryValue credentials_rejected_details;\n credentials_rejected_details.SetString(\n \"state\", \"INVALIDATION_CREDENTIALS_REJECTED\");\n base::DictionaryValue transient_error_details;\n transient_error_details.SetString(\"state\", \"TRANSIENT_INVALIDATION_ERROR\");\n base::DictionaryValue auth_error_details;\n auth_error_details.SetString(\"status\", \"CONNECTION_AUTH_ERROR\");\n\n EXPECT_CALL(manager_observer_,\n OnConnectionStatusChange(CONNECTION_AUTH_ERROR));\n\n EXPECT_CALL(\n event_handler,\n HandleJsEvent(\"onConnectionStatusChange\",\n HasDetailsAsDictionary(auth_error_details)));\n\n EXPECT_CALL(event_handler,\n HandleJsEvent(\"onNotificationStateChange\",\n HasDetailsAsDictionary(enabled_details)));\n\n EXPECT_CALL(\n event_handler,\n HandleJsEvent(\"onNotificationStateChange\",\n HasDetailsAsDictionary(credentials_rejected_details)))\n .Times(2);\n\n 
EXPECT_CALL(event_handler,\n HandleJsEvent(\"onNotificationStateChange\",\n HasDetailsAsDictionary(transient_error_details)));\n\n // Test needs to simulate INVALIDATION_CREDENTIALS_REJECTED with event handler\n // attached because this is the only time when CONNECTION_AUTH_ERROR\n // notification will be generated, therefore the only chance to verify that\n // \"onConnectionStatusChange\" event is delivered\n SetJsEventHandler(event_handler.AsWeakHandle());\n SimulateInvalidatorStateChangeForTest(INVALIDATION_CREDENTIALS_REJECTED);\n SetJsEventHandler(WeakHandle());\n\n SimulateInvalidatorStateChangeForTest(INVALIDATIONS_ENABLED);\n SimulateInvalidatorStateChangeForTest(INVALIDATION_CREDENTIALS_REJECTED);\n SimulateInvalidatorStateChangeForTest(TRANSIENT_INVALIDATION_ERROR);\n\n SetJsEventHandler(event_handler.AsWeakHandle());\n SimulateInvalidatorStateChangeForTest(INVALIDATIONS_ENABLED);\n SimulateInvalidatorStateChangeForTest(INVALIDATION_CREDENTIALS_REJECTED);\n SimulateInvalidatorStateChangeForTest(TRANSIENT_INVALIDATION_ERROR);\n SetJsEventHandler(WeakHandle());\n\n SimulateInvalidatorStateChangeForTest(INVALIDATIONS_ENABLED);\n SimulateInvalidatorStateChangeForTest(INVALIDATION_CREDENTIALS_REJECTED);\n SimulateInvalidatorStateChangeForTest(TRANSIENT_INVALIDATION_ERROR);\n\n // Should trigger the replies.\n PumpLoop();\n}\n\n// Simulate the invalidator's credentials being rejected. 
That should\n// also clear the sync token.\nTEST_F(SyncManagerTest, OnInvalidatorStateChangeCredentialsRejected) {\n EXPECT_CALL(manager_observer_,\n OnConnectionStatusChange(CONNECTION_AUTH_ERROR));\n\n EXPECT_FALSE(sync_manager_.GetHasInvalidAuthTokenForTest());\n\n SimulateInvalidatorStateChangeForTest(INVALIDATION_CREDENTIALS_REJECTED);\n\n EXPECT_TRUE(sync_manager_.GetHasInvalidAuthTokenForTest());\n\n // Should trigger the replies.\n PumpLoop();\n}\n\nTEST_F(SyncManagerTest, OnIncomingNotification) {\n StrictMock event_handler;\n\n const ModelTypeSet empty_model_types;\n const ModelTypeSet model_types(\n BOOKMARKS, THEMES);\n\n // Build expected_args to have a single argument with the string\n // equivalents of model_types.\n base::DictionaryValue expected_details;\n {\n base::ListValue* model_type_list = new base::ListValue();\n expected_details.SetString(\"source\", \"REMOTE_INVALIDATION\");\n expected_details.Set(\"changedTypes\", model_type_list);\n for (ModelTypeSet::Iterator it = model_types.First();\n it.Good(); it.Inc()) {\n model_type_list->Append(\n new base::StringValue(ModelTypeToString(it.Get())));\n }\n }\n\n EXPECT_CALL(event_handler,\n HandleJsEvent(\"onIncomingNotification\",\n HasDetailsAsDictionary(expected_details)));\n\n TriggerOnIncomingNotificationForTest(empty_model_types);\n TriggerOnIncomingNotificationForTest(model_types);\n\n SetJsEventHandler(event_handler.AsWeakHandle());\n TriggerOnIncomingNotificationForTest(model_types);\n SetJsEventHandler(WeakHandle());\n\n TriggerOnIncomingNotificationForTest(empty_model_types);\n TriggerOnIncomingNotificationForTest(model_types);\n\n // Should trigger the replies.\n PumpLoop();\n}\n\nTEST_F(SyncManagerTest, RefreshEncryptionReady) {\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n EXPECT_CALL(encryption_observer_, OnEncryptedTypesChanged(_, 
false));\n\n sync_manager_.GetEncryptionHandler()->Init();\n PumpLoop();\n\n const ModelTypeSet encrypted_types = GetEncryptedTypes();\n EXPECT_TRUE(encrypted_types.Has(PASSWORDS));\n EXPECT_FALSE(EncryptEverythingEnabledForTest());\n\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByIdLookup(GetIdForDataType(NIGORI)));\n sync_pb::NigoriSpecifics nigori = node.GetNigoriSpecifics();\n EXPECT_TRUE(nigori.has_encryption_keybag());\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->is_ready());\n EXPECT_TRUE(cryptographer->CanDecrypt(nigori.encryption_keybag()));\n }\n}\n\n// Attempt to refresh encryption when nigori not downloaded.\nTEST_F(SyncManagerTest, RefreshEncryptionNotReady) {\n // Don't set up encryption (no nigori node created).\n\n // Should fail. Triggers an OnPassphraseRequired because the cryptographer\n // is not ready.\n EXPECT_CALL(encryption_observer_, OnPassphraseRequired(_, _)).Times(1);\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n EXPECT_CALL(encryption_observer_, OnEncryptedTypesChanged(_, false));\n sync_manager_.GetEncryptionHandler()->Init();\n PumpLoop();\n\n const ModelTypeSet encrypted_types = GetEncryptedTypes();\n EXPECT_TRUE(encrypted_types.Has(PASSWORDS)); // Hardcoded.\n EXPECT_FALSE(EncryptEverythingEnabledForTest());\n}\n\n// Attempt to refresh encryption when nigori is empty.\nTEST_F(SyncManagerTest, RefreshEncryptionEmptyNigori) {\n EXPECT_TRUE(SetUpEncryption(DONT_WRITE_NIGORI, DEFAULT_ENCRYPTION));\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete()).Times(1);\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n EXPECT_CALL(encryption_observer_, OnEncryptedTypesChanged(_, false));\n\n // Should write to nigori.\n sync_manager_.GetEncryptionHandler()->Init();\n PumpLoop();\n\n const ModelTypeSet encrypted_types = GetEncryptedTypes();\n 
EXPECT_TRUE(encrypted_types.Has(PASSWORDS)); // Hardcoded.\n EXPECT_FALSE(EncryptEverythingEnabledForTest());\n\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByIdLookup(GetIdForDataType(NIGORI)));\n sync_pb::NigoriSpecifics nigori = node.GetNigoriSpecifics();\n EXPECT_TRUE(nigori.has_encryption_keybag());\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->is_ready());\n EXPECT_TRUE(cryptographer->CanDecrypt(nigori.encryption_keybag()));\n }\n}\n\nTEST_F(SyncManagerTest, EncryptDataTypesWithNoData) {\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n EXPECT_CALL(encryption_observer_,\n OnEncryptedTypesChanged(\n HasModelTypes(EncryptableUserTypes()), true));\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n sync_manager_.GetEncryptionHandler()->EnableEncryptEverything();\n EXPECT_TRUE(EncryptEverythingEnabledForTest());\n}\n\nTEST_F(SyncManagerTest, EncryptDataTypesWithData) {\n size_t batch_size = 5;\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n\n // Create some unencrypted unsynced data.\n int64 folder = MakeFolderWithParent(sync_manager_.GetUserShare(),\n BOOKMARKS,\n GetIdForDataType(BOOKMARKS),\n NULL);\n // First batch_size nodes are children of folder.\n size_t i;\n for (i = 0; i < batch_size; ++i) {\n MakeBookmarkWithParent(sync_manager_.GetUserShare(), folder, NULL);\n }\n // Next batch_size nodes are a different type and on their own.\n for (; i < 2*batch_size; ++i) {\n MakeNode(sync_manager_.GetUserShare(), SESSIONS,\n base::StringPrintf(\"%\" PRIuS \"\", i));\n }\n // Last batch_size nodes are a third type that will not need encryption.\n for (; i < 3*batch_size; ++i) {\n MakeNode(sync_manager_.GetUserShare(), THEMES,\n base::StringPrintf(\"%\" PRIuS \"\", i));\n }\n\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n 
EXPECT_TRUE(GetEncryptedTypesWithTrans(&trans).Equals(\n SyncEncryptionHandler::SensitiveTypes()));\n EXPECT_TRUE(syncable::VerifyDataTypeEncryptionForTest(\n trans.GetWrappedTrans(),\n BOOKMARKS,\n false /* not encrypted */));\n EXPECT_TRUE(syncable::VerifyDataTypeEncryptionForTest(\n trans.GetWrappedTrans(),\n SESSIONS,\n false /* not encrypted */));\n EXPECT_TRUE(syncable::VerifyDataTypeEncryptionForTest(\n trans.GetWrappedTrans(),\n THEMES,\n false /* not encrypted */));\n }\n\n EXPECT_CALL(encryption_observer_,\n OnEncryptedTypesChanged(\n HasModelTypes(EncryptableUserTypes()), true));\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n sync_manager_.GetEncryptionHandler()->EnableEncryptEverything();\n EXPECT_TRUE(EncryptEverythingEnabledForTest());\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n EXPECT_TRUE(GetEncryptedTypesWithTrans(&trans).Equals(\n EncryptableUserTypes()));\n EXPECT_TRUE(syncable::VerifyDataTypeEncryptionForTest(\n trans.GetWrappedTrans(),\n BOOKMARKS,\n true /* is encrypted */));\n EXPECT_TRUE(syncable::VerifyDataTypeEncryptionForTest(\n trans.GetWrappedTrans(),\n SESSIONS,\n true /* is encrypted */));\n EXPECT_TRUE(syncable::VerifyDataTypeEncryptionForTest(\n trans.GetWrappedTrans(),\n THEMES,\n true /* is encrypted */));\n }\n\n // Trigger's a ReEncryptEverything with new passphrase.\n testing::Mock::VerifyAndClearExpectations(&encryption_observer_);\n EXPECT_CALL(encryption_observer_,\n OnBootstrapTokenUpdated(_, PASSPHRASE_BOOTSTRAP_TOKEN));\n EXPECT_CALL(encryption_observer_, OnPassphraseAccepted());\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n EXPECT_CALL(encryption_observer_,\n OnPassphraseTypeChanged(CUSTOM_PASSPHRASE, _));\n sync_manager_.GetEncryptionHandler()->SetEncryptionPassphrase(\n \"new_passphrase\", true);\n EXPECT_TRUE(EncryptEverythingEnabledForTest());\n {\n ReadTransaction trans(FROM_HERE, 
sync_manager_.GetUserShare());\n EXPECT_TRUE(GetEncryptedTypesWithTrans(&trans).Equals(\n EncryptableUserTypes()));\n EXPECT_TRUE(syncable::VerifyDataTypeEncryptionForTest(\n trans.GetWrappedTrans(),\n BOOKMARKS,\n true /* is encrypted */));\n EXPECT_TRUE(syncable::VerifyDataTypeEncryptionForTest(\n trans.GetWrappedTrans(),\n SESSIONS,\n true /* is encrypted */));\n EXPECT_TRUE(syncable::VerifyDataTypeEncryptionForTest(\n trans.GetWrappedTrans(),\n THEMES,\n true /* is encrypted */));\n }\n // Calling EncryptDataTypes with an empty encrypted types should not trigger\n // a reencryption and should just notify immediately.\n testing::Mock::VerifyAndClearExpectations(&encryption_observer_);\n EXPECT_CALL(encryption_observer_,\n OnBootstrapTokenUpdated(_, PASSPHRASE_BOOTSTRAP_TOKEN)).Times(0);\n EXPECT_CALL(encryption_observer_, OnPassphraseAccepted()).Times(0);\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete()).Times(0);\n sync_manager_.GetEncryptionHandler()->EnableEncryptEverything();\n}\n\n// Test that when there are no pending keys and the cryptographer is not\n// initialized, we add a key based on the current GAIA password.\n// (case 1 in SyncManager::SyncInternal::SetEncryptionPassphrase)\nTEST_F(SyncManagerTest, SetInitialGaiaPass) {\n EXPECT_FALSE(SetUpEncryption(DONT_WRITE_NIGORI, UNINITIALIZED));\n EXPECT_CALL(encryption_observer_,\n OnBootstrapTokenUpdated(_, PASSPHRASE_BOOTSTRAP_TOKEN));\n EXPECT_CALL(encryption_observer_, OnPassphraseAccepted());\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n sync_manager_.GetEncryptionHandler()->SetEncryptionPassphrase(\n \"new_passphrase\",\n false);\n EXPECT_EQ(IMPLICIT_PASSPHRASE,\n sync_manager_.GetEncryptionHandler()->GetPassphraseType());\n EXPECT_FALSE(EncryptEverythingEnabledForTest());\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, 
node.InitByTagLookup(kNigoriTag));\n sync_pb::NigoriSpecifics nigori = node.GetNigoriSpecifics();\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->is_ready());\n EXPECT_TRUE(cryptographer->CanDecrypt(nigori.encryption_keybag()));\n }\n}\n\n// Test that when there are no pending keys and we have on the old GAIA\n// password, we update and re-encrypt everything with the new GAIA password.\n// (case 1 in SyncManager::SyncInternal::SetEncryptionPassphrase)\nTEST_F(SyncManagerTest, UpdateGaiaPass) {\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n Cryptographer verifier(&encryptor_);\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n std::string bootstrap_token;\n cryptographer->GetBootstrapToken(&bootstrap_token);\n verifier.Bootstrap(bootstrap_token);\n }\n EXPECT_CALL(encryption_observer_,\n OnBootstrapTokenUpdated(_, PASSPHRASE_BOOTSTRAP_TOKEN));\n EXPECT_CALL(encryption_observer_, OnPassphraseAccepted());\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n sync_manager_.GetEncryptionHandler()->SetEncryptionPassphrase(\n \"new_passphrase\",\n false);\n EXPECT_EQ(IMPLICIT_PASSPHRASE,\n sync_manager_.GetEncryptionHandler()->GetPassphraseType());\n EXPECT_FALSE(EncryptEverythingEnabledForTest());\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->is_ready());\n // Verify the default key has changed.\n sync_pb::EncryptedData encrypted;\n cryptographer->GetKeys(&encrypted);\n EXPECT_FALSE(verifier.CanDecrypt(encrypted));\n }\n}\n\n// Sets a new explicit passphrase. 
This should update the bootstrap token\n// and re-encrypt everything.\n// (case 2 in SyncManager::SyncInternal::SetEncryptionPassphrase)\nTEST_F(SyncManagerTest, SetPassphraseWithPassword) {\n Cryptographer verifier(&encryptor_);\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n // Store the default (soon to be old) key.\n Cryptographer* cryptographer = trans.GetCryptographer();\n std::string bootstrap_token;\n cryptographer->GetBootstrapToken(&bootstrap_token);\n verifier.Bootstrap(bootstrap_token);\n\n ReadNode root_node(&trans);\n root_node.InitByRootLookup();\n\n WriteNode password_node(&trans);\n WriteNode::InitUniqueByCreationResult result =\n password_node.InitUniqueByCreation(PASSWORDS,\n root_node, \"foo\");\n EXPECT_EQ(WriteNode::INIT_SUCCESS, result);\n sync_pb::PasswordSpecificsData data;\n data.set_password_value(\"secret\");\n password_node.SetPasswordSpecifics(data);\n }\n EXPECT_CALL(encryption_observer_,\n OnBootstrapTokenUpdated(_, PASSPHRASE_BOOTSTRAP_TOKEN));\n EXPECT_CALL(encryption_observer_, OnPassphraseAccepted());\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n EXPECT_CALL(encryption_observer_,\n OnPassphraseTypeChanged(CUSTOM_PASSPHRASE, _));\n sync_manager_.GetEncryptionHandler()->SetEncryptionPassphrase(\n \"new_passphrase\",\n true);\n EXPECT_EQ(CUSTOM_PASSPHRASE,\n sync_manager_.GetEncryptionHandler()->GetPassphraseType());\n EXPECT_FALSE(EncryptEverythingEnabledForTest());\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->is_ready());\n // Verify the default key has changed.\n sync_pb::EncryptedData encrypted;\n cryptographer->GetKeys(&encrypted);\n EXPECT_FALSE(verifier.CanDecrypt(encrypted));\n\n ReadNode password_node(&trans);\n 
EXPECT_EQ(BaseNode::INIT_OK,\n password_node.InitByClientTagLookup(PASSWORDS,\n \"foo\"));\n const sync_pb::PasswordSpecificsData& data =\n password_node.GetPasswordSpecifics();\n EXPECT_EQ(\"secret\", data.password_value());\n }\n}\n\n// Manually set the pending keys in the cryptographer/nigori to reflect the data\n// being encrypted with a new (unprovided) GAIA password, then supply the\n// password.\n// (case 7 in SyncManager::SyncInternal::SetDecryptionPassphrase)\nTEST_F(SyncManagerTest, SupplyPendingGAIAPass) {\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n Cryptographer other_cryptographer(&encryptor_);\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n std::string bootstrap_token;\n cryptographer->GetBootstrapToken(&bootstrap_token);\n other_cryptographer.Bootstrap(bootstrap_token);\n\n // Now update the nigori to reflect the new keys, and update the\n // cryptographer to have pending keys.\n KeyParams params = {\"localhost\", \"dummy\", \"passphrase2\"};\n other_cryptographer.AddKey(params);\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, node.InitByTagLookup(kNigoriTag));\n sync_pb::NigoriSpecifics nigori;\n other_cryptographer.GetKeys(nigori.mutable_encryption_keybag());\n cryptographer->SetPendingKeys(nigori.encryption_keybag());\n EXPECT_TRUE(cryptographer->has_pending_keys());\n node.SetNigoriSpecifics(nigori);\n }\n EXPECT_CALL(encryption_observer_,\n OnBootstrapTokenUpdated(_, PASSPHRASE_BOOTSTRAP_TOKEN));\n EXPECT_CALL(encryption_observer_, OnPassphraseAccepted());\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n sync_manager_.GetEncryptionHandler()->SetDecryptionPassphrase(\"passphrase2\");\n EXPECT_EQ(IMPLICIT_PASSPHRASE,\n sync_manager_.GetEncryptionHandler()->GetPassphraseType());\n EXPECT_FALSE(EncryptEverythingEnabledForTest());\n {\n 
ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->is_ready());\n // Verify we're encrypting with the new key.\n sync_pb::EncryptedData encrypted;\n cryptographer->GetKeys(&encrypted);\n EXPECT_TRUE(other_cryptographer.CanDecrypt(encrypted));\n }\n}\n\n// Manually set the pending keys in the cryptographer/nigori to reflect the data\n// being encrypted with an old (unprovided) GAIA password. Attempt to supply\n// the current GAIA password and verify the bootstrap token is updated. Then\n// supply the old GAIA password, and verify we re-encrypt all data with the\n// new GAIA password.\n// (cases 4 and 5 in SyncManager::SyncInternal::SetEncryptionPassphrase)\nTEST_F(SyncManagerTest, SupplyPendingOldGAIAPass) {\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n Cryptographer other_cryptographer(&encryptor_);\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n std::string bootstrap_token;\n cryptographer->GetBootstrapToken(&bootstrap_token);\n other_cryptographer.Bootstrap(bootstrap_token);\n\n // Now update the nigori to reflect the new keys, and update the\n // cryptographer to have pending keys.\n KeyParams params = {\"localhost\", \"dummy\", \"old_gaia\"};\n other_cryptographer.AddKey(params);\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, node.InitByTagLookup(kNigoriTag));\n sync_pb::NigoriSpecifics nigori;\n other_cryptographer.GetKeys(nigori.mutable_encryption_keybag());\n node.SetNigoriSpecifics(nigori);\n cryptographer->SetPendingKeys(nigori.encryption_keybag());\n\n // other_cryptographer now contains all encryption keys, and is encrypting\n // with the newest gaia.\n KeyParams new_params = {\"localhost\", \"dummy\", \"new_gaia\"};\n other_cryptographer.AddKey(new_params);\n }\n // The bootstrap token should have been updated. 
Save it to ensure it's based\n // on the new GAIA password.\n std::string bootstrap_token;\n EXPECT_CALL(encryption_observer_,\n OnBootstrapTokenUpdated(_, PASSPHRASE_BOOTSTRAP_TOKEN))\n .WillOnce(SaveArg<0>(&bootstrap_token));\n EXPECT_CALL(encryption_observer_, OnPassphraseRequired(_,_));\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n sync_manager_.GetEncryptionHandler()->SetEncryptionPassphrase(\n \"new_gaia\",\n false);\n EXPECT_EQ(IMPLICIT_PASSPHRASE,\n sync_manager_.GetEncryptionHandler()->GetPassphraseType());\n EXPECT_FALSE(EncryptEverythingEnabledForTest());\n testing::Mock::VerifyAndClearExpectations(&encryption_observer_);\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->is_initialized());\n EXPECT_FALSE(cryptographer->is_ready());\n // Verify we're encrypting with the new key, even though we have pending\n // keys.\n sync_pb::EncryptedData encrypted;\n other_cryptographer.GetKeys(&encrypted);\n EXPECT_TRUE(cryptographer->CanDecrypt(encrypted));\n }\n EXPECT_CALL(encryption_observer_,\n OnBootstrapTokenUpdated(_, PASSPHRASE_BOOTSTRAP_TOKEN));\n EXPECT_CALL(encryption_observer_, OnPassphraseAccepted());\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n sync_manager_.GetEncryptionHandler()->SetEncryptionPassphrase(\n \"old_gaia\",\n false);\n EXPECT_EQ(IMPLICIT_PASSPHRASE,\n sync_manager_.GetEncryptionHandler()->GetPassphraseType());\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->is_ready());\n\n // Verify we're encrypting with the new key.\n sync_pb::EncryptedData encrypted;\n other_cryptographer.GetKeys(&encrypted);\n EXPECT_TRUE(cryptographer->CanDecrypt(encrypted));\n\n // Verify the saved bootstrap token is based on the new gaia 
password.\n Cryptographer temp_cryptographer(&encryptor_);\n temp_cryptographer.Bootstrap(bootstrap_token);\n EXPECT_TRUE(temp_cryptographer.CanDecrypt(encrypted));\n }\n}\n\n// Manually set the pending keys in the cryptographer/nigori to reflect the data\n// being encrypted with an explicit (unprovided) passphrase, then supply the\n// passphrase.\n// (case 9 in SyncManager::SyncInternal::SetDecryptionPassphrase)\nTEST_F(SyncManagerTest, SupplyPendingExplicitPass) {\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n Cryptographer other_cryptographer(&encryptor_);\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n std::string bootstrap_token;\n cryptographer->GetBootstrapToken(&bootstrap_token);\n other_cryptographer.Bootstrap(bootstrap_token);\n\n // Now update the nigori to reflect the new keys, and update the\n // cryptographer to have pending keys.\n KeyParams params = {\"localhost\", \"dummy\", \"explicit\"};\n other_cryptographer.AddKey(params);\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, node.InitByTagLookup(kNigoriTag));\n sync_pb::NigoriSpecifics nigori;\n other_cryptographer.GetKeys(nigori.mutable_encryption_keybag());\n cryptographer->SetPendingKeys(nigori.encryption_keybag());\n EXPECT_TRUE(cryptographer->has_pending_keys());\n nigori.set_keybag_is_frozen(true);\n node.SetNigoriSpecifics(nigori);\n }\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n EXPECT_CALL(encryption_observer_,\n OnPassphraseTypeChanged(CUSTOM_PASSPHRASE, _));\n EXPECT_CALL(encryption_observer_, OnPassphraseRequired(_, _));\n EXPECT_CALL(encryption_observer_, OnEncryptedTypesChanged(_, false));\n sync_manager_.GetEncryptionHandler()->Init();\n EXPECT_CALL(encryption_observer_,\n OnBootstrapTokenUpdated(_, PASSPHRASE_BOOTSTRAP_TOKEN));\n EXPECT_CALL(encryption_observer_, OnPassphraseAccepted());\n EXPECT_CALL(encryption_observer_, 
OnEncryptionComplete());\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n sync_manager_.GetEncryptionHandler()->SetDecryptionPassphrase(\"explicit\");\n EXPECT_EQ(CUSTOM_PASSPHRASE,\n sync_manager_.GetEncryptionHandler()->GetPassphraseType());\n EXPECT_FALSE(EncryptEverythingEnabledForTest());\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->is_ready());\n // Verify we're encrypting with the new key.\n sync_pb::EncryptedData encrypted;\n cryptographer->GetKeys(&encrypted);\n EXPECT_TRUE(other_cryptographer.CanDecrypt(encrypted));\n }\n}\n\n// Manually set the pending keys in the cryptographer/nigori to reflect the data\n// being encrypted with a new (unprovided) GAIA password, then supply the\n// password as a user-provided password.\n// This is the android case 7/8.\nTEST_F(SyncManagerTest, SupplyPendingGAIAPassUserProvided) {\n EXPECT_FALSE(SetUpEncryption(DONT_WRITE_NIGORI, UNINITIALIZED));\n Cryptographer other_cryptographer(&encryptor_);\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n // Now update the nigori to reflect the new keys, and update the\n // cryptographer to have pending keys.\n KeyParams params = {\"localhost\", \"dummy\", \"passphrase\"};\n other_cryptographer.AddKey(params);\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, node.InitByTagLookup(kNigoriTag));\n sync_pb::NigoriSpecifics nigori;\n other_cryptographer.GetKeys(nigori.mutable_encryption_keybag());\n node.SetNigoriSpecifics(nigori);\n cryptographer->SetPendingKeys(nigori.encryption_keybag());\n EXPECT_FALSE(cryptographer->is_ready());\n }\n EXPECT_CALL(encryption_observer_,\n OnBootstrapTokenUpdated(_, PASSPHRASE_BOOTSTRAP_TOKEN));\n EXPECT_CALL(encryption_observer_, OnPassphraseAccepted());\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n 
EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n sync_manager_.GetEncryptionHandler()->SetEncryptionPassphrase(\n \"passphrase\",\n false);\n EXPECT_EQ(IMPLICIT_PASSPHRASE,\n sync_manager_.GetEncryptionHandler()->GetPassphraseType());\n EXPECT_FALSE(EncryptEverythingEnabledForTest());\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->is_ready());\n }\n}\n\nTEST_F(SyncManagerTest, SetPassphraseWithEmptyPasswordNode) {\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n int64 node_id = 0;\n std::string tag = \"foo\";\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode root_node(&trans);\n root_node.InitByRootLookup();\n\n WriteNode password_node(&trans);\n WriteNode::InitUniqueByCreationResult result =\n password_node.InitUniqueByCreation(PASSWORDS, root_node, tag);\n EXPECT_EQ(WriteNode::INIT_SUCCESS, result);\n node_id = password_node.GetId();\n }\n EXPECT_CALL(encryption_observer_,\n OnBootstrapTokenUpdated(_, PASSPHRASE_BOOTSTRAP_TOKEN));\n EXPECT_CALL(encryption_observer_, OnPassphraseAccepted());\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n EXPECT_CALL(encryption_observer_,\n OnPassphraseTypeChanged(CUSTOM_PASSPHRASE, _));\n sync_manager_.GetEncryptionHandler()->SetEncryptionPassphrase(\n \"new_passphrase\",\n true);\n EXPECT_EQ(CUSTOM_PASSPHRASE,\n sync_manager_.GetEncryptionHandler()->GetPassphraseType());\n EXPECT_FALSE(EncryptEverythingEnabledForTest());\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode password_node(&trans);\n EXPECT_EQ(BaseNode::INIT_FAILED_DECRYPT_IF_NECESSARY,\n password_node.InitByClientTagLookup(PASSWORDS,\n tag));\n }\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode password_node(&trans);\n 
EXPECT_EQ(BaseNode::INIT_FAILED_DECRYPT_IF_NECESSARY,
              password_node.InitByIdLookup(node_id));
  }
}

TEST_F(SyncManagerTest, NudgeDelayTest) {
  EXPECT_EQ(sync_manager_.GetNudgeDelayTimeDelta(BOOKMARKS),
            base::TimeDelta::FromMilliseconds(
                SyncManagerImpl::GetDefaultNudgeDelay()));

  EXPECT_EQ(sync_manager_.GetNudgeDelayTimeDelta(AUTOFILL),
            base::TimeDelta::FromSeconds(
                kDefaultShortPollIntervalSeconds));

  EXPECT_EQ(sync_manager_.GetNudgeDelayTimeDelta(PREFERENCES),
            base::TimeDelta::FromMilliseconds(
                SyncManagerImpl::GetPreferencesNudgeDelay()));
}

// Friended by WriteNode, so can't be in an anonymous namespace.
TEST_F(SyncManagerTest, EncryptBookmarksWithLegacyData) {
  EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));
  std::string title;
  SyncAPINameToServerName("Google", &title);
  std::string url = "http://www.google.com";
  std::string raw_title2 = "..";  // An invalid cosmo title.
  std::string title2;
  SyncAPINameToServerName(raw_title2, &title2);
  std::string url2 = "http://www.bla.com";

  // Create a bookmark using the legacy format.
  int64 node_id1 = MakeNode(sync_manager_.GetUserShare(),
                            BOOKMARKS,
                            "testtag");
  int64 node_id2 = MakeNode(sync_manager_.GetUserShare(),
                            BOOKMARKS,
                            "testtag2");
  {
    WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());
    WriteNode node(&trans);
    EXPECT_EQ(BaseNode::INIT_OK, node.InitByIdLookup(node_id1));

    sync_pb::EntitySpecifics entity_specifics;
    entity_specifics.mutable_bookmark()->set_url(url);
    node.SetEntitySpecifics(entity_specifics);

    // Set the old style title.
    syncable::MutableEntry* node_entry = node.entry_;
    node_entry->Put(syncable::NON_UNIQUE_NAME, title);

    WriteNode node2(&trans);
    EXPECT_EQ(BaseNode::INIT_OK, node2.InitByIdLookup(node_id2));

    sync_pb::EntitySpecifics entity_specifics2;
    entity_specifics2.mutable_bookmark()->set_url(url2);
    node2.SetEntitySpecifics(entity_specifics2);

    // Set the old 
style title.\n syncable::MutableEntry* node_entry2 = node2.entry_;\n node_entry2->Put(syncable::NON_UNIQUE_NAME, title2);\n }\n\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, node.InitByIdLookup(node_id1));\n EXPECT_EQ(BOOKMARKS, node.GetModelType());\n EXPECT_EQ(title, node.GetTitle());\n EXPECT_EQ(title, node.GetBookmarkSpecifics().title());\n EXPECT_EQ(url, node.GetBookmarkSpecifics().url());\n\n ReadNode node2(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, node2.InitByIdLookup(node_id2));\n EXPECT_EQ(BOOKMARKS, node2.GetModelType());\n // We should de-canonicalize the title in GetTitle(), but the title in the\n // specifics should be stored in the server legal form.\n EXPECT_EQ(raw_title2, node2.GetTitle());\n EXPECT_EQ(title2, node2.GetBookmarkSpecifics().title());\n EXPECT_EQ(url2, node2.GetBookmarkSpecifics().url());\n }\n\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n EXPECT_TRUE(syncable::VerifyDataTypeEncryptionForTest(\n trans.GetWrappedTrans(),\n BOOKMARKS,\n false /* not encrypted */));\n }\n\n EXPECT_CALL(encryption_observer_,\n OnEncryptedTypesChanged(\n HasModelTypes(EncryptableUserTypes()), true));\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n sync_manager_.GetEncryptionHandler()->EnableEncryptEverything();\n EXPECT_TRUE(EncryptEverythingEnabledForTest());\n\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n EXPECT_TRUE(GetEncryptedTypesWithTrans(&trans).Equals(\n EncryptableUserTypes()));\n EXPECT_TRUE(syncable::VerifyDataTypeEncryptionForTest(\n trans.GetWrappedTrans(),\n BOOKMARKS,\n true /* is encrypted */));\n\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, node.InitByIdLookup(node_id1));\n EXPECT_EQ(BOOKMARKS, node.GetModelType());\n EXPECT_EQ(title, node.GetTitle());\n EXPECT_EQ(title, node.GetBookmarkSpecifics().title());\n EXPECT_EQ(url, node.GetBookmarkSpecifics().url());\n\n ReadNode 
node2(&trans);\n EXPECT_EQ(BaseNode::INIT_OK, node2.InitByIdLookup(node_id2));\n EXPECT_EQ(BOOKMARKS, node2.GetModelType());\n // We should de-canonicalize the title in GetTitle(), but the title in the\n // specifics should be stored in the server legal form.\n EXPECT_EQ(raw_title2, node2.GetTitle());\n EXPECT_EQ(title2, node2.GetBookmarkSpecifics().title());\n EXPECT_EQ(url2, node2.GetBookmarkSpecifics().url());\n }\n}\n\n// Create a bookmark and set the title/url, then verify the data was properly\n// set. This replicates the unique way bookmarks have of creating sync nodes.\n// See BookmarkChangeProcessor::PlaceSyncNode(..).\nTEST_F(SyncManagerTest, CreateLocalBookmark) {\n std::string title = \"title\";\n std::string url = \"url\";\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode bookmark_root(&trans);\n ASSERT_EQ(BaseNode::INIT_OK,\n bookmark_root.InitByTagLookup(ModelTypeToRootTag(BOOKMARKS)));\n WriteNode node(&trans);\n ASSERT_TRUE(node.InitBookmarkByCreation(bookmark_root, NULL));\n node.SetIsFolder(false);\n node.SetTitle(UTF8ToWide(title));\n\n sync_pb::BookmarkSpecifics bookmark_specifics(node.GetBookmarkSpecifics());\n bookmark_specifics.set_url(url);\n node.SetBookmarkSpecifics(bookmark_specifics);\n }\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode bookmark_root(&trans);\n ASSERT_EQ(BaseNode::INIT_OK,\n bookmark_root.InitByTagLookup(ModelTypeToRootTag(BOOKMARKS)));\n int64 child_id = bookmark_root.GetFirstChildId();\n\n ReadNode node(&trans);\n ASSERT_EQ(BaseNode::INIT_OK, node.InitByIdLookup(child_id));\n EXPECT_FALSE(node.GetIsFolder());\n EXPECT_EQ(title, node.GetTitle());\n EXPECT_EQ(url, node.GetBookmarkSpecifics().url());\n }\n}\n\n// Verifies WriteNode::UpdateEntryWithEncryption does not make unnecessary\n// changes.\nTEST_F(SyncManagerTest, UpdateEntryWithEncryption) {\n std::string client_tag = \"title\";\n sync_pb::EntitySpecifics entity_specifics;\n 
entity_specifics.mutable_bookmark()->set_url("url");
  entity_specifics.mutable_bookmark()->set_title("title");
  MakeServerNode(sync_manager_.GetUserShare(), BOOKMARKS, client_tag,
                 syncable::GenerateSyncableHash(BOOKMARKS,
                                                client_tag),
                 entity_specifics);
  // New node shouldn't start off unsynced.
  EXPECT_FALSE(ResetUnsyncedEntry(BOOKMARKS, client_tag));
  // Manually change to the same data. Should not set is_unsynced.
  {
    WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());
    WriteNode node(&trans);
    EXPECT_EQ(BaseNode::INIT_OK,
              node.InitByClientTagLookup(BOOKMARKS, client_tag));
    node.SetEntitySpecifics(entity_specifics);
  }
  EXPECT_FALSE(ResetUnsyncedEntry(BOOKMARKS, client_tag));

  // Encrypt the datatype, should set is_unsynced.
  EXPECT_CALL(encryption_observer_,
              OnEncryptedTypesChanged(
                  HasModelTypes(EncryptableUserTypes()), true));
  EXPECT_CALL(encryption_observer_, OnEncryptionComplete());
  EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, FULL_ENCRYPTION));

  EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));
  EXPECT_CALL(encryption_observer_, OnEncryptedTypesChanged(_, true));
  sync_manager_.GetEncryptionHandler()->Init();
  PumpLoop();
  {
    ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());
    ReadNode node(&trans);
    EXPECT_EQ(BaseNode::INIT_OK,
              node.InitByClientTagLookup(BOOKMARKS, client_tag));
    const syncable::Entry* node_entry = node.GetEntry();
    const sync_pb::EntitySpecifics& specifics = node_entry->Get(SPECIFICS);
    EXPECT_TRUE(specifics.has_encrypted());
    EXPECT_EQ(kEncryptedString, node_entry->Get(NON_UNIQUE_NAME));
    Cryptographer* cryptographer = trans.GetCryptographer();
    EXPECT_TRUE(cryptographer->is_ready());
    EXPECT_TRUE(cryptographer->CanDecryptUsingDefaultKey(
        specifics.encrypted()));
  }
  EXPECT_TRUE(ResetUnsyncedEntry(BOOKMARKS, client_tag));

  // Set a new passphrase. 
Should set is_unsynced.\n testing::Mock::VerifyAndClearExpectations(&encryption_observer_);\n EXPECT_CALL(encryption_observer_,\n OnBootstrapTokenUpdated(_, PASSPHRASE_BOOTSTRAP_TOKEN));\n EXPECT_CALL(encryption_observer_, OnPassphraseAccepted());\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n EXPECT_CALL(encryption_observer_,\n OnPassphraseTypeChanged(CUSTOM_PASSPHRASE, _));\n sync_manager_.GetEncryptionHandler()->SetEncryptionPassphrase(\n \"new_passphrase\",\n true);\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(BOOKMARKS, client_tag));\n const syncable::Entry* node_entry = node.GetEntry();\n const sync_pb::EntitySpecifics& specifics = node_entry->Get(SPECIFICS);\n EXPECT_TRUE(specifics.has_encrypted());\n EXPECT_EQ(kEncryptedString, node_entry->Get(NON_UNIQUE_NAME));\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->is_ready());\n EXPECT_TRUE(cryptographer->CanDecryptUsingDefaultKey(\n specifics.encrypted()));\n }\n EXPECT_TRUE(ResetUnsyncedEntry(BOOKMARKS, client_tag));\n\n // Force a re-encrypt everything. 
Should not set is_unsynced.\n testing::Mock::VerifyAndClearExpectations(&encryption_observer_);\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n EXPECT_CALL(encryption_observer_, OnEncryptedTypesChanged(_, true));\n\n sync_manager_.GetEncryptionHandler()->Init();\n PumpLoop();\n\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(BOOKMARKS, client_tag));\n const syncable::Entry* node_entry = node.GetEntry();\n const sync_pb::EntitySpecifics& specifics = node_entry->Get(SPECIFICS);\n EXPECT_TRUE(specifics.has_encrypted());\n EXPECT_EQ(kEncryptedString, node_entry->Get(NON_UNIQUE_NAME));\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->CanDecryptUsingDefaultKey(\n specifics.encrypted()));\n }\n EXPECT_FALSE(ResetUnsyncedEntry(BOOKMARKS, client_tag));\n\n // Manually change to the same data. Should not set is_unsynced.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(BOOKMARKS, client_tag));\n node.SetEntitySpecifics(entity_specifics);\n const syncable::Entry* node_entry = node.GetEntry();\n const sync_pb::EntitySpecifics& specifics = node_entry->Get(SPECIFICS);\n EXPECT_TRUE(specifics.has_encrypted());\n EXPECT_FALSE(node_entry->Get(IS_UNSYNCED));\n EXPECT_EQ(kEncryptedString, node_entry->Get(NON_UNIQUE_NAME));\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->CanDecryptUsingDefaultKey(\n specifics.encrypted()));\n }\n EXPECT_FALSE(ResetUnsyncedEntry(BOOKMARKS, client_tag));\n\n // Manually change to different data. 
Should set is_unsynced.\n {\n entity_specifics.mutable_bookmark()->set_url(\"url2\");\n entity_specifics.mutable_bookmark()->set_title(\"title2\");\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(BOOKMARKS, client_tag));\n node.SetEntitySpecifics(entity_specifics);\n const syncable::Entry* node_entry = node.GetEntry();\n const sync_pb::EntitySpecifics& specifics = node_entry->Get(SPECIFICS);\n EXPECT_TRUE(specifics.has_encrypted());\n EXPECT_TRUE(node_entry->Get(IS_UNSYNCED));\n EXPECT_EQ(kEncryptedString, node_entry->Get(NON_UNIQUE_NAME));\n Cryptographer* cryptographer = trans.GetCryptographer();\n EXPECT_TRUE(cryptographer->CanDecryptUsingDefaultKey(\n specifics.encrypted()));\n }\n}\n\n// Passwords have their own handling for encryption. Verify it does not result\n// in unnecessary writes via SetEntitySpecifics.\nTEST_F(SyncManagerTest, UpdatePasswordSetEntitySpecificsNoChange) {\n std::string client_tag = \"title\";\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n sync_pb::EntitySpecifics entity_specifics;\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n sync_pb::PasswordSpecificsData data;\n data.set_password_value(\"secret\");\n cryptographer->Encrypt(\n data,\n entity_specifics.mutable_password()->\n mutable_encrypted());\n }\n MakeServerNode(sync_manager_.GetUserShare(), PASSWORDS, client_tag,\n syncable::GenerateSyncableHash(PASSWORDS,\n client_tag),\n entity_specifics);\n // New node shouldn't start off unsynced.\n EXPECT_FALSE(ResetUnsyncedEntry(PASSWORDS, client_tag));\n\n // Manually change to the same data via SetEntitySpecifics. 
Should not set\n // is_unsynced.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(PASSWORDS, client_tag));\n node.SetEntitySpecifics(entity_specifics);\n }\n EXPECT_FALSE(ResetUnsyncedEntry(PASSWORDS, client_tag));\n}\n\n// Passwords have their own handling for encryption. Verify it does not result\n// in unnecessary writes via SetPasswordSpecifics.\nTEST_F(SyncManagerTest, UpdatePasswordSetPasswordSpecifics) {\n std::string client_tag = \"title\";\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n sync_pb::EntitySpecifics entity_specifics;\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n sync_pb::PasswordSpecificsData data;\n data.set_password_value(\"secret\");\n cryptographer->Encrypt(\n data,\n entity_specifics.mutable_password()->\n mutable_encrypted());\n }\n MakeServerNode(sync_manager_.GetUserShare(), PASSWORDS, client_tag,\n syncable::GenerateSyncableHash(PASSWORDS,\n client_tag),\n entity_specifics);\n // New node shouldn't start off unsynced.\n EXPECT_FALSE(ResetUnsyncedEntry(PASSWORDS, client_tag));\n\n // Manually change to the same data via SetPasswordSpecifics. Should not set\n // is_unsynced.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(PASSWORDS, client_tag));\n node.SetPasswordSpecifics(node.GetPasswordSpecifics());\n }\n EXPECT_FALSE(ResetUnsyncedEntry(PASSWORDS, client_tag));\n\n // Manually change to different data. 
Should set is_unsynced.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(PASSWORDS, client_tag));\n Cryptographer* cryptographer = trans.GetCryptographer();\n sync_pb::PasswordSpecificsData data;\n data.set_password_value(\"secret2\");\n cryptographer->Encrypt(\n data,\n entity_specifics.mutable_password()->mutable_encrypted());\n node.SetPasswordSpecifics(data);\n const syncable::Entry* node_entry = node.GetEntry();\n EXPECT_TRUE(node_entry->Get(IS_UNSYNCED));\n }\n}\n\n// Passwords have their own handling for encryption. Verify setting a new\n// passphrase updates the data.\nTEST_F(SyncManagerTest, UpdatePasswordNewPassphrase) {\n std::string client_tag = \"title\";\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n sync_pb::EntitySpecifics entity_specifics;\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n sync_pb::PasswordSpecificsData data;\n data.set_password_value(\"secret\");\n cryptographer->Encrypt(\n data,\n entity_specifics.mutable_password()->mutable_encrypted());\n }\n MakeServerNode(sync_manager_.GetUserShare(), PASSWORDS, client_tag,\n syncable::GenerateSyncableHash(PASSWORDS,\n client_tag),\n entity_specifics);\n // New node shouldn't start off unsynced.\n EXPECT_FALSE(ResetUnsyncedEntry(PASSWORDS, client_tag));\n\n // Set a new passphrase. 
Should set is_unsynced.\n testing::Mock::VerifyAndClearExpectations(&encryption_observer_);\n EXPECT_CALL(encryption_observer_,\n OnBootstrapTokenUpdated(_, PASSPHRASE_BOOTSTRAP_TOKEN));\n EXPECT_CALL(encryption_observer_, OnPassphraseAccepted());\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n EXPECT_CALL(encryption_observer_,\n OnPassphraseTypeChanged(CUSTOM_PASSPHRASE, _));\n sync_manager_.GetEncryptionHandler()->SetEncryptionPassphrase(\n \"new_passphrase\",\n true);\n EXPECT_EQ(CUSTOM_PASSPHRASE,\n sync_manager_.GetEncryptionHandler()->GetPassphraseType());\n EXPECT_TRUE(ResetUnsyncedEntry(PASSWORDS, client_tag));\n}\n\n// Passwords have their own handling for encryption. Verify it does not result\n// in unnecessary writes via ReencryptEverything.\nTEST_F(SyncManagerTest, UpdatePasswordReencryptEverything) {\n std::string client_tag = \"title\";\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n sync_pb::EntitySpecifics entity_specifics;\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* cryptographer = trans.GetCryptographer();\n sync_pb::PasswordSpecificsData data;\n data.set_password_value(\"secret\");\n cryptographer->Encrypt(\n data,\n entity_specifics.mutable_password()->mutable_encrypted());\n }\n MakeServerNode(sync_manager_.GetUserShare(), PASSWORDS, client_tag,\n syncable::GenerateSyncableHash(PASSWORDS,\n client_tag),\n entity_specifics);\n // New node shouldn't start off unsynced.\n EXPECT_FALSE(ResetUnsyncedEntry(PASSWORDS, client_tag));\n\n // Force a re-encrypt everything. 
Should not set is_unsynced.\n testing::Mock::VerifyAndClearExpectations(&encryption_observer_);\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n EXPECT_CALL(encryption_observer_, OnEncryptedTypesChanged(_, false));\n sync_manager_.GetEncryptionHandler()->Init();\n PumpLoop();\n EXPECT_FALSE(ResetUnsyncedEntry(PASSWORDS, client_tag));\n}\n\n// Verify SetTitle(..) doesn't unnecessarily set IS_UNSYNCED for bookmarks\n// when we write the same data, but does set it when we write new data.\nTEST_F(SyncManagerTest, SetBookmarkTitle) {\n std::string client_tag = \"title\";\n sync_pb::EntitySpecifics entity_specifics;\n entity_specifics.mutable_bookmark()->set_url(\"url\");\n entity_specifics.mutable_bookmark()->set_title(\"title\");\n MakeServerNode(sync_manager_.GetUserShare(), BOOKMARKS, client_tag,\n syncable::GenerateSyncableHash(BOOKMARKS,\n client_tag),\n entity_specifics);\n // New node shouldn't start off unsynced.\n EXPECT_FALSE(ResetUnsyncedEntry(BOOKMARKS, client_tag));\n\n // Manually change to the same title. Should not set is_unsynced.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(BOOKMARKS, client_tag));\n node.SetTitle(UTF8ToWide(client_tag));\n }\n EXPECT_FALSE(ResetUnsyncedEntry(BOOKMARKS, client_tag));\n\n // Manually change to new title. Should set is_unsynced.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(BOOKMARKS, client_tag));\n node.SetTitle(UTF8ToWide(\"title2\"));\n }\n EXPECT_TRUE(ResetUnsyncedEntry(BOOKMARKS, client_tag));\n}\n\n// Verify SetTitle(..) 
doesn't unnecessarily set IS_UNSYNCED for encrypted\n// bookmarks when we write the same data, but does set it when we write new\n// data.\nTEST_F(SyncManagerTest, SetBookmarkTitleWithEncryption) {\n std::string client_tag = \"title\";\n sync_pb::EntitySpecifics entity_specifics;\n entity_specifics.mutable_bookmark()->set_url(\"url\");\n entity_specifics.mutable_bookmark()->set_title(\"title\");\n MakeServerNode(sync_manager_.GetUserShare(), BOOKMARKS, client_tag,\n syncable::GenerateSyncableHash(BOOKMARKS,\n client_tag),\n entity_specifics);\n // New node shouldn't start off unsynced.\n EXPECT_FALSE(ResetUnsyncedEntry(BOOKMARKS, client_tag));\n\n // Encrypt the datatatype, should set is_unsynced.\n EXPECT_CALL(encryption_observer_,\n OnEncryptedTypesChanged(\n HasModelTypes(EncryptableUserTypes()), true));\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, FULL_ENCRYPTION));\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n EXPECT_CALL(encryption_observer_, OnEncryptedTypesChanged(_, true));\n sync_manager_.GetEncryptionHandler()->Init();\n PumpLoop();\n EXPECT_TRUE(ResetUnsyncedEntry(BOOKMARKS, client_tag));\n\n // Manually change to the same title. Should not set is_unsynced.\n // NON_UNIQUE_NAME should be kEncryptedString.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(BOOKMARKS, client_tag));\n node.SetTitle(UTF8ToWide(client_tag));\n const syncable::Entry* node_entry = node.GetEntry();\n const sync_pb::EntitySpecifics& specifics = node_entry->Get(SPECIFICS);\n EXPECT_TRUE(specifics.has_encrypted());\n EXPECT_EQ(kEncryptedString, node_entry->Get(NON_UNIQUE_NAME));\n }\n EXPECT_FALSE(ResetUnsyncedEntry(BOOKMARKS, client_tag));\n\n // Manually change to new title. Should set is_unsynced. 
NON_UNIQUE_NAME\n // should still be kEncryptedString.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(BOOKMARKS, client_tag));\n node.SetTitle(UTF8ToWide(\"title2\"));\n const syncable::Entry* node_entry = node.GetEntry();\n const sync_pb::EntitySpecifics& specifics = node_entry->Get(SPECIFICS);\n EXPECT_TRUE(specifics.has_encrypted());\n EXPECT_EQ(kEncryptedString, node_entry->Get(NON_UNIQUE_NAME));\n }\n EXPECT_TRUE(ResetUnsyncedEntry(BOOKMARKS, client_tag));\n}\n\n// Verify SetTitle(..) doesn't unnecessarily set IS_UNSYNCED for non-bookmarks\n// when we write the same data, but does set it when we write new data.\nTEST_F(SyncManagerTest, SetNonBookmarkTitle) {\n std::string client_tag = \"title\";\n sync_pb::EntitySpecifics entity_specifics;\n entity_specifics.mutable_preference()->set_name(\"name\");\n entity_specifics.mutable_preference()->set_value(\"value\");\n MakeServerNode(sync_manager_.GetUserShare(),\n PREFERENCES,\n client_tag,\n syncable::GenerateSyncableHash(PREFERENCES,\n client_tag),\n entity_specifics);\n // New node shouldn't start off unsynced.\n EXPECT_FALSE(ResetUnsyncedEntry(PREFERENCES, client_tag));\n\n // Manually change to the same title. Should not set is_unsynced.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(PREFERENCES, client_tag));\n node.SetTitle(UTF8ToWide(client_tag));\n }\n EXPECT_FALSE(ResetUnsyncedEntry(PREFERENCES, client_tag));\n\n // Manually change to new title. Should set is_unsynced.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(PREFERENCES, client_tag));\n node.SetTitle(UTF8ToWide(\"title2\"));\n }\n EXPECT_TRUE(ResetUnsyncedEntry(PREFERENCES, client_tag));\n}\n\n// Verify SetTitle(..) 
doesn't unnecessarily set IS_UNSYNCED for encrypted\n// non-bookmarks when we write the same data or when we write new data\n// data (should remained kEncryptedString).\nTEST_F(SyncManagerTest, SetNonBookmarkTitleWithEncryption) {\n std::string client_tag = \"title\";\n sync_pb::EntitySpecifics entity_specifics;\n entity_specifics.mutable_preference()->set_name(\"name\");\n entity_specifics.mutable_preference()->set_value(\"value\");\n MakeServerNode(sync_manager_.GetUserShare(),\n PREFERENCES,\n client_tag,\n syncable::GenerateSyncableHash(PREFERENCES,\n client_tag),\n entity_specifics);\n // New node shouldn't start off unsynced.\n EXPECT_FALSE(ResetUnsyncedEntry(PREFERENCES, client_tag));\n\n // Encrypt the datatatype, should set is_unsynced.\n EXPECT_CALL(encryption_observer_,\n OnEncryptedTypesChanged(\n HasModelTypes(EncryptableUserTypes()), true));\n EXPECT_CALL(encryption_observer_, OnEncryptionComplete());\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, FULL_ENCRYPTION));\n EXPECT_CALL(encryption_observer_, OnCryptographerStateChanged(_));\n EXPECT_CALL(encryption_observer_, OnEncryptedTypesChanged(_, true));\n sync_manager_.GetEncryptionHandler()->Init();\n PumpLoop();\n EXPECT_TRUE(ResetUnsyncedEntry(PREFERENCES, client_tag));\n\n // Manually change to the same title. Should not set is_unsynced.\n // NON_UNIQUE_NAME should be kEncryptedString.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(PREFERENCES, client_tag));\n node.SetTitle(UTF8ToWide(client_tag));\n const syncable::Entry* node_entry = node.GetEntry();\n const sync_pb::EntitySpecifics& specifics = node_entry->Get(SPECIFICS);\n EXPECT_TRUE(specifics.has_encrypted());\n EXPECT_EQ(kEncryptedString, node_entry->Get(NON_UNIQUE_NAME));\n }\n EXPECT_FALSE(ResetUnsyncedEntry(PREFERENCES, client_tag));\n\n // Manually change to new title. 
Should not set is_unsynced because the\n // NON_UNIQUE_NAME should still be kEncryptedString.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(PREFERENCES, client_tag));\n node.SetTitle(UTF8ToWide(\"title2\"));\n const syncable::Entry* node_entry = node.GetEntry();\n const sync_pb::EntitySpecifics& specifics = node_entry->Get(SPECIFICS);\n EXPECT_TRUE(specifics.has_encrypted());\n EXPECT_EQ(kEncryptedString, node_entry->Get(NON_UNIQUE_NAME));\n EXPECT_FALSE(node_entry->Get(IS_UNSYNCED));\n }\n}\n\n// Ensure that titles are truncated to 255 bytes, and attempting to reset\n// them to their longer version does not set IS_UNSYNCED.\nTEST_F(SyncManagerTest, SetLongTitle) {\n const int kNumChars = 512;\n const std::string kClientTag = \"tag\";\n std::string title(kNumChars, '0');\n sync_pb::EntitySpecifics entity_specifics;\n entity_specifics.mutable_preference()->set_name(\"name\");\n entity_specifics.mutable_preference()->set_value(\"value\");\n MakeServerNode(sync_manager_.GetUserShare(),\n PREFERENCES,\n \"short_title\",\n syncable::GenerateSyncableHash(PREFERENCES,\n kClientTag),\n entity_specifics);\n // New node shouldn't start off unsynced.\n EXPECT_FALSE(ResetUnsyncedEntry(PREFERENCES, kClientTag));\n\n // Manually change to the long title. Should set is_unsynced.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(PREFERENCES, kClientTag));\n node.SetTitle(UTF8ToWide(title));\n EXPECT_EQ(node.GetTitle(), title.substr(0, 255));\n }\n EXPECT_TRUE(ResetUnsyncedEntry(PREFERENCES, kClientTag));\n\n // Manually change to the same title. 
Should not set is_unsynced.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(PREFERENCES, kClientTag));\n node.SetTitle(UTF8ToWide(title));\n EXPECT_EQ(node.GetTitle(), title.substr(0, 255));\n }\n EXPECT_FALSE(ResetUnsyncedEntry(PREFERENCES, kClientTag));\n\n // Manually change to new title. Should set is_unsynced.\n {\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(PREFERENCES, kClientTag));\n node.SetTitle(UTF8ToWide(\"title2\"));\n }\n EXPECT_TRUE(ResetUnsyncedEntry(PREFERENCES, kClientTag));\n}\n\n// Create an encrypted entry when the cryptographer doesn't think the type is\n// marked for encryption. Ensure reads/writes don't break and don't unencrypt\n// the data.\nTEST_F(SyncManagerTest, SetPreviouslyEncryptedSpecifics) {\n std::string client_tag = \"tag\";\n std::string url = \"url\";\n std::string url2 = \"new_url\";\n std::string title = \"title\";\n sync_pb::EntitySpecifics entity_specifics;\n EXPECT_TRUE(SetUpEncryption(WRITE_TO_NIGORI, DEFAULT_ENCRYPTION));\n {\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n Cryptographer* crypto = trans.GetCryptographer();\n sync_pb::EntitySpecifics bm_specifics;\n bm_specifics.mutable_bookmark()->set_title(\"title\");\n bm_specifics.mutable_bookmark()->set_url(\"url\");\n sync_pb::EncryptedData encrypted;\n crypto->Encrypt(bm_specifics, &encrypted);\n entity_specifics.mutable_encrypted()->CopyFrom(encrypted);\n AddDefaultFieldValue(BOOKMARKS, &entity_specifics);\n }\n MakeServerNode(sync_manager_.GetUserShare(), BOOKMARKS, client_tag,\n syncable::GenerateSyncableHash(BOOKMARKS,\n client_tag),\n entity_specifics);\n\n {\n // Verify the data.\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n 
node.InitByClientTagLookup(BOOKMARKS, client_tag));\n EXPECT_EQ(title, node.GetTitle());\n EXPECT_EQ(url, node.GetBookmarkSpecifics().url());\n }\n\n {\n // Overwrite the url (which overwrites the specifics).\n WriteTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n WriteNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(BOOKMARKS, client_tag));\n\n sync_pb::BookmarkSpecifics bookmark_specifics(node.GetBookmarkSpecifics());\n bookmark_specifics.set_url(url2);\n node.SetBookmarkSpecifics(bookmark_specifics);\n }\n\n {\n // Verify it's still encrypted and it has the most recent url.\n ReadTransaction trans(FROM_HERE, sync_manager_.GetUserShare());\n ReadNode node(&trans);\n EXPECT_EQ(BaseNode::INIT_OK,\n node.InitByClientTagLookup(BOOKMARKS, client_tag));\n EXPECT_EQ(title, node.GetTitle());\n EXPECT_EQ(url2, node.GetBookmarkSpecifics().url());\n const syncable::Entry* node_entry = node.GetEntry();\n EXPECT_EQ(kEncryptedString, node_entry->Get(NON_UNIQUE_NAME));\n const sync_pb::EntitySpecifics& specifics = node_entry->Get(SPECIFICS);\n EXPECT_TRUE(specifics.has_encrypted());\n }\n}\n\n// Verify transaction version of a model type is incremented when node of\n// that type is updated.\nTEST_F(SyncManagerTest, IncrementTransactionVersion) {\n ModelSafeRoutingInfo routing_info;\n GetModelSafeRoutingInfo(&routing_info);\n\n {\n ReadTransaction read_trans(FROM_HERE, sync_manager_.GetUserShare());\n for (ModelSafeRoutingInfo::iterator i = routing_info.begin();\n i != routing_info.end(); ++i) {\n // Transaction version is incremented when SyncManagerTest::SetUp()\n // creates a node of each type.\n EXPECT_EQ(1,\n sync_manager_.GetUserShare()->directory->\n GetTransactionVersion(i->first));\n }\n }\n\n // Create bookmark node to increment transaction version of bookmark model.\n std::string client_tag = \"title\";\n sync_pb::EntitySpecifics entity_specifics;\n entity_specifics.mutable_bookmark()->set_url(\"url\");\n 
entity_specifics.mutable_bookmark()->set_title(\"title\");\n MakeServerNode(sync_manager_.GetUserShare(), BOOKMARKS, client_tag,\n syncable::GenerateSyncableHash(BOOKMARKS,\n client_tag),\n entity_specifics);\n\n {\n ReadTransaction read_trans(FROM_HERE, sync_manager_.GetUserShare());\n for (ModelSafeRoutingInfo::iterator i = routing_info.begin();\n i != routing_info.end(); ++i) {\n EXPECT_EQ(i->first == BOOKMARKS ? 2 : 1,\n sync_manager_.GetUserShare()->directory->\n GetTransactionVersion(i->first));\n }\n }\n}\n\nclass MockSyncScheduler : public FakeSyncScheduler {\n public:\n MockSyncScheduler() : FakeSyncScheduler() {}\n virtual ~MockSyncScheduler() {}\n\n MOCK_METHOD1(Start, void(SyncScheduler::Mode));\n MOCK_METHOD1(ScheduleConfiguration, bool(const ConfigurationParams&));\n};\n\nclass ComponentsFactory : public TestInternalComponentsFactory {\n public:\n ComponentsFactory(const Switches& switches,\n SyncScheduler* scheduler_to_use,\n sessions::SyncSessionContext** session_context)\n : TestInternalComponentsFactory(switches, syncer::STORAGE_IN_MEMORY),\n scheduler_to_use_(scheduler_to_use),\n session_context_(session_context) {}\n virtual ~ComponentsFactory() {}\n\n virtual scoped_ptr BuildScheduler(\n const std::string& name,\n sessions::SyncSessionContext* context) OVERRIDE {\n *session_context_ = context;\n return scheduler_to_use_.Pass();\n }\n\n private:\n scoped_ptr scheduler_to_use_;\n sessions::SyncSessionContext** session_context_;\n};\n\nclass SyncManagerTestWithMockScheduler : public SyncManagerTest {\n public:\n SyncManagerTestWithMockScheduler() : scheduler_(NULL) {}\n virtual InternalComponentsFactory* GetFactory() OVERRIDE {\n scheduler_ = new MockSyncScheduler();\n return new ComponentsFactory(GetSwitches(), scheduler_, &session_context_);\n }\n\n MockSyncScheduler* scheduler() { return scheduler_; }\n sessions::SyncSessionContext* session_context() {\n return session_context_;\n }\n\n private:\n MockSyncScheduler* scheduler_;\n 
sessions::SyncSessionContext* session_context_;\n};\n\n// Test that the configuration params are properly created and sent to\n// ScheduleConfigure. No callback should be invoked. Any disabled datatypes\n// should be purged.\nTEST_F(SyncManagerTestWithMockScheduler, BasicConfiguration) {\n ConfigureReason reason = CONFIGURE_REASON_RECONFIGURATION;\n ModelTypeSet types_to_download(BOOKMARKS, PREFERENCES);\n ModelSafeRoutingInfo new_routing_info;\n GetModelSafeRoutingInfo(&new_routing_info);\n ModelTypeSet enabled_types = GetRoutingInfoTypes(new_routing_info);\n ModelTypeSet disabled_types = Difference(ModelTypeSet::All(), enabled_types);\n\n ConfigurationParams params;\n EXPECT_CALL(*scheduler(), Start(SyncScheduler::CONFIGURATION_MODE));\n EXPECT_CALL(*scheduler(), ScheduleConfiguration(_)).\n WillOnce(DoAll(SaveArg<0>(¶ms), Return(true)));\n\n // Set data for all types.\n ModelTypeSet protocol_types = ProtocolTypes();\n for (ModelTypeSet::Iterator iter = protocol_types.First(); iter.Good();\n iter.Inc()) {\n SetProgressMarkerForType(iter.Get(), true);\n }\n\n CallbackCounter ready_task_counter, retry_task_counter;\n sync_manager_.ConfigureSyncer(\n reason,\n types_to_download,\n ModelTypeSet(),\n ModelTypeSet(),\n ModelTypeSet(),\n new_routing_info,\n base::Bind(&CallbackCounter::Callback,\n base::Unretained(&ready_task_counter)),\n base::Bind(&CallbackCounter::Callback,\n base::Unretained(&retry_task_counter)));\n EXPECT_EQ(0, ready_task_counter.times_called());\n EXPECT_EQ(0, retry_task_counter.times_called());\n EXPECT_EQ(sync_pb::GetUpdatesCallerInfo::RECONFIGURATION,\n params.source);\n EXPECT_TRUE(types_to_download.Equals(params.types_to_download));\n EXPECT_EQ(new_routing_info, params.routing_info);\n\n // Verify all the disabled types were purged.\n EXPECT_TRUE(sync_manager_.InitialSyncEndedTypes().Equals(\n enabled_types));\n EXPECT_TRUE(sync_manager_.GetTypesWithEmptyProgressMarkerToken(\n ModelTypeSet::All()).Equals(disabled_types));\n}\n\n// Test that 
on a reconfiguration (configuration where the session context\n// already has routing info), only those recently disabled types are purged.\nTEST_F(SyncManagerTestWithMockScheduler, ReConfiguration) {\n ConfigureReason reason = CONFIGURE_REASON_RECONFIGURATION;\n ModelTypeSet types_to_download(BOOKMARKS, PREFERENCES);\n ModelTypeSet disabled_types = ModelTypeSet(THEMES, SESSIONS);\n ModelSafeRoutingInfo old_routing_info;\n ModelSafeRoutingInfo new_routing_info;\n GetModelSafeRoutingInfo(&old_routing_info);\n new_routing_info = old_routing_info;\n new_routing_info.erase(THEMES);\n new_routing_info.erase(SESSIONS);\n ModelTypeSet enabled_types = GetRoutingInfoTypes(new_routing_info);\n\n ConfigurationParams params;\n EXPECT_CALL(*scheduler(), Start(SyncScheduler::CONFIGURATION_MODE));\n EXPECT_CALL(*scheduler(), ScheduleConfiguration(_)).\n WillOnce(DoAll(SaveArg<0>(¶ms), Return(true)));\n\n // Set data for all types except those recently disabled (so we can verify\n // only those recently disabled are purged) .\n ModelTypeSet protocol_types = ProtocolTypes();\n for (ModelTypeSet::Iterator iter = protocol_types.First(); iter.Good();\n iter.Inc()) {\n if (!disabled_types.Has(iter.Get())) {\n SetProgressMarkerForType(iter.Get(), true);\n } else {\n SetProgressMarkerForType(iter.Get(), false);\n }\n }\n\n // Set the context to have the old routing info.\n session_context()->set_routing_info(old_routing_info);\n\n CallbackCounter ready_task_counter, retry_task_counter;\n sync_manager_.ConfigureSyncer(\n reason,\n types_to_download,\n ModelTypeSet(),\n ModelTypeSet(),\n ModelTypeSet(),\n new_routing_info,\n base::Bind(&CallbackCounter::Callback,\n base::Unretained(&ready_task_counter)),\n base::Bind(&CallbackCounter::Callback,\n base::Unretained(&retry_task_counter)));\n EXPECT_EQ(0, ready_task_counter.times_called());\n EXPECT_EQ(0, retry_task_counter.times_called());\n EXPECT_EQ(sync_pb::GetUpdatesCallerInfo::RECONFIGURATION,\n params.source);\n 
EXPECT_TRUE(types_to_download.Equals(params.types_to_download));\n EXPECT_EQ(new_routing_info, params.routing_info);\n\n // Verify only the recently disabled types were purged.\n EXPECT_TRUE(sync_manager_.GetTypesWithEmptyProgressMarkerToken(\n ProtocolTypes()).Equals(disabled_types));\n}\n\n// Test that the retry callback is invoked on configuration failure.\nTEST_F(SyncManagerTestWithMockScheduler, ConfigurationRetry) {\n ConfigureReason reason = CONFIGURE_REASON_RECONFIGURATION;\n ModelTypeSet types_to_download(BOOKMARKS, PREFERENCES);\n ModelSafeRoutingInfo new_routing_info;\n GetModelSafeRoutingInfo(&new_routing_info);\n\n ConfigurationParams params;\n EXPECT_CALL(*scheduler(), Start(SyncScheduler::CONFIGURATION_MODE));\n EXPECT_CALL(*scheduler(), ScheduleConfiguration(_)).\n WillOnce(DoAll(SaveArg<0>(¶ms), Return(false)));\n\n CallbackCounter ready_task_counter, retry_task_counter;\n sync_manager_.ConfigureSyncer(\n reason,\n types_to_download,\n ModelTypeSet(),\n ModelTypeSet(),\n ModelTypeSet(),\n new_routing_info,\n base::Bind(&CallbackCounter::Callback,\n base::Unretained(&ready_task_counter)),\n base::Bind(&CallbackCounter::Callback,\n base::Unretained(&retry_task_counter)));\n EXPECT_EQ(0, ready_task_counter.times_called());\n EXPECT_EQ(1, retry_task_counter.times_called());\n EXPECT_EQ(sync_pb::GetUpdatesCallerInfo::RECONFIGURATION,\n params.source);\n EXPECT_TRUE(types_to_download.Equals(params.types_to_download));\n EXPECT_EQ(new_routing_info, params.routing_info);\n}\n\n// Test that PurgePartiallySyncedTypes purges only those types that have not\n// fully completed their initial download and apply.\nTEST_F(SyncManagerTest, PurgePartiallySyncedTypes) {\n ModelSafeRoutingInfo routing_info;\n GetModelSafeRoutingInfo(&routing_info);\n ModelTypeSet enabled_types = GetRoutingInfoTypes(routing_info);\n\n UserShare* share = sync_manager_.GetUserShare();\n\n // The test harness automatically initializes all types in the routing info.\n // Check that autofill 
is not among them.\n ASSERT_FALSE(enabled_types.Has(AUTOFILL));\n\n // Further ensure that the test harness did not create its root node.\n {\n syncable::ReadTransaction trans(FROM_HERE, share->directory.get());\n syncable::Entry autofill_root_node(&trans, syncable::GET_BY_SERVER_TAG,\n ModelTypeToRootTag(AUTOFILL));\n ASSERT_FALSE(autofill_root_node.good());\n }\n\n // One more redundant check.\n ASSERT_FALSE(sync_manager_.InitialSyncEndedTypes().Has(AUTOFILL));\n\n // Give autofill a progress marker.\n sync_pb::DataTypeProgressMarker autofill_marker;\n autofill_marker.set_data_type_id(\n GetSpecificsFieldNumberFromModelType(AUTOFILL));\n autofill_marker.set_token(\"token\");\n share->directory->SetDownloadProgress(AUTOFILL, autofill_marker);\n\n // Also add a pending autofill root node update from the server.\n TestEntryFactory factory_(share->directory.get());\n int autofill_meta = factory_.CreateUnappliedRootNode(AUTOFILL);\n\n // Preferences is an enabled type. Check that the harness initialized it.\n ASSERT_TRUE(enabled_types.Has(PREFERENCES));\n ASSERT_TRUE(sync_manager_.InitialSyncEndedTypes().Has(PREFERENCES));\n\n // Give preferencse a progress marker.\n sync_pb::DataTypeProgressMarker prefs_marker;\n prefs_marker.set_data_type_id(\n GetSpecificsFieldNumberFromModelType(PREFERENCES));\n prefs_marker.set_token(\"token\");\n share->directory->SetDownloadProgress(PREFERENCES, prefs_marker);\n\n // Add a fully synced preferences node under the root.\n std::string pref_client_tag = \"prefABC\";\n std::string pref_hashed_tag = \"hashXYZ\";\n sync_pb::EntitySpecifics pref_specifics;\n AddDefaultFieldValue(PREFERENCES, &pref_specifics);\n int pref_meta = MakeServerNode(\n share, PREFERENCES, pref_client_tag, pref_hashed_tag, pref_specifics);\n\n // And now, the purge.\n EXPECT_TRUE(sync_manager_.PurgePartiallySyncedTypes());\n\n // Ensure that autofill lost its progress marker, but preferences did not.\n ModelTypeSet empty_tokens =\n 
sync_manager_.GetTypesWithEmptyProgressMarkerToken(ModelTypeSet::All());\n EXPECT_TRUE(empty_tokens.Has(AUTOFILL));\n EXPECT_FALSE(empty_tokens.Has(PREFERENCES));\n\n // Ensure that autofill lots its node, but preferences did not.\n {\n syncable::ReadTransaction trans(FROM_HERE, share->directory.get());\n syncable::Entry autofill_node(&trans, GET_BY_HANDLE, autofill_meta);\n syncable::Entry pref_node(&trans, GET_BY_HANDLE, pref_meta);\n EXPECT_FALSE(autofill_node.good());\n EXPECT_TRUE(pref_node.good());\n }\n}\n\n// Test CleanupDisabledTypes properly purges all disabled types as specified\n// by the previous and current enabled params.\nTEST_F(SyncManagerTest, PurgeDisabledTypes) {\n ModelSafeRoutingInfo routing_info;\n GetModelSafeRoutingInfo(&routing_info);\n ModelTypeSet enabled_types = GetRoutingInfoTypes(routing_info);\n ModelTypeSet disabled_types = Difference(ModelTypeSet::All(), enabled_types);\n\n // The harness should have initialized the enabled_types for us.\n EXPECT_TRUE(enabled_types.Equals(sync_manager_.InitialSyncEndedTypes()));\n\n // Set progress markers for all types.\n ModelTypeSet protocol_types = ProtocolTypes();\n for (ModelTypeSet::Iterator iter = protocol_types.First(); iter.Good();\n iter.Inc()) {\n SetProgressMarkerForType(iter.Get(), true);\n }\n\n // Verify all the enabled types remain after cleanup, and all the disabled\n // types were purged.\n sync_manager_.PurgeDisabledTypes(ModelTypeSet::All(),\n enabled_types,\n ModelTypeSet(),\n ModelTypeSet());\n EXPECT_TRUE(enabled_types.Equals(sync_manager_.InitialSyncEndedTypes()));\n EXPECT_TRUE(disabled_types.Equals(\n sync_manager_.GetTypesWithEmptyProgressMarkerToken(ModelTypeSet::All())));\n\n // Disable some more types.\n disabled_types.Put(BOOKMARKS);\n disabled_types.Put(PREFERENCES);\n ModelTypeSet new_enabled_types =\n Difference(ModelTypeSet::All(), disabled_types);\n\n // Verify only the non-disabled types remain after cleanup.\n sync_manager_.PurgeDisabledTypes(enabled_types,\n 
new_enabled_types,\n ModelTypeSet(),\n ModelTypeSet());\n EXPECT_TRUE(new_enabled_types.Equals(sync_manager_.InitialSyncEndedTypes()));\n EXPECT_TRUE(disabled_types.Equals(\n sync_manager_.GetTypesWithEmptyProgressMarkerToken(ModelTypeSet::All())));\n}\n\n// Test PurgeDisabledTypes properly unapplies types by deleting their local data\n// and preserving their server data and progress marker.\nTEST_F(SyncManagerTest, PurgeUnappliedTypes) {\n ModelSafeRoutingInfo routing_info;\n GetModelSafeRoutingInfo(&routing_info);\n ModelTypeSet unapplied_types = ModelTypeSet(BOOKMARKS, PREFERENCES);\n ModelTypeSet enabled_types = GetRoutingInfoTypes(routing_info);\n ModelTypeSet disabled_types = Difference(ModelTypeSet::All(), enabled_types);\n\n // The harness should have initialized the enabled_types for us.\n EXPECT_TRUE(enabled_types.Equals(sync_manager_.InitialSyncEndedTypes()));\n\n // Set progress markers for all types.\n ModelTypeSet protocol_types = ProtocolTypes();\n for (ModelTypeSet::Iterator iter = protocol_types.First(); iter.Good();\n iter.Inc()) {\n SetProgressMarkerForType(iter.Get(), true);\n }\n\n // Add the following kinds of items:\n // 1. Fully synced preference.\n // 2. Locally created preference, server unknown, unsynced\n // 3. Locally deleted preference, server known, unsynced\n // 4. Server deleted preference, locally known.\n // 5. Server created preference, locally unknown, unapplied.\n // 6. 
A fully synced bookmark (no unique_client_tag).\n UserShare* share = sync_manager_.GetUserShare();\n sync_pb::EntitySpecifics pref_specifics;\n AddDefaultFieldValue(PREFERENCES, &pref_specifics);\n sync_pb::EntitySpecifics bm_specifics;\n AddDefaultFieldValue(BOOKMARKS, &bm_specifics);\n int pref1_meta = MakeServerNode(\n share, PREFERENCES, \"pref1\", \"hash1\", pref_specifics);\n int64 pref2_meta = MakeNode(share, PREFERENCES, \"pref2\");\n int pref3_meta = MakeServerNode(\n share, PREFERENCES, \"pref3\", \"hash3\", pref_specifics);\n int pref4_meta = MakeServerNode(\n share, PREFERENCES, \"pref4\", \"hash4\", pref_specifics);\n int pref5_meta = MakeServerNode(\n share, PREFERENCES, \"pref5\", \"hash5\", pref_specifics);\n int bookmark_meta = MakeServerNode(\n share, BOOKMARKS, \"bookmark\", \"\", bm_specifics);\n\n {\n syncable::WriteTransaction trans(FROM_HERE,\n syncable::SYNCER,\n share->directory.get());\n // Pref's 1 and 2 are already set up properly.\n // Locally delete pref 3.\n syncable::MutableEntry pref3(&trans, GET_BY_HANDLE, pref3_meta);\n pref3.Put(IS_DEL, true);\n pref3.Put(IS_UNSYNCED, true);\n // Delete pref 4 at the server.\n syncable::MutableEntry pref4(&trans, GET_BY_HANDLE, pref4_meta);\n pref4.Put(syncable::SERVER_IS_DEL, true);\n pref4.Put(syncable::IS_UNAPPLIED_UPDATE, true);\n pref4.Put(syncable::SERVER_VERSION, 2);\n // Pref 5 is an new unapplied update.\n syncable::MutableEntry pref5(&trans, GET_BY_HANDLE, pref5_meta);\n pref5.Put(syncable::IS_UNAPPLIED_UPDATE, true);\n pref5.Put(syncable::IS_DEL, true);\n pref5.Put(syncable::BASE_VERSION, -1);\n // Bookmark is already set up properly\n }\n\n // Take a snapshot to clear all the dirty bits.\n share->directory.get()->SaveChanges();\n\n // Now request a purge for the unapplied types.\n disabled_types.PutAll(unapplied_types);\n ModelTypeSet new_enabled_types =\n Difference(ModelTypeSet::All(), disabled_types);\n sync_manager_.PurgeDisabledTypes(enabled_types,\n new_enabled_types,\n 
ModelTypeSet(),\n unapplied_types);\n\n // Verify the unapplied types still have progress markers and initial sync\n // ended after cleanup.\n EXPECT_TRUE(sync_manager_.InitialSyncEndedTypes().HasAll(unapplied_types));\n EXPECT_TRUE(\n sync_manager_.GetTypesWithEmptyProgressMarkerToken(unapplied_types).\n Empty());\n\n // Ensure the items were unapplied as necessary.\n {\n syncable::ReadTransaction trans(FROM_HERE, share->directory.get());\n syncable::Entry pref_node(&trans, GET_BY_HANDLE, pref1_meta);\n ASSERT_TRUE(pref_node.good());\n EXPECT_TRUE(pref_node.GetKernelCopy().is_dirty());\n EXPECT_FALSE(pref_node.Get(syncable::IS_UNSYNCED));\n EXPECT_TRUE(pref_node.Get(syncable::IS_UNAPPLIED_UPDATE));\n EXPECT_TRUE(pref_node.Get(IS_DEL));\n EXPECT_GT(pref_node.Get(syncable::SERVER_VERSION), 0);\n EXPECT_EQ(pref_node.Get(syncable::BASE_VERSION), -1);\n\n // Pref 2 should just be locally deleted.\n syncable::Entry pref2_node(&trans, GET_BY_HANDLE, pref2_meta);\n ASSERT_TRUE(pref2_node.good());\n EXPECT_TRUE(pref2_node.GetKernelCopy().is_dirty());\n EXPECT_FALSE(pref2_node.Get(syncable::IS_UNSYNCED));\n EXPECT_TRUE(pref2_node.Get(syncable::IS_DEL));\n EXPECT_FALSE(pref2_node.Get(syncable::IS_UNAPPLIED_UPDATE));\n EXPECT_TRUE(pref2_node.Get(IS_DEL));\n EXPECT_EQ(pref2_node.Get(syncable::SERVER_VERSION), 0);\n EXPECT_EQ(pref2_node.Get(syncable::BASE_VERSION), -1);\n\n syncable::Entry pref3_node(&trans, GET_BY_HANDLE, pref3_meta);\n ASSERT_TRUE(pref3_node.good());\n EXPECT_TRUE(pref3_node.GetKernelCopy().is_dirty());\n EXPECT_FALSE(pref3_node.Get(syncable::IS_UNSYNCED));\n EXPECT_TRUE(pref3_node.Get(syncable::IS_UNAPPLIED_UPDATE));\n EXPECT_TRUE(pref3_node.Get(IS_DEL));\n EXPECT_GT(pref3_node.Get(syncable::SERVER_VERSION), 0);\n EXPECT_EQ(pref3_node.Get(syncable::BASE_VERSION), -1);\n\n syncable::Entry pref4_node(&trans, GET_BY_HANDLE, pref4_meta);\n ASSERT_TRUE(pref4_node.good());\n EXPECT_TRUE(pref4_node.GetKernelCopy().is_dirty());\n 
EXPECT_FALSE(pref4_node.Get(syncable::IS_UNSYNCED));\n EXPECT_TRUE(pref4_node.Get(syncable::IS_UNAPPLIED_UPDATE));\n EXPECT_TRUE(pref4_node.Get(IS_DEL));\n EXPECT_GT(pref4_node.Get(syncable::SERVER_VERSION), 0);\n EXPECT_EQ(pref4_node.Get(syncable::BASE_VERSION), -1);\n\n // Pref 5 should remain untouched.\n syncable::Entry pref5_node(&trans, GET_BY_HANDLE, pref5_meta);\n ASSERT_TRUE(pref5_node.good());\n EXPECT_FALSE(pref5_node.GetKernelCopy().is_dirty());\n EXPECT_FALSE(pref5_node.Get(syncable::IS_UNSYNCED));\n EXPECT_TRUE(pref5_node.Get(syncable::IS_UNAPPLIED_UPDATE));\n EXPECT_TRUE(pref5_node.Get(IS_DEL));\n EXPECT_GT(pref5_node.Get(syncable::SERVER_VERSION), 0);\n EXPECT_EQ(pref5_node.Get(syncable::BASE_VERSION), -1);\n\n syncable::Entry bookmark_node(&trans, GET_BY_HANDLE, bookmark_meta);\n ASSERT_TRUE(bookmark_node.good());\n EXPECT_TRUE(bookmark_node.GetKernelCopy().is_dirty());\n EXPECT_FALSE(bookmark_node.Get(syncable::IS_UNSYNCED));\n EXPECT_TRUE(bookmark_node.Get(syncable::IS_UNAPPLIED_UPDATE));\n EXPECT_TRUE(bookmark_node.Get(IS_DEL));\n EXPECT_GT(bookmark_node.Get(syncable::SERVER_VERSION), 0);\n EXPECT_EQ(bookmark_node.Get(syncable::BASE_VERSION), -1);\n }\n}\n\n// A test harness to exercise the code that processes and passes changes from\n// the \"SYNCER\"-WriteTransaction destructor, through the SyncManager, to the\n// ChangeProcessor.\nclass SyncManagerChangeProcessingTest : public SyncManagerTest {\n public:\n virtual void OnChangesApplied(\n ModelType model_type,\n int64 model_version,\n const BaseTransaction* trans,\n const ImmutableChangeRecordList& changes) OVERRIDE {\n last_changes_ = changes;\n }\n\n virtual void OnChangesComplete(ModelType model_type) OVERRIDE {}\n\n const ImmutableChangeRecordList& GetRecentChangeList() {\n return last_changes_;\n }\n\n UserShare* share() {\n return sync_manager_.GetUserShare();\n }\n\n // Set some flags so our nodes reasonably approximate the real world scenario\n // and can get past 
CheckTreeInvariants.\n //\n // It's never going to be truly accurate, since we're squashing update\n // receipt, processing and application into a single transaction.\n void SetNodeProperties(syncable::MutableEntry *entry) {\n entry->Put(syncable::ID, id_factory_.NewServerId());\n entry->Put(syncable::BASE_VERSION, 10);\n entry->Put(syncable::SERVER_VERSION, 10);\n }\n\n // Looks for the given change in the list. Returns the index at which it was\n // found. Returns -1 on lookup failure.\n size_t FindChangeInList(int64 id, ChangeRecord::Action action) {\n SCOPED_TRACE(id);\n for (size_t i = 0; i < last_changes_.Get().size(); ++i) {\n if (last_changes_.Get()[i].id == id\n && last_changes_.Get()[i].action == action) {\n return i;\n }\n }\n ADD_FAILURE() << \"Failed to find specified change\";\n return -1;\n }\n\n // Returns the current size of the change list.\n //\n // Note that spurious changes do not necessarily indicate a problem.\n // Assertions on change list size can help detect problems, but it may be\n // necessary to reduce their strictness if the implementation changes.\n size_t GetChangeListSize() {\n return last_changes_.Get().size();\n }\n\n protected:\n ImmutableChangeRecordList last_changes_;\n TestIdFactory id_factory_;\n};\n\n// Test creation of a folder and a bookmark.\nTEST_F(SyncManagerChangeProcessingTest, AddBookmarks) {\n int64 type_root = GetIdForDataType(BOOKMARKS);\n int64 folder_id = kInvalidId;\n int64 child_id = kInvalidId;\n\n // Create a folder and a bookmark under it.\n {\n syncable::WriteTransaction trans(\n FROM_HERE, syncable::SYNCER, share()->directory.get());\n syncable::Entry root(&trans, syncable::GET_BY_HANDLE, type_root);\n ASSERT_TRUE(root.good());\n\n syncable::MutableEntry folder(&trans, syncable::CREATE,\n BOOKMARKS, root.Get(syncable::ID), \"folder\");\n ASSERT_TRUE(folder.good());\n SetNodeProperties(&folder);\n folder.Put(syncable::IS_DIR, true);\n folder_id = folder.Get(syncable::META_HANDLE);\n\n 
syncable::MutableEntry child(&trans, syncable::CREATE,\n BOOKMARKS, folder.Get(syncable::ID), \"child\");\n ASSERT_TRUE(child.good());\n SetNodeProperties(&child);\n child_id = child.Get(syncable::META_HANDLE);\n }\n\n // The closing of the above scope will delete the transaction. Its processed\n // changes should be waiting for us in a member of the test harness.\n EXPECT_EQ(2UL, GetChangeListSize());\n\n // We don't need to check these return values here. The function will add a\n // non-fatal failure if these changes are not found.\n size_t folder_change_pos =\n FindChangeInList(folder_id, ChangeRecord::ACTION_ADD);\n size_t child_change_pos =\n FindChangeInList(child_id, ChangeRecord::ACTION_ADD);\n\n // Parents are delivered before children.\n EXPECT_LT(folder_change_pos, child_change_pos);\n}\n\n// Test moving a bookmark into an empty folder.\nTEST_F(SyncManagerChangeProcessingTest, MoveBookmarkIntoEmptyFolder) {\n int64 type_root = GetIdForDataType(BOOKMARKS);\n int64 folder_b_id = kInvalidId;\n int64 child_id = kInvalidId;\n\n // Create two folders. 
Place a child under folder A.\n {\n syncable::WriteTransaction trans(\n FROM_HERE, syncable::SYNCER, share()->directory.get());\n syncable::Entry root(&trans, syncable::GET_BY_HANDLE, type_root);\n ASSERT_TRUE(root.good());\n\n syncable::MutableEntry folder_a(&trans, syncable::CREATE,\n BOOKMARKS, root.Get(syncable::ID), \"folderA\");\n ASSERT_TRUE(folder_a.good());\n SetNodeProperties(&folder_a);\n folder_a.Put(syncable::IS_DIR, true);\n\n syncable::MutableEntry folder_b(&trans, syncable::CREATE,\n BOOKMARKS, root.Get(syncable::ID), \"folderB\");\n ASSERT_TRUE(folder_b.good());\n SetNodeProperties(&folder_b);\n folder_b.Put(syncable::IS_DIR, true);\n folder_b_id = folder_b.Get(syncable::META_HANDLE);\n\n syncable::MutableEntry child(&trans, syncable::CREATE,\n BOOKMARKS, folder_a.Get(syncable::ID),\n \"child\");\n ASSERT_TRUE(child.good());\n SetNodeProperties(&child);\n child_id = child.Get(syncable::META_HANDLE);\n }\n\n // Close that transaction. The above was to setup the initial scenario. The\n // real test starts now.\n\n // Move the child from folder A to folder B.\n {\n syncable::WriteTransaction trans(\n FROM_HERE, syncable::SYNCER, share()->directory.get());\n\n syncable::Entry folder_b(&trans, syncable::GET_BY_HANDLE, folder_b_id);\n syncable::MutableEntry child(&trans, syncable::GET_BY_HANDLE, child_id);\n\n child.Put(syncable::PARENT_ID, folder_b.Get(syncable::ID));\n }\n\n EXPECT_EQ(1UL, GetChangeListSize());\n\n // Verify that this was detected as a real change. 
An early version of the\n // UniquePosition code had a bug where moves from one folder to another were\n // ignored unless the moved node's UniquePosition value was also changed in\n // some way.\n FindChangeInList(child_id, ChangeRecord::ACTION_UPDATE);\n}\n\n// Test moving a bookmark into a non-empty folder.\nTEST_F(SyncManagerChangeProcessingTest, MoveIntoPopulatedFolder) {\n int64 type_root = GetIdForDataType(BOOKMARKS);\n int64 child_a_id = kInvalidId;\n int64 child_b_id = kInvalidId;\n\n // Create two folders. Place one child each under folder A and folder B.\n {\n syncable::WriteTransaction trans(\n FROM_HERE, syncable::SYNCER, share()->directory.get());\n syncable::Entry root(&trans, syncable::GET_BY_HANDLE, type_root);\n ASSERT_TRUE(root.good());\n\n syncable::MutableEntry folder_a(&trans, syncable::CREATE,\n BOOKMARKS, root.Get(syncable::ID), \"folderA\");\n ASSERT_TRUE(folder_a.good());\n SetNodeProperties(&folder_a);\n folder_a.Put(syncable::IS_DIR, true);\n\n syncable::MutableEntry folder_b(&trans, syncable::CREATE,\n BOOKMARKS, root.Get(syncable::ID), \"folderB\");\n ASSERT_TRUE(folder_b.good());\n SetNodeProperties(&folder_b);\n folder_b.Put(syncable::IS_DIR, true);\n\n syncable::MutableEntry child_a(&trans, syncable::CREATE,\n BOOKMARKS, folder_a.Get(syncable::ID),\n \"childA\");\n ASSERT_TRUE(child_a.good());\n SetNodeProperties(&child_a);\n child_a_id = child_a.Get(syncable::META_HANDLE);\n\n syncable::MutableEntry child_b(&trans, syncable::CREATE,\n BOOKMARKS, folder_b.Get(syncable::ID),\n \"childB\");\n SetNodeProperties(&child_b);\n child_b_id = child_b.Get(syncable::META_HANDLE);\n\n }\n\n // Close that transaction. The above was to setup the initial scenario. 
The\n // real test starts now.\n\n {\n syncable::WriteTransaction trans(\n FROM_HERE, syncable::SYNCER, share()->directory.get());\n\n syncable::MutableEntry child_a(&trans, syncable::GET_BY_HANDLE, child_a_id);\n syncable::MutableEntry child_b(&trans, syncable::GET_BY_HANDLE, child_b_id);\n\n // Move child A from folder A to folder B and update its position.\n child_a.Put(syncable::PARENT_ID, child_b.Get(syncable::PARENT_ID));\n child_a.PutPredecessor(child_b.Get(syncable::ID));\n }\n\n EXPECT_EQ(1UL, GetChangeListSize());\n\n // Verify that only child a is in the change list.\n // (This function will add a failure if the lookup fails.)\n FindChangeInList(child_a_id, ChangeRecord::ACTION_UPDATE);\n}\n\n// Tests the ordering of deletion changes.\nTEST_F(SyncManagerChangeProcessingTest, DeletionsAndChanges) {\n int64 type_root = GetIdForDataType(BOOKMARKS);\n int64 folder_a_id = kInvalidId;\n int64 folder_b_id = kInvalidId;\n int64 child_id = kInvalidId;\n\n // Create two folders. Place a child under folder A.\n {\n syncable::WriteTransaction trans(\n FROM_HERE, syncable::SYNCER, share()->directory.get());\n syncable::Entry root(&trans, syncable::GET_BY_HANDLE, type_root);\n ASSERT_TRUE(root.good());\n\n syncable::MutableEntry folder_a(&trans, syncable::CREATE,\n BOOKMARKS, root.Get(syncable::ID), \"folderA\");\n ASSERT_TRUE(folder_a.good());\n SetNodeProperties(&folder_a);\n folder_a.Put(syncable::IS_DIR, true);\n folder_a_id = folder_a.Get(syncable::META_HANDLE);\n\n syncable::MutableEntry folder_b(&trans, syncable::CREATE,\n BOOKMARKS, root.Get(syncable::ID), \"folderB\");\n ASSERT_TRUE(folder_b.good());\n SetNodeProperties(&folder_b);\n folder_b.Put(syncable::IS_DIR, true);\n folder_b_id = folder_b.Get(syncable::META_HANDLE);\n\n syncable::MutableEntry child(&trans, syncable::CREATE,\n BOOKMARKS, folder_a.Get(syncable::ID),\n \"child\");\n ASSERT_TRUE(child.good());\n SetNodeProperties(&child);\n child_id = child.Get(syncable::META_HANDLE);\n }\n\n // Close that 
transaction. The above was to setup the initial scenario. The\n // real test starts now.\n\n {\n syncable::WriteTransaction trans(\n FROM_HERE, syncable::SYNCER, share()->directory.get());\n\n syncable::MutableEntry folder_a(\n &trans, syncable::GET_BY_HANDLE, folder_a_id);\n syncable::MutableEntry folder_b(\n &trans, syncable::GET_BY_HANDLE, folder_b_id);\n syncable::MutableEntry child(&trans, syncable::GET_BY_HANDLE, child_id);\n\n // Delete folder B and its child.\n child.Put(syncable::IS_DEL, true);\n folder_b.Put(syncable::IS_DEL, true);\n\n // Make an unrelated change to folder A.\n folder_a.Put(syncable::NON_UNIQUE_NAME, \"NewNameA\");\n }\n\n EXPECT_EQ(3UL, GetChangeListSize());\n\n size_t folder_a_pos =\n FindChangeInList(folder_a_id, ChangeRecord::ACTION_UPDATE);\n size_t folder_b_pos =\n FindChangeInList(folder_b_id, ChangeRecord::ACTION_DELETE);\n size_t child_pos = FindChangeInList(child_id, ChangeRecord::ACTION_DELETE);\n\n // Deletes should appear before updates.\n EXPECT_LT(child_pos, folder_a_pos);\n EXPECT_LT(folder_b_pos, folder_a_pos);\n}\n\n} // namespace\n", "meta": {"content_hash": "c1554cd8ff509d8b3221f1b041f94392", "timestamp": "", "source": "github", "line_count": 3580, "max_line_length": 80, "avg_line_length": 38.94469273743017, "alnum_prop": 0.6963606891308401, "repo_name": "hujiajie/pa-chromium", "id": "7ef301466ed7613529d1f598226b23acca6881da", "size": "139422", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "sync/internal_api/sync_manager_impl_unittest.cc", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "ASP", "bytes": "853"}, {"name": "AppleScript", "bytes": "6973"}, {"name": "Arduino", "bytes": "464"}, {"name": "Assembly", "bytes": "1176000"}, {"name": "Awk", "bytes": "9519"}, {"name": "C", "bytes": "74909991"}, {"name": "C#", "bytes": "1132"}, {"name": "C++", "bytes": "174451678"}, {"name": "DOT", "bytes": "1559"}, {"name": "F#", "bytes": "381"}, {"name": "Java", "bytes": "3787983"}, {"name": 
"JavaScript", "bytes": "19649328"}, {"name": "Logos", "bytes": "4517"}, {"name": "M", "bytes": "2190"}, {"name": "Matlab", "bytes": "3044"}, {"name": "Objective-C", "bytes": "7600008"}, {"name": "PHP", "bytes": "97817"}, {"name": "Perl", "bytes": "1473407"}, {"name": "Python", "bytes": "10022969"}, {"name": "R", "bytes": "262"}, {"name": "Shell", "bytes": "1590292"}, {"name": "Tcl", "bytes": "277077"}, {"name": "XSLT", "bytes": "13493"}]}} {"text": "\r\n// Copyright (c) 2014, Oracle and/or its affiliates.\r\n\r\n// Contributed and/or modified by Menelaos Karavelas, on behalf of Oracle\r\n\r\n// Licensed under the Boost Software License version 1.0.\r\n// http://www.boost.org/users/license.html\r\n\r\n#include \r\n\r\n#include \r\n#include \r\n#include \r\n\r\n\r\nnamespace bg = boost::geometry;\r\n\r\n\r\ntemplate \r\ninline void test_coordinate_system()\r\n{\r\n typedef bg::model::point bg_double_point;\r\n typedef bg::model::point bg_int_point;\r\n\r\n typedef rw_lon_lat_point rw_double_point;\r\n typedef ro_lon_lat_point ro_double_point;\r\n\r\n typedef rw_lon_lat_point rw_int_point;\r\n typedef ro_lon_lat_point ro_int_point;\r\n\r\n bg::concept::check();\r\n bg::concept::check();\r\n\r\n bg::concept::check();\r\n bg::concept::check();\r\n\r\n bg::concept::check();\r\n bg::concept::check();\r\n bg::concept::check();\r\n\r\n bg::concept::check();\r\n bg::concept::check();\r\n bg::concept::check();\r\n}\r\n\r\n\r\nint main()\r\n{\r\n test_coordinate_system >();\r\n test_coordinate_system >();\r\n\r\n test_coordinate_system >();\r\n test_coordinate_system >();\r\n\r\n test_coordinate_system >();\r\n test_coordinate_system >();\r\n\r\n test_coordinate_system >();\r\n test_coordinate_system >();\r\n\r\n return 0;\r\n}\r\n", "meta": {"content_hash": "098700a3746a729e8eaeea9345ae0414", "timestamp": "", "source": "github", "line_count": 62, "max_line_length": 74, "avg_line_length": 32.67741935483871, "alnum_prop": 0.6742349457058243, "repo_name": 
"Franky666/programmiersprachen-raytracer", "id": "36166d85b5049516033bb9b4da8d0b3e7e0752d1", "size": "2095", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "external/boost_1_59_0/libs/geometry/test/concepts/point_well_formed_non_cartesian.cpp", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "905071"}, {"name": "C++", "bytes": "46207"}, {"name": "CMake", "bytes": "4419"}]}} {"text": "package com.kerchin.yellownote.data.event;\n\n/**\n * Created by hkq325800 on 2016/12/7.\n */\n\npublic class NoteDeleteErrorEvent {\n public String getStr() {\n return str;\n }\n\n public void setStr(String str) {\n this.str = str;\n }\n\n private String str;\n\n public NoteDeleteErrorEvent(String str) {\n this.str = str;\n }\n}\n", "meta": {"content_hash": "3afe4b0d3020a8947953f1436713aeae", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 45, "avg_line_length": 16.857142857142858, "alnum_prop": 0.6129943502824858, "repo_name": "hkq325800/YellowNote", "id": "51bc5dab46fe49e839ca24f808fa1a2a3157ebef", "size": "354", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/kerchin/yellownote/data/event/NoteDeleteErrorEvent.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "959303"}]}} {"text": "class Room\n\n attr_accessor :name, :description, :paths\n\n def initialize(name, description)\n @name = name\n @description = description\n @paths = {}\n end\n\n def go(direction)\n @paths[direction]\n end\n\n def add_paths(paths)\n @paths.update(paths)\n end\n\nend\n\ncentral_corridor = Room.new(\"Central Corridor\",\n%q{\nThe Gothons of Planet Percal #25 have invaded your ship and destroyed\nyour entire crew. 
You are the last surviving member and your last\nmission is to get the neutron destruct bomb from the Weapons Armory,\nput it in the bridge, and blow the ship up after getting into an \nescape pod.\n\nYou're running down the central corridor to the Weapons Armory when\na Gothon jumps out, red scaly skin, dark grimy teeth, and evil clown costume\nflowing around his hate filled body. He's blocking the door to the\nArmory and about to pull a weapon to blast you.\n})\n\nlaser_weapon_armory = Room.new(\"Laser Weapon Armory\", \n%q{\n Lucky for you they made you learn Gothon insults in the academy.\nYou tell the one Gothon joke you know:\nLbhe zbgure vf fb sng, jura fur fvgf nebhaq gur ubhfr, fur fvgf nebhaq gur ubhfr.\nThe Gothon stops, tries not to laugh, then busts out laughing and can't move.\nWhile he's laughing you run up and shoot him square in the head\nputting him down, then jump through the Weapon Armory door.\n\nYou do a dive roll into the Weapon Armory, crouch and scan the room\nfor more Gothons that might be hiding. It's dead quiet, too quiet.\nYou stand up and run to the far side of the room and find the\nneutron bomb in its container. There's a keypad lock on the box\nand you need the code to get the bomb out. If you get the code\nwrong 10 times then the lock closes forever and you can't\nget the bomb. The code is 3 digits.\n})\n\nthe_bridge = Room.new(\"The Bridge\",\n%q{\nThe container clicks open and the seal breaks, letting gas out.\nYou grab the neutron bomb and run as fast as you can to the\nbridge where you must place it in the right spot.\n\nYou burst onto the Bridge with the neutron destruct bomb\nunder your arm and surprise 5 Gothons who are trying to\ntake control of the ship. Each of them has an even uglier\nclown costume than the last. 
They haven't pulled their\nweapons out yet, as they see the active bomb under your\narm and don't want to set it off.\n})\n\nescape_pod = Room.new(\"Escape Pod\",\n%q{\nYou point your blaster at the bomb under your arm\nand the Gothons put their hands up and start to sweat.\nYou inch backward to the door, open it, and then carefully\nplace the bomb on the floor, pointing your blaster at it.\nYou then jump back through the door, punch the close button\nand blast the lock so the Gothons can't get out.\nNow that the bomb is placed you run to the escape pod to\nget off this tin can.\n\nYou rush through the ship desperately trying to make it to\nthe escape pod before the whole ship explodes. It seems like\nhardly any Gothons are on the ship, so your run is clear of\ninterference. You get to the chamber with the escape pods, and\nnow need to pick one to take. Some of them could be damaged\nbut you don't have time to look. There's 5 pods, which one\ndo you take?\n})\n\nthe_end_winner = Room.new(\"The End\",\n%q{\nYou jump into pod 2 and hit the eject button.\nThe pod easily slides out into space heading to\nthe planet below. As it flies to the planet, you look\nback and see your ship implode then explode like a\nbright star, taking out the Gothon ship at the same\ntime. You won!\n})\n\nthe_end_loser = Room.new(\"The End\",\n%q{\nYou jump into a random pod and hit the eject button.\nThe pod escapes out into the void of space, then\nimplodes as the hull ruptures, crushing your body\ninto jam jelly.\n})\n\nescape_pod.add_paths({\n '2' => the_end_winner,\n '*' => the_end_loser\n})\n\ncentral_corridor_shoot = Room.new(\"Death\",\n%q{\nQuick on the draw you yank out your blaster and fire it at the Gothon.\nHis clown costume is flowing and moving around his body, which throws\noff your aim. Your laser hits his costume but misses him entirely. 
This\ncompletely ruins his brand new costume his mother bought him, which makes\nhim fly into an insane rage and blast you repeatedly in the face unitl you\nare dead. Then he eats you.\n})\n\ncentral_corridor_dodge = Room.new(\"Death\",\n%q{\nLike a world class boxer you dodge, weave, slip and slide right as the Gothon's\nblaster cranks a laser past your head. In the middle of your artful dodge\nyour foot slips and you bang your head on the metal wall and pass out. You wake up\nshortly after only to die as the Gothon stomps on your head and eats you.\n})\n\nthe_bridge_bomb = Room.new(\"Death\",\n%q{\nIn a panic you throw the bomb at the group of Gothons and make a leap for the\ndoor. Right as you drop it a Gothon shoots you right in hte back killing you.\nAs you die you see another Gothon frantically tyr to disarm the bomb. You die\nknowing they will probably blow up when it goes off.\n})\n\nlaser_weapon_armory_fail = Room.new(\"Death\",\n%q{\nThe lock buzzes one last time and then you hear a sickening melting sound\nas the mechanism is fused together. You decide to sit there, and finally\nthe Gothons blow up the ship from their ship and you die.\n})\n\nthe_bridge.add_paths({\n 'throw the bomb' => the_bridge_bomb,\n 'slowly place the bomb' => escape_pod\n})\n\nlaser_weapon_armory.add_paths({\n '0132' => the_bridge,\n '*' => laser_weapon_armory_fail\n})\n\ncentral_corridor.add_paths({\n 'shoot!' => central_corridor_shoot,\n 'dodge!' 
=> central_corridor_dodge,\n 'tell a joke' => laser_weapon_armory\n})\n\nSTART = central_corridor\n\nROOMS = { \"central_corridor\" => central_corridor,\n \"central_corridor_shoot\" => central_corridor_shoot,\n \"central_corridor_dodge\" => central_corridor_dodge, \n \"laser_weapon_armory\" => laser_weapon_armory,\n \"laser_weapon_armory_fail\" => laser_weapon_armory_fail,\n \"the_bridge\" => the_bridge,\n \"the_bridge_bomb\" => the_bridge_bomb,\n \"escape_pod\" => escape_pod,\n \"the_end_winner\" => the_end_winner,\n \"the_end_loser\" => the_end_loser }", "meta": {"content_hash": "79bdf1f5afb031f0b6f0d47bc5dbb008", "timestamp": "", "source": "github", "line_count": 170, "max_line_length": 82, "avg_line_length": 35.72941176470588, "alnum_prop": 0.724399078037537, "repo_name": "j0rgy/gothonweb", "id": "0a5521729c53f049e097cae019927020d7ab463a", "size": "6074", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/map.rb", "mode": "33188", "license": "mit", "language": [{"name": "Ruby", "bytes": "11917"}]}} {"text": "var startgame = function(){\n\t location.href = \"http:localhost:8080/play\";\n};", "meta": {"content_hash": "7385feb6712caa9db6b09603d43682d3", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 45, "avg_line_length": 25.333333333333332, "alnum_prop": 0.6973684210526315, "repo_name": "abhilash-dev/professor-X", "id": "2c2bb6a20688dd06c71cf99437e2c413400839d7", "size": "76", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Professor-X/src/main/resources/static/js/start.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "47246"}, {"name": "HTML", "bytes": "22208"}, {"name": "Java", "bytes": "69219"}, {"name": "JavaScript", "bytes": "17123"}]}} {"text": "export interface Middleware {\n (context: T, next: ContinuationMiddleware): any\n}\nexport interface ContinuationMiddleware {\n (context?: T, next?: Middleware): any\n}\n\nconst flatten = (values: Array>) => ([] as 
any).concat(...values) as T[],\n noop = function noop() {\n return Promise.resolve()\n }\n\nfunction throwIfHasBeenCalled(fn: any) {\n if (fn.__appbuildercalled) {\n throw new Error('Cannot call next more than once')\n }\n return (fn.__appbuildercalled = true)\n}\n\nfunction tryInvokeMiddleware(context: T, middleware: Middleware, next: ContinuationMiddleware = noop) {\n try {\n return Promise.resolve(middleware ? middleware(context, next) : context)\n } catch (error) {\n return Promise.reject(error)\n }\n}\n\nexport function functionList(list: Function | Function[], ...args: any[]): Middleware[] {\n const arrayList = Symbol.iterator in list ? Array.from(list as Function[]) : [list as Function]\n return arrayList.map((x) => {\n return (_: any, next: any) => Promise.resolve(x(...args)).then(next)\n })\n}\n\n/**\n * Create a function to invoke all passed middleware functions\n * with a single argument context\n * @param middleware\n */\nexport function compose(...middleware: (Middleware | Middleware[])[]): ContinuationMiddleware {\n return flatten(middleware)\n .filter((x) => {\n if ('function' !== typeof x) {\n throw new TypeError(`${x}, must be a middleware function accpeting (context, next) arguments`)\n }\n return x\n })\n .reduceRight((composed: Middleware, mw: Middleware) => {\n return function (context: T, nextFn: ContinuationMiddleware) {\n const next = () => throwIfHasBeenCalled(next) && composed(context, nextFn)\n return tryInvokeMiddleware(context, mw, next)\n }\n }, tryInvokeMiddleware) as ContinuationMiddleware\n}\n", "meta": {"content_hash": "6ae61642b7c58194f1728c38a9412b57", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 113, "avg_line_length": 34.833333333333336, "alnum_prop": 0.6634768740031898, "repo_name": "calebboyd/app-builder", "id": "3d2827115e8ff8387651397c026eedf9eed49bbd", "size": "1881", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/compose.ts", "mode": "33188", "license": "mit", 
"language": [{"name": "TypeScript", "bytes": "6288"}]}} {"text": "/**\n * A simple custom row expander that does not add a \"+\" / \"-\" column.\n */\nExt.define('ExecDashboard.ux.plugin.RowExpander', {\n extend: 'Ext.grid.plugin.RowExpander',\n\n alias: 'plugin.ux-rowexpander',\n\n rowBodyTpl: [\n '
',\n '
',\n '
{paragraph}
',\n '
COLLAPSE
',\n '
',\n '
'\n ],\n\n // don't add the expander +/- because we will use a custom one instead\n addExpander: Ext.emptyFn,\n\n addCollapsedCls: {\n fn: function(out, values, parent) {\n var me = this.rowExpander;\n\n if (!me.recordsExpanded[values.record.internalId]) {\n values.itemClasses.push(me.rowCollapsedCls);\n } else {\n values.itemClasses.push('x-grid-row-expanded');\n }\n this.nextTpl.applyOut(values, out, parent);\n },\n\n syncRowHeights: function(lockedItem, normalItem) {\n this.rowExpander.syncRowHeights(lockedItem, normalItem);\n },\n\n // We need a high priority to get in ahead of the outerRowTpl\n // so we can setup row data\n priority: 20000\n }\n});\n", "meta": {"content_hash": "9423061655f413b2039420abbaea9703", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 201, "avg_line_length": 34.292682926829265, "alnum_prop": 0.5839260312944523, "repo_name": "ralscha/extdirectspring-demo", "id": "37024e5bb022671dcdaff7954f911359cbec2c0f", "size": "1406", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/resources/static/extjs6classic/executive-dashboard/app/ux/plugin/RowExpander.js", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "683"}, {"name": "CSS", "bytes": "17363"}, {"name": "HTML", "bytes": "136892"}, {"name": "Java", "bytes": "370333"}, {"name": "JavaScript", "bytes": "1171254"}]}} {"text": "using System;\nusing NUnit.Framework;\nusing osu.Framework.Allocation;\nusing osu.Framework.Bindables;\nusing osu.Framework.Graphics;\nusing osu.Framework.Graphics.Containers;\nusing osu.Game.Online.Rooms;\nusing osu.Game.Screens.OnlinePlay;\n\nnamespace osu.Game.Tests.Visual.OnlinePlay\n{\n /// \n /// A base test scene for all online play components and screens.\n /// \n public abstract class OnlinePlayTestScene : ScreenTestScene, IOnlinePlayTestSceneDependencies\n {\n public Bindable SelectedRoom => OnlinePlayDependencies?.SelectedRoom;\n public IRoomManager RoomManager => 
OnlinePlayDependencies?.RoomManager;\n public OngoingOperationTracker OngoingOperationTracker => OnlinePlayDependencies?.OngoingOperationTracker;\n public OnlinePlayBeatmapAvailabilityTracker AvailabilityTracker => OnlinePlayDependencies?.AvailabilityTracker;\n\n /// \n /// All dependencies required for online play components and screens.\n /// \n protected OnlinePlayTestSceneDependencies OnlinePlayDependencies => dependencies?.OnlinePlayDependencies;\n\n private DelegatedDependencyContainer dependencies;\n\n protected override Container Content => content;\n private readonly Container content;\n private readonly Container drawableDependenciesContainer;\n\n protected OnlinePlayTestScene()\n {\n base.Content.AddRange(new Drawable[]\n {\n drawableDependenciesContainer = new Container { RelativeSizeAxes = Axes.Both },\n content = new Container { RelativeSizeAxes = Axes.Both },\n });\n }\n\n protected override IReadOnlyDependencyContainer CreateChildDependencies(IReadOnlyDependencyContainer parent)\n {\n dependencies = new DelegatedDependencyContainer(base.CreateChildDependencies(parent));\n return dependencies;\n }\n\n [SetUp]\n public void Setup() => Schedule(() =>\n {\n // Reset the room dependencies to a fresh state.\n drawableDependenciesContainer.Clear();\n dependencies.OnlinePlayDependencies = CreateOnlinePlayDependencies();\n drawableDependenciesContainer.AddRange(OnlinePlayDependencies.DrawableComponents);\n });\n\n /// \n /// Creates the room dependencies. 
Called every .\n /// \n /// \n /// Any custom dependencies required for online play sub-classes should be added here.\n /// \n protected virtual OnlinePlayTestSceneDependencies CreateOnlinePlayDependencies() => new OnlinePlayTestSceneDependencies();\n\n /// \n /// A providing a mutable lookup source for online play dependencies.\n /// \n private class DelegatedDependencyContainer : IReadOnlyDependencyContainer\n {\n /// \n /// The online play dependencies.\n /// \n public OnlinePlayTestSceneDependencies OnlinePlayDependencies { get; set; }\n\n private readonly IReadOnlyDependencyContainer parent;\n private readonly DependencyContainer injectableDependencies;\n\n /// \n /// Creates a new .\n /// \n /// The fallback to use when cannot satisfy a dependency.\n public DelegatedDependencyContainer(IReadOnlyDependencyContainer parent)\n {\n this.parent = parent;\n injectableDependencies = new DependencyContainer(this);\n }\n\n public object Get(Type type)\n => OnlinePlayDependencies?.Get(type) ?? parent.Get(type);\n\n public object Get(Type type, CacheInfo info)\n => OnlinePlayDependencies?.Get(type, info) ?? parent.Get(type, info);\n\n public void Inject(T instance)\n where T : class\n => injectableDependencies.Inject(instance);\n }\n }\n}\n", "meta": {"content_hash": "736eb04da0b759ce60643001076f0352", "timestamp": "", "source": "github", "line_count": 99, "max_line_length": 178, "avg_line_length": 42.90909090909091, "alnum_prop": 0.6664312617702448, "repo_name": "peppy/osu-new", "id": "87166460747311eea3c23459cf12a782158c8427", "size": "4398", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "osu.Game/Tests/Visual/OnlinePlay/OnlinePlayTestScene.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "281779"}]}} {"text": "\r\n/*\r\n * Created on May 6, 2005\r\n *\r\n */\r\nnamespace NPOI.SS.Formula.Functions\r\n{\r\n using System;\r\n using NPOI.SS.Formula.Eval;\r\n\r\n /**\r\n * @author Amol S. 
Deshmukh < amolweb at ya hoo dot com >\r\n * \r\n */\r\n internal class True : Fixed0ArgFunction\r\n {\r\n public override ValueEval Evaluate(int srcRowIndex, int srcColumnIndex)\r\n {\r\n return BoolEval.TRUE;\r\n }\r\n\r\n }\r\n}", "meta": {"content_hash": "3dac27e2d71e747a523afb589cf0a7f3", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 79, "avg_line_length": 18.82608695652174, "alnum_prop": 0.5496535796766744, "repo_name": "treenew/sofire", "id": "891d8a2a05c1ed4945d09ab5bed7a8831ebb4580", "size": "1234", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Core/Sofire.Extends/Excel/NPOI/SS/Formula/Functions/Boolean/True.cs", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C#", "bytes": "17254659"}, {"name": "Shell", "bytes": "682"}]}} {"text": "\n\n#include \"BaseLibInternals.h\"\n\n/**\n Multiples a 64-bit unsigned integer by a 32-bit unsigned integer and\n generates a 64-bit unsigned result.\n\n This function multiples the 64-bit unsigned value Multiplicand by the 32-bit\n unsigned value Multiplier and generates a 64-bit unsigned result. 
This 64-\n bit unsigned result is returned.\n\n @param Multiplicand A 64-bit unsigned value.\n @param Multiplier A 32-bit unsigned value.\n\n @return Multiplicand * Multiplier\n\n**/\nUINT64\nEFIAPI\nGlueMultU64x32 (\n IN UINT64 Multiplicand,\n IN UINT32 Multiplier\n )\n{\n UINT64 Result;\n\n Result = InternalMathMultU64x32 (Multiplicand, Multiplier);\n\n return Result;\n}\n", "meta": {"content_hash": "882efd20bf0166267fbdc1bda59412dd", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 78, "avg_line_length": 23.870967741935484, "alnum_prop": 0.6716216216216216, "repo_name": "tianocore/edk", "id": "93b1411b1ea2179ef002c4168005e2e517b3fdf8", "size": "1550", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Foundation/Library/EdkIIGlueLib/Library/BaseLib/MultU64x32.c", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Assembly", "bytes": "947898"}, {"name": "C", "bytes": "18163757"}, {"name": "C++", "bytes": "1437397"}, {"name": "GAP", "bytes": "256118"}, {"name": "Objective-C", "bytes": "2657772"}, {"name": "Shell", "bytes": "60476"}]}} {"text": "import Notifications from './Notifications.js';\nexport default Notifications;\n", "meta": {"content_hash": "23bf4d090837565ef76e7fb1c41a3b01", "timestamp": "", "source": "github", "line_count": 2, "max_line_length": 47, "avg_line_length": 39.0, "alnum_prop": 0.8205128205128205, "repo_name": "Axivity/openmovement-axsys-client", "id": "1d62ec7049974c62d8ccd4cb2c158a52ddce63ae", "size": "78", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vendor/react-notifications/src/index.js", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "CSS", "bytes": "3987"}, {"name": "HTML", "bytes": "2091"}, {"name": "JavaScript", "bytes": "150611"}]}} {"text": ".class public Lcom/android/server/pm/PersistentPreferredIntentResolver;\n.super Lcom/android/server/IntentResolver;\n.source \"PersistentPreferredIntentResolver.java\"\n\n\n# 
annotations\n.annotation system Ldalvik/annotation/Signature;\n value = {\n \"Lcom/android/server/IntentResolver\",\n \"<\",\n \"Lcom/android/server/pm/PersistentPreferredActivity;\",\n \"Lcom/android/server/pm/PersistentPreferredActivity;\",\n \">;\"\n }\n.end annotation\n\n\n# direct methods\n.method public constructor ()V\n .locals 0\n\n .prologue\n .line 23\n invoke-direct {p0}, Lcom/android/server/IntentResolver;->()V\n\n return-void\n.end method\n\n\n# virtual methods\n.method protected bridge synthetic isPackageForFilter(Ljava/lang/String;Landroid/content/IntentFilter;)Z\n .locals 1\n .param p1, \"x0\" # Ljava/lang/String;\n .param p2, \"x1\" # Landroid/content/IntentFilter;\n\n .prologue\n .line 23\n check-cast p2, Lcom/android/server/pm/PersistentPreferredActivity;\n\n .end local p2 # \"x1\":Landroid/content/IntentFilter;\n invoke-virtual {p0, p1, p2}, Lcom/android/server/pm/PersistentPreferredIntentResolver;->isPackageForFilter(Ljava/lang/String;Lcom/android/server/pm/PersistentPreferredActivity;)Z\n\n move-result v0\n\n return v0\n.end method\n\n.method protected isPackageForFilter(Ljava/lang/String;Lcom/android/server/pm/PersistentPreferredActivity;)Z\n .locals 1\n .param p1, \"packageName\" # Ljava/lang/String;\n .param p2, \"filter\" # Lcom/android/server/pm/PersistentPreferredActivity;\n\n .prologue\n .line 32\n iget-object v0, p2, Lcom/android/server/pm/PersistentPreferredActivity;->mComponent:Landroid/content/ComponentName;\n\n invoke-virtual {v0}, Landroid/content/ComponentName;->getPackageName()Ljava/lang/String;\n\n move-result-object v0\n\n invoke-virtual {p1, v0}, Ljava/lang/String;->equals(Ljava/lang/Object;)Z\n\n move-result v0\n\n return v0\n.end method\n\n.method protected bridge synthetic newArray(I)[Landroid/content/IntentFilter;\n .locals 1\n .param p1, \"x0\" # I\n\n .prologue\n .line 23\n invoke-virtual {p0, p1}, 
Lcom/android/server/pm/PersistentPreferredIntentResolver;->newArray(I)[Lcom/android/server/pm/PersistentPreferredActivity;\n\n move-result-object v0\n\n return-object v0\n.end method\n\n.method protected newArray(I)[Lcom/android/server/pm/PersistentPreferredActivity;\n .locals 1\n .param p1, \"size\" # I\n\n .prologue\n .line 27\n new-array v0, p1, [Lcom/android/server/pm/PersistentPreferredActivity;\n\n return-object v0\n.end method\n", "meta": {"content_hash": "f0dbbe5a7aa396ea4f36b943cfe29952", "timestamp": "", "source": "github", "line_count": 90, "max_line_length": 182, "avg_line_length": 28.5, "alnum_prop": 0.7228070175438597, "repo_name": "hexiaoshuai/Flyme_device_ZTE_A1", "id": "7d0c6cd70cf586abf4521839e05ebd3f5c60f508", "size": "2565", "binary": false, "copies": "2", "ref": "refs/heads/C880AV1.0.0B06", "path": "services.jar.out/smali/com/android/server/pm/PersistentPreferredIntentResolver.smali", "mode": "33188", "license": "apache-2.0", "language": [{"name": "GLSL", "bytes": "1500"}, {"name": "HTML", "bytes": "10195"}, {"name": "Makefile", "bytes": "11258"}, {"name": "Python", "bytes": "924"}, {"name": "Shell", "bytes": "2734"}, {"name": "Smali", "bytes": "234274633"}]}} {"text": "export * from \"./from-exports\";\n", "meta": {"content_hash": "943b6e87d11b9904fa19cc67ed91983c", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 31, "avg_line_length": 32.0, "alnum_prop": 0.65625, "repo_name": "stealjs/steal-tools", "id": "f1addbd36840c6b3da5f8b37ec36dc17800f36d5", "size": "33", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "test/treeshake/disabled/reexports.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "454"}, {"name": "HTML", "bytes": "26153"}, {"name": "JavaScript", "bytes": "1116049"}, {"name": "Less", "bytes": "1521"}]}} {"text": "@interface demoUITests : XCTestCase\n\n@end\n\n@implementation demoUITests\n\n- (void)setUp {\n [super setUp];\n \n // Put setup code here. 
This method is called before the invocation of each test method in the class.\n \n // In UI tests it is usually best to stop immediately when a failure occurs.\n self.continueAfterFailure = NO;\n // UI tests must launch the application that they test. Doing this in setup will make sure it happens for each test method.\n [[[XCUIApplication alloc] init] launch];\n \n // In UI tests it\u2019s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.\n}\n\n- (void)tearDown {\n // Put teardown code here. This method is called after the invocation of each test method in the class.\n [super tearDown];\n}\n\n- (void)testExample {\n // Use recording to get started writing UI tests.\n // Use XCTAssert and related functions to verify your tests produce the correct results.\n}\n\n@end\n", "meta": {"content_hash": "fbde17640562b01554a7d611a5455993", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 178, "avg_line_length": 34.3, "alnum_prop": 0.7152575315840622, "repo_name": "zhangjie579/ZJPhotoBrower", "id": "9493072379a33bdd01fcb5f60093a36b19cdf6a7", "size": "1191", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "demoUITests/demoUITests.m", "mode": "33261", "license": "mit", "language": [{"name": "Objective-C", "bytes": "2073"}, {"name": "Ruby", "bytes": "647"}]}} {"text": "\n\n\n\n\n\nLua-API++: Class Index\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n\n
\n\n
\n
\n\n\n
\n\n
\n\n
\n
\n
Class Index
\n
\n
\n
C | N | R | S | T | V
\n\n\n\n\n\n\n\n\n\n\n
  C  
\n
  R  
\n
  T  
\n
Valref (lua)   
Valset (lua)   
ClosureInfo (lua)   Context::Registry (lua)   Table (lua)   Value (lua)   
Context (lua)   RegistryKey (lua)   Temporary (lua)   
  N  
\n
Retval (lua)   TypeID (lua)   
  S  
\n
  V  
\n
Nil (lua)   
State (lua)   Valobj (lua)   
\n
C | N | R | S | T | V
\n
\n
\n\n
\n
    \n
  • Generated by\n \n \"doxygen\"/ 1.8.11
  • \n
\n
\n\n\n", "meta": {"content_hash": "82adb62672427a69ba38bee4f7010e5a", "timestamp": "", "source": "github", "line_count": 145, "max_line_length": 635, "avg_line_length": 62.04137931034483, "alnum_prop": 0.630057803468208, "repo_name": "tnovits-d2d/luapp", "id": "1958466a8b2e57a52274b9d635d6c3e0f7b20dc7", "size": "8996", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "doc/html/classes.html", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "918"}, {"name": "C++", "bytes": "317526"}, {"name": "Objective-C", "bytes": "511"}]}} {"text": "package com.lithiumsheep.jacketapp;\n\nimport org.junit.Test;\n\nimport static org.junit.Assert.assertEquals;\n\n/**\n * Example local unit test, which will execute on the development machine (host).\n *\n * @see Testing documentation\n */\npublic class ExampleUnitTest {\n @Test\n public void addition_isCorrect() throws Exception {\n assertEquals(4, 2 + 2);\n }\n}", "meta": {"content_hash": "07d6825d0c75c153af3253ecc3a8a832", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 81, "avg_line_length": 24.41176470588235, "alnum_prop": 0.7060240963855422, "repo_name": "lolsheeplol/jacketapp", "id": "dca56c0a239fb244077f7d789a21d3506278e552", "size": "415", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/test/java/com/lithiumsheep/jacketapp/ExampleUnitTest.java", "mode": "33188", "license": "mit", "language": [{"name": "Java", "bytes": "59431"}]}} {"text": "\n\n 4.0.0\n \n org.kie.server\n kie-server-parent\n 7.0.0-SNAPSHOT\n \n\n kie-server-controller\n KIE :: Execution Server :: Controller\n KIE Execution Server Controller\n pom\n\n \n kie-server-controller-api\n kie-server-controller-impl\n kie-server-controller-rest\n kie-server-controller-test-war\n \n\n", "meta": {"content_hash": "300c41f75effbe997a0100e753cd1dc0", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 108, "avg_line_length": 36.25, "alnum_prop": 
0.6988505747126437, "repo_name": "murador/droolsjbpm-integration", "id": "41a9dcfee9bcbcba77722e45a66d081de28bd73e", "size": "870", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "kie-server-parent/kie-server-controller/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "2569"}, {"name": "CSS", "bytes": "7748"}, {"name": "HTML", "bytes": "2654"}, {"name": "Java", "bytes": "5623797"}, {"name": "JavaScript", "bytes": "32051"}, {"name": "Shell", "bytes": "3525"}, {"name": "XSLT", "bytes": "2865"}]}} {"text": "package com.unnamed.b.atv.sample.holder;\n\nimport android.content.Context;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.widget.CheckBox;\nimport android.widget.CompoundButton;\nimport android.widget.TextView;\n\nimport com.github.johnkil.print.PrintView;\nimport com.unnamed.b.atv.model.TreeNode;\nimport com.unnamed.b.atv.sample.R;\n\n/**\n * Created by Bogdan Melnychuk on 2/15/15, modified by Szigeti Peter 2/2/16.\n */\npublic class ArrowExpandSelectableHeaderHolder extends TreeNode.BaseNodeViewHolder {\n private TextView tvValue;\n private PrintView arrowView;\n private CheckBox nodeSelector;\n\n public ArrowExpandSelectableHeaderHolder(Context context) {\n super(context);\n }\n\n @Override\n public View createNodeView(final TreeNode node, IconTreeItemHolder.IconTreeItem value) {\n final LayoutInflater inflater = LayoutInflater.from(context);\n final View view = inflater.inflate(R.layout.layout_selectable_header, null, false);\n\n tvValue = (TextView) view.findViewById(R.id.node_value);\n tvValue.setText(value.text);\n\n final PrintView iconView = (PrintView) view.findViewById(R.id.icon);\n iconView.setIconText(context.getResources().getString(value.icon));\n\n arrowView = (PrintView) view.findViewById(R.id.arrow_icon);\n arrowView.setPadding(20,10,10,10);\n if (node.isLeaf()) {\n arrowView.setVisibility(View.GONE);\n }\n arrowView.setOnClickListener(new 
View.OnClickListener() {\n @Override\n public void onClick(View view) {\n tView.toggleNode(node);\n }\n });\n\n nodeSelector = (CheckBox) view.findViewById(R.id.node_selector);\n nodeSelector.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {\n @Override\n public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {\n node.setSelected(isChecked);\n for (TreeNode n : node.getChildren()) {\n getTreeView().selectNode(n, isChecked);\n }\n }\n });\n nodeSelector.setChecked(node.isSelected());\n\n return view;\n }\n\n @Override\n public void toggle(boolean active) {\n arrowView.setIconText(context.getResources().getString(active ? R.string.ic_keyboard_arrow_down : R.string.ic_keyboard_arrow_right));\n }\n\n @Override\n public void toggleSelectionMode(boolean editModeEnabled) {\n nodeSelector.setVisibility(editModeEnabled ? View.VISIBLE : View.GONE);\n nodeSelector.setChecked(mNode.isSelected());\n }\n}\n", "meta": {"content_hash": "e9aafb0eae131a7ebffe799aa7bd4f78", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 141, "avg_line_length": 36.054054054054056, "alnum_prop": 0.6829085457271364, "repo_name": "yehe01/AndroidTreeView", "id": "c58d0842a856a86b7196b5357d9f09d0dc2c06db", "size": "2668", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "app/src/main/java/com/unnamed/b/atv/sample/holder/ArrowExpandSelectableHeaderHolder.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "120189"}]}} {"text": "\ufeffusing LmpClient.Base;\nusing LmpClient.Base.Interface;\nusing LmpClient.Extensions;\nusing LmpCommon.Enums;\nusing LmpCommon.Message.Data.Kerbal;\nusing LmpCommon.Message.Interface;\nusing LmpCommon.Message.Types;\nusing System.Collections.Concurrent;\n\nnamespace LmpClient.Systems.KerbalSys\n{\n public class KerbalMessageHandler : SubSystem, IMessageHandler\n {\n public ConcurrentQueue IncomingMessages { get; set; } = new ConcurrentQueue();\n\n public 
void HandleMessage(IServerMessageBase msg)\n {\n if (!(msg.Data is KerbalBaseMsgData msgData)) return;\n\n switch (msgData.KerbalMessageType)\n {\n case KerbalMessageType.Remove:\n System.KerbalsToRemove.Enqueue(((KerbalRemoveMsgData)msgData).KerbalName);\n break;\n case KerbalMessageType.Reply:\n HandleKerbalReply(msgData as KerbalReplyMsgData);\n break;\n case KerbalMessageType.Proto:\n HandleKerbalProto(msgData as KerbalProtoMsgData);\n break;\n default:\n LunaLog.LogError(\"[LMP]: Invalid Kerbal message type\");\n break;\n }\n }\n\n /// \n /// Just load the received kerbal into game and refresh dialogs\n /// \n private static void HandleKerbalProto(KerbalProtoMsgData messageData)\n {\n ProcessKerbal(messageData.Kerbal.KerbalData, messageData.Kerbal.NumBytes);\n }\n\n /// \n /// Appends the received kerbal to the dictionary\n /// \n private static void ProcessKerbal(byte[] kerbalData, int numBytes)\n {\n var kerbalNode = kerbalData.DeserializeToConfigNode(numBytes);\n if (kerbalNode != null)\n {\n System.KerbalsToProcess.Enqueue(kerbalNode);\n }\n else\n LunaLog.LogError(\"[LMP]: Failed to load kerbal!\");\n }\n\n /// \n /// We store all the kerbals in the KerbalProtoQueue dictionary so later once the game starts we load them\n /// \n /// \n private static void HandleKerbalReply(KerbalReplyMsgData messageData)\n {\n for (var i = 0; i < messageData.KerbalsCount; i++)\n {\n ProcessKerbal(messageData.Kerbals[i].KerbalData, messageData.Kerbals[i].NumBytes);\n }\n\n LunaLog.Log(\"[LMP]: Kerbals Synced!\");\n MainSystem.NetworkState = ClientState.KerbalsSynced;\n }\n }\n}\n", "meta": {"content_hash": "2a1675195d29087b473ac18426494fdc", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 126, "avg_line_length": 36.648648648648646, "alnum_prop": 0.6010324483775811, "repo_name": "gavazquez/LunaMultiPlayer", "id": "8fcb7fdfb64dfb91f1784867c3f456374d3cd524", "size": "2714", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": 
"LmpClient/Systems/KerbalSys/KerbalMessageHandler.cs", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "3648"}, {"name": "C#", "bytes": "2055657"}, {"name": "CSS", "bytes": "1100"}, {"name": "HTML", "bytes": "484"}, {"name": "JavaScript", "bytes": "1252"}, {"name": "Smalltalk", "bytes": "15844"}]}} {"text": "import sys\nfrom conftest import rvo_output, rvo_err\nfrom click.testing import CliRunner\nfrom rvo import cli\n\ndef test_add_all_parameters(isatty_true):\n options = ['add', '-t', 'test', '-c', 'test', '--content', 'test']\n output = ['Document \"test\" created.']\n rvo_output(options,output)\n\ndef test_add_tags(isatty_true):\n options = ['add', '-t', 'test', '--content', 'test']\n output = ['Document \"test\" created.']\n rvo_output(options,output)\n\ndef test_add_title_test(isatty_true):\n options = ['add', '-t', 'test', '--content', 'THIS IS A TITLE']\n output = ['Document \"THIS IS A TITLE\" created.']\n rvo_output(options,output)\n\ndef test_add_title_test_gnarf(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-c', 't\u00f6stc\u00e4t', '-x', 'gnarf'])\n assert not result.exception\n assert result.output.strip().endswith('Document \"gnarf\" created.')\n\ndef test_add_title_test_gnarf(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-c', 't\u00f6stc\u00e4t', '-x', 'gnarf\\nfoo'])\n assert not result.exception\n assert result.output.strip().endswith('Document \"gnarf\" created.')\n\ndef test_add_title_test_hashtag(isatty_true):\n options = ['add', '-t', 'test', '--content', '# THIS IS A TITLE']\n output = ['Document \"THIS IS A TITLE\" created.']\n rvo_output(options,output)\n\ndef test_add_title_test_hashtag(isatty_true):\n options = ['add', '-t', 'test', '--content', '# THIS IS A TITLE\\nmutliline']\n output = ['Document \"THIS IS A TITLE\" created.']\n rvo_output(options,output)\n\ndef test_add_very_long_title(isatty_true):\n options = ['add', '-t', 'test', 
'--content', '# THIS IS A VERY VERY LONG NEVER ENDING TITLE THAT EXCEEDS LIMITS']\n output = ['Document \"THIS IS A VERY VERY LONG NEVER ENDING TITLE THAT E\" created.']\n rvo_output(options,output)\n\ndef test_add_no_parameters(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add'])\n assert result.output.strip().endswith('Document \"TEST\" created.')\n assert not result.exception\n\ndef test_add_one_parameters_tag(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-t', 'testtag'])\n assert result.output.strip().endswith('Document \"TEST\" created.')\n assert not result.exception\n\ndef test_add_utf8_cat(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-c', 't\u00f6stc\u00e4t'])\n assert result.output.strip().endswith('Document \"TEST\" created.')\n assert not result.exception\n\ndef test_add_utf8_cat_multi(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-c', 't\u00fc\u00fct\u00fc\u00fc', '-c', 't\u00f6stc\u00e4t'])\n assert result.output.strip().endswith('Document \"TEST\" created.')\n assert not result.exception\n\ndef test_add_utf8_tag(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-t', 't\u00f6stc\u00e4t'])\n assert result.output.strip().endswith('Document \"TEST\" created.')\n assert not result.exception\n\ndef test_add_utf8_tag_multi(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-t', 't\u00fc\u00fct\u00fc\u00fc', '-t', 't\u00f6stc\u00e4t'])\n assert result.output.strip().endswith('Document \"TEST\" created.')\n assert not result.exception\n\ndef test_add_encrypt_by_parameter_wrong_pw(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-e', '-p', 'thispasswordistotallywrong', '-t', 'encryption', '-c', 'test'])\n assert result.output.strip().endswith('Invalid Password')\n assert result.exception\n\ndef test_add_encrypt_by_parameter(isatty_true):\n 
runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-e', '-p', 'test123', '-t', 'encryption', '-c', 'test'])\n assert result.output.strip().endswith('Document \"TEST\" created.')\n assert not result.exception\n\ndef test_add_encrypt_by_input(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-e', '-t', 'encryption', '-c', 'test'], input=\"test123\\n\")\n assert result.output.strip().endswith('Document \"TEST\" created.')\n assert not result.exception\n\ndef test_add_encrypt_by_input_with_content(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-e', '-t', 'encryption', '-x', 'TEST', '-c', 'test'], input=\"test123\\n\")\n assert result.output.strip().endswith('Document \"TEST\" created.')\n assert not result.exception\n\ndef test_add_encrypt_by_input_wrong_pw(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-e', '-t', 'encryption', '-c', 'test'], input=\"test2123\\n\")\n assert result.output.strip().endswith('Invalid Password')\n assert result.exception\n\ndef test_add_read_from_stdin(isatty_false):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add'], input=\"Schwifty\\nSchwifty..lol\\nMorty\\n\\n\")\n assert result.output.strip().endswith('Document \"Schwifty\" created.')\n assert not result.exception\n\ndef test_add_read_from_stdin_with_cat(isatty_false):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-c', 'test'], input=\"Schwifty\\nSchwifty..lol\\nMorty\\n\\n\")\n assert result.output.strip().endswith('Document \"Schwifty\" created.')\n assert not result.exception\n\ndef test_add_read_from_stdin_with_tag(isatty_false):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-t', 'tag'], input=\"Schwifty\\nSchwifty..lol\\nMorty\\n\\n\")\n assert not result.exception\n assert result.output.strip().endswith('Document \"Schwifty\" created.')\n\ndef test_add_conflicting_stdin_reading(isatty_false):\n runner = CliRunner()\n 
result = runner.invoke(cli.cli, ['add', '-e'], input=\"Schwifty\\nSchwifty..lol\\nMorty\\n\\n\")\n assert result.exception\n assert result.output.strip().endswith('Invalid Password')\n\ndef test_add_location_germany(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-l', 'Nuremberg', '-c', 'test'])\n assert result.output.strip().endswith('Document \"TEST\" created.')\n assert not result.exception\n\ndef test_add_location_invalid(isatty_true):\n runner = CliRunner()\n result = runner.invoke(cli.cli, ['add', '-l', 'DOESNOTEXISTTOWNATLEASTIHOPE', '-c', 'test'])\n assert result.exception\n", "meta": {"content_hash": "f3c656fac22c9e8bb6d3154d825ff145", "timestamp": "", "source": "github", "line_count": 147, "max_line_length": 120, "avg_line_length": 42.31972789115646, "alnum_prop": 0.6651663719659219, "repo_name": "noqqe/rvo", "id": "27e97720b7f7607b81cff91683a91c7ab63ddf6f", "size": "6288", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/test_add.py", "mode": "33188", "license": "mit", "language": [{"name": "Python", "bytes": "96671"}, {"name": "Shell", "bytes": "724"}]}} {"text": "package com.charlesbishop.webrest.util;\n\nimport java.io.Serializable;\n\nimport org.springframework.context.support.ClassPathXmlApplicationContext;\nimport org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;\n\nimport com.charlesbishop.webrest.dao.AppUserDAO;\nimport com.charlesbishop.webrest.model.AppUser;\n\n/**\n * @author charlie\n * This utility class contains helper methods that can assist in the creation of an AppUser object\n */\npublic class UserCreationUtil {\n\n\tprivate final static String ROLE_USER = \"USER\";\n\tprivate static BCryptPasswordEncoder encoder = new BCryptPasswordEncoder();\n\t\n\tpublic static AppUser createUser(String username, String rawPassword){\n\t\tClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext(\"spring.xml\");\t\t\n\t\t\n\t\tAppUser user = new 
AppUser();\n\t\tuser.setUsername(username);\n\t\tuser.setPassword(encoder.encode(rawPassword));\n\t\tuser.setUserRole(ROLE_USER);\n\t\t\n\t\tAppUserDAO userDAO = context.getBean(AppUserDAO.class);\n\t\tSerializable id = userDAO.save(user);\n\t\tcontext.close();\n\n\t\ttry {\n\t\t\treturn (id != null) ? user: null;\n\t\t}\n\t\tcatch (Exception e){\n\t\t\treturn null;\n\t\t}\n\t}\n\t\n\t// Check if a password is valid or not\n\tpublic static boolean passwordIsValid(String password){\n\t\tboolean validLength = (password.length() >= 8 && password.length() <= 14);\n\t\tboolean hasSpecial = !password.matches(\"[A-Za-z0-9 ]*\");\n\t\tboolean hasUppercase = !password.equals(password.toLowerCase());\n\t\tboolean hasLowercase = !password.equals(password.toUpperCase());\n\t\t\n\t\treturn validLength & hasSpecial & hasUppercase & hasLowercase;\n\t}\n}\n", "meta": {"content_hash": "204af6d29650b4f2e1595826ea1b235e", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 98, "avg_line_length": 32.183673469387756, "alnum_prop": 0.7564996829422955, "repo_name": "charleshb417/webrest", "id": "1577cffa3fe00991c89ef2d990185cc0f979338b", "size": "1577", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/charlesbishop/webrest/util/UserCreationUtil.java", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "2155"}, {"name": "HTML", "bytes": "4177"}, {"name": "Java", "bytes": "55582"}, {"name": "JavaScript", "bytes": "22330"}]}} {"text": "\n\n#import \"GPAppDelegate.h\"\n\nint main(int argc, char *argv[]) {\n @autoreleasepool {\n return UIApplicationMain(argc, argv, nil, NSStringFromClass([GPAppDelegate class]));\n }\n}\n", "meta": {"content_hash": "0638b209ea0d2dc6d4b80cac9bc7c7e3", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 92, "avg_line_length": 20.88888888888889, "alnum_prop": 0.6702127659574468, "repo_name": "Gi-lo/AboutMe-WWDC", "id": "5e37e4c2232e423af3bde3ea29df85af2e3ef2e7", 
"size": "1545", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Giulio Petek/Giulio Petek/Supporting Files/main.m", "mode": "33188", "license": "mit", "language": [{"name": "Objective-C", "bytes": "134726"}]}} {"text": "\npackage io.aeron.samples;\n\nimport io.aeron.*;\nimport io.aeron.driver.*;\nimport io.aeron.logbuffer.*;\nimport org.agrona.*;\nimport org.agrona.concurrent.*;\n\nimport java.nio.ByteBuffer;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.concurrent.locks.LockSupport;\n\nimport static org.agrona.BitUtil.CACHE_LINE_LENGTH;\nimport static org.agrona.SystemUtil.loadPropertiesFiles;\nimport static org.agrona.UnsafeAccess.UNSAFE;\n\npublic class EmbeddedIpcThroughput\n{\n public static final int BURST_LENGTH = 1_000_000;\n public static final int MESSAGE_LENGTH = SampleConfiguration.MESSAGE_LENGTH;\n public static final int MESSAGE_COUNT_LIMIT = SampleConfiguration.FRAGMENT_COUNT_LIMIT;\n public static final String CHANNEL = CommonContext.IPC_CHANNEL;\n public static final int STREAM_ID = SampleConfiguration.STREAM_ID;\n\n public static void main(final String[] args) throws Exception\n {\n loadPropertiesFiles(args);\n\n final AtomicBoolean running = new AtomicBoolean(true);\n SigInt.register(() -> running.set(false));\n\n final MediaDriver.Context ctx = new MediaDriver.Context()\n .threadingMode(ThreadingMode.SHARED)\n .sharedIdleStrategy(new NoOpIdleStrategy());\n\n try (MediaDriver ignore = MediaDriver.launch(ctx);\n Aeron aeron = Aeron.connect();\n Publication publication = aeron.addPublication(CHANNEL, STREAM_ID);\n Subscription subscription = aeron.addSubscription(CHANNEL, STREAM_ID))\n {\n final Subscriber subscriber = new Subscriber(running, subscription);\n final Thread subscriberThread = new Thread(subscriber);\n subscriberThread.setName(\"subscriber\");\n final Thread publisherThread = new Thread(new Publisher(running, publication));\n publisherThread.setName(\"publisher\");\n final Thread 
rateReporterThread = new Thread(new RateReporter(running, subscriber));\n rateReporterThread.setName(\"rate-reporter\");\n\n rateReporterThread.start();\n subscriberThread.start();\n publisherThread.start();\n\n subscriberThread.join();\n publisherThread.join();\n rateReporterThread.join();\n }\n }\n\n public static final class RateReporter implements Runnable\n {\n private final AtomicBoolean running;\n private final Subscriber subscriber;\n\n public RateReporter(final AtomicBoolean running, final Subscriber subscriber)\n {\n this.running = running;\n this.subscriber = subscriber;\n }\n\n public void run()\n {\n long lastTimeStamp = System.currentTimeMillis();\n long lastTotalBytes = subscriber.totalBytes();\n\n while (running.get())\n {\n LockSupport.parkNanos(1_000_000_000);\n\n final long newTimeStamp = System.currentTimeMillis();\n final long newTotalBytes = subscriber.totalBytes();\n\n final long duration = newTimeStamp - lastTimeStamp;\n final long bytesTransferred = newTotalBytes - lastTotalBytes;\n\n System.out.format(\n \"Duration %dms - %,d messages - %,d payload bytes%n\",\n duration, bytesTransferred / MESSAGE_LENGTH, bytesTransferred);\n\n lastTimeStamp = newTimeStamp;\n lastTotalBytes = newTotalBytes;\n }\n }\n }\n\n public static final class Publisher implements Runnable\n {\n private final AtomicBoolean running;\n private final Publication publication;\n\n public Publisher(final AtomicBoolean running, final Publication publication)\n {\n this.running = running;\n this.publication = publication;\n }\n\n public void run()\n {\n final Publication publication = this.publication;\n final ByteBuffer byteBuffer = BufferUtil.allocateDirectAligned(\n publication.maxMessageLength(), CACHE_LINE_LENGTH);\n final UnsafeBuffer buffer = new UnsafeBuffer(byteBuffer);\n long backPressureCount = 0;\n long totalMessageCount = 0;\n\n outputResults:\n while (running.get())\n {\n for (int i = 0; i < BURST_LENGTH; i++)\n {\n while (publication.offer(buffer, 0, 
MESSAGE_LENGTH) <= 0)\n {\n ++backPressureCount;\n if (!running.get())\n {\n break outputResults;\n }\n }\n\n ++totalMessageCount;\n }\n }\n\n final double backPressureRatio = backPressureCount / (double)totalMessageCount;\n System.out.format(\"Publisher back pressure ratio: %f%n\", backPressureRatio);\n }\n }\n\n public static final class Subscriber implements Runnable, FragmentHandler\n {\n private static final long TOTAL_BYTES_OFFSET;\n\n static\n {\n try\n {\n TOTAL_BYTES_OFFSET = UNSAFE.objectFieldOffset(Subscriber.class.getDeclaredField(\"totalBytes\"));\n }\n catch (final Exception ex)\n {\n throw new RuntimeException(ex);\n }\n }\n\n private final AtomicBoolean running;\n private final Subscription subscription;\n\n private volatile long totalBytes = 0;\n\n public Subscriber(final AtomicBoolean running, final Subscription subscription)\n {\n this.running = running;\n this.subscription = subscription;\n }\n\n public long totalBytes()\n {\n return totalBytes;\n }\n\n public void run()\n {\n while (!subscription.isConnected())\n {\n Thread.yield();\n }\n\n final Image image = subscription.imageAtIndex(0);\n\n long failedPolls = 0;\n long successfulPolls = 0;\n\n while (running.get())\n {\n final int fragmentsRead = image.poll(this, MESSAGE_COUNT_LIMIT);\n if (0 == fragmentsRead)\n {\n ++failedPolls;\n }\n else\n {\n ++successfulPolls;\n }\n }\n\n final double failureRatio = failedPolls / (double)(successfulPolls + failedPolls);\n System.out.format(\"Subscriber poll failure ratio: %f%n\", failureRatio);\n }\n\n public void onFragment(final DirectBuffer buffer, final int offset, final int length, final Header header)\n {\n UNSAFE.putOrderedLong(this, TOTAL_BYTES_OFFSET, totalBytes + length);\n }\n }\n}\n", "meta": {"content_hash": "c7579036649a7a0f90a8c95aaeaa261d", "timestamp": "", "source": "github", "line_count": 205, "max_line_length": 114, "avg_line_length": 33.40487804878049, "alnum_prop": 0.5867406542056075, "repo_name": "galderz/Aeron", "id": 
"e6115c3fc747af9ffe07efd3185e1fe055665939", "size": "7447", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aeron-samples/src/main/java/io/aeron/samples/EmbeddedIpcThroughput.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "28890"}, {"name": "C", "bytes": "955025"}, {"name": "C++", "bytes": "1444914"}, {"name": "CMake", "bytes": "41954"}, {"name": "Java", "bytes": "3802252"}, {"name": "Shell", "bytes": "35251"}]}} {"text": "\n\npackage io.fabric8.activemq.facade;\n\nimport org.apache.activemq.command.ActiveMQDestination;\n\nimport java.util.Collection;\n\n/**\n * A facade for either a local in JVM broker or a remote broker over JMX\n *\n * \n * \n */\npublic interface BrokerFacade {\n\n /**\n * @return a unique id for this resource, typically a JMX ObjectName\n * @throws Exception\n */\n String getId() throws Exception;\n\n /**\n * Returns all the available brokers.\n *\n * @return not null\n * @throws Exception\n */\n public BrokerFacade[] getBrokers() throws Exception;\n\n\t/**\n\t * The name of the active broker (f.e. 
'localhost' or 'my broker').\n\t * \n\t * @return not null\n\t * @throws Exception\n\t */\n\tString getBrokerName() throws Exception;\n\n\t/**\n\t * Admin view of the broker.\n\t * \n\t * @return not null\n\t * @throws Exception\n\t */\n\tBrokerViewFacade getBrokerAdmin() throws Exception;\n\n\t/**\n\t * All queues known to the broker.\n\t * \n\t * @return not null\n\t * @throws Exception\n\t */\n\tCollection getQueues() throws Exception;\n\n\t/**\n\t * All topics known to the broker.\n\t * \n\t * @return not null\n\t * @throws Exception\n\t */\n\tCollection getTopics() throws Exception;\n\n\t/**\n\t * All active consumers of a queue.\n\t * \n\t * @param queueName\n\t * the name of the queue, not null\n\t * @return not null\n\t * @throws Exception\n\t */\n\tCollection getQueueConsumers(String queueName)\n\t\t\tthrows Exception;\n\n /**\n * Returns the consumers available on the given topic\n */\n\tCollection getTopicConsumers(String topicName) throws Exception;\n\n /**\n * Returns the durable consumers available on the given topic\n */\n Collection getTopicDurableConsumers(String topicName) throws Exception;\n\n\n /**\n * Returns the producers available on the given queue\n */\n Collection getQueueProducers(String queueName) throws Exception;\n\n /**\n * Returns the producers available on the given topic\n */\n Collection getTopicProducers(String topicName) throws Exception;\n\n\t/**\n\t * Active durable subscribers to topics of the broker.\n\t * \n\t * @return not null\n\t * @throws Exception\n\t */\n\tCollection getDurableTopicSubscribers()\n\t\t\tthrows Exception;\n\n\n\t/**\n\t * Inactive durable subscribers to topics of the broker.\n\t *\n\t * @return not null\n\t * @throws Exception\n\t */\n\tCollection getInactiveDurableTopicSubscribers()\n\t\t\tthrows Exception;\n\n\t/**\n\t * The names of all transport connectors of the broker (f.e. 
openwire, ssl)\n\t * \n\t * @return not null\n\t * @throws Exception\n\t */\n\tCollection getConnectors() throws Exception;\n\n\t/**\n\t * A transport connectors.\n\t * \n\t * @param name\n\t * name of the connector (f.e. openwire)\n\t * @return null if not found\n\t * @throws Exception\n\t */\n\tConnectorViewFacade getConnector(String name) throws Exception;\n\n\t/**\n\t * All connections to all transport connectors of the broker.\n\t * \n\t * @return not null\n\t * @throws Exception\n\t */\n\tCollection getConnections() throws Exception;\n\n\t/**\n\t * The names of all connections to a specific transport connectors of the\n\t * broker.\n\t * \n\t * @see #getConnection(String)\n\t * @param connectorName\n\t * not null\n\t * @return not null\n\t * @throws Exception\n\t */\n\tCollection getConnections(String connectorName) throws Exception;\n\n\t/**\n\t * A specific connection to the broker.\n\t * \n\t * @param connectionName\n\t * the name of the connection, not null\n\t * @return not null\n\t * @throws Exception\n\t */\n\tConnectionViewFacade getConnection(String connectionName) throws Exception;\n\t/**\n\t * Returns all consumers of a connection.\n\t * \n\t * @param connectionName\n\t * the name of the connection, not null\n\t * @return not null\n\t * @throws Exception\n\t */\n\tCollection getConsumersOnConnection(\n\t\t\tString connectionName) throws Exception;\n\t/**\n\t * The brokers network connectors.\n\t * \n\t * @return not null\n\t * @throws Exception\n\t */\n\tCollection getNetworkConnectors()\n\t\t\tthrows Exception;\n\n\n\t/**\n\t * The brokers network bridges.\n\t *\n\t * @return not null\n\t * @throws Exception\n\t */\n Collection getNetworkBridges()\n throws Exception;\n\n /**\n\t * Purges the given destination\n\t * \n\t * @param destination\n\t * @throws Exception\n\t */\n\tvoid purgeQueue(ActiveMQDestination destination) throws Exception;\n\t/**\n\t * Get the view of the queue with the specified name.\n\t * \n\t * @param name\n\t * not null\n\t * 
@return null if no queue with this name exists\n\t * @throws Exception\n\t */\n\tQueueViewFacade getQueue(String name) throws Exception;\n\t/**\n\t * Get the view of the topic with the specified name.\n\t * \n\t * @param name\n\t * not null\n\t * @return null if no topic with this name exists\n\t * @throws Exception\n\t */\n\tTopicViewFacade getTopic(String name) throws Exception;\n\t\n\t/**\n\t * Get the JobScheduler MBean\n\t * @return the jobScheduler or null if not configured\n\t * @throws Exception\n\t */\n\tJobSchedulerViewFacade getJobScheduler() throws Exception;\n\t\n\t/**\n * Get the JobScheduler MBean\n * @return the jobScheduler or null if not configured\n * @throws Exception\n */\n Collection getScheduledJobs() throws Exception;\n\n boolean isJobSchedulerStarted();\n}\n", "meta": {"content_hash": "721459e845a5da7add68a4a701a66ffa", "timestamp": "", "source": "github", "line_count": 231, "max_line_length": 106, "avg_line_length": 24.874458874458874, "alnum_prop": 0.6801253045596937, "repo_name": "alexeev/jboss-fuse-mirror", "id": "a2ecf4d144d92f3c59a9453b2d7247ad73d2fba7", "size": "6377", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tooling/tooling-activemq-facade/src/main/java/io/fabric8/activemq/facade/BrokerFacade.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "313969"}, {"name": "CoffeeScript", "bytes": "278706"}, {"name": "Java", "bytes": "8498768"}, {"name": "JavaScript", "bytes": "2483260"}, {"name": "Kotlin", "bytes": "14282"}, {"name": "Scala", "bytes": "484151"}, {"name": "Shell", "bytes": "11547"}, {"name": "XSLT", "bytes": "26098"}]}} {"text": "\n\n\n\n\n\nArch Game Engine: Class Members - Functions\n\n\n\n\n\n\n\n\n\n\n
\n
\n\n \n \n \n \n \n
\n
Arch Game Engine\n  0.2\n
\n
\n
\n\n\n\n
\n \n
\n \n
\n \n
\n
\n \n
\n
\n\n
\n
\n\n\n
\n\n
\n\n
\n \n\n

- m -

\n
\n\n
\nGenerated by  \n\"doxygen\"/\n 1.8.11\n
\n\n\n", "meta": {"content_hash": "6d609a8ae85150b0d0ec24c54e903aa2", "timestamp": "", "source": "github", "line_count": 138, "max_line_length": 154, "avg_line_length": 42.78260869565217, "alnum_prop": 0.6321138211382114, "repo_name": "jarreed0/ArchGE", "id": "de8855bacfd51d6ab2de6ce2b10b6c15f14e1de5", "size": "5904", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/html/functions_func_m.html", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C++", "bytes": "232681"}, {"name": "Shell", "bytes": "10667"}]}} {"text": "\ufeffusing System;\n\npublic interface IExecutionScope : IDisposable\n#if NATIVE_ASYNC\n\t, IAsyncDisposable\n#endif\n{\n}\n", "meta": {"content_hash": "dba4eb24c8b66c72fb0856faf979ff42", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 46, "avg_line_length": 13.875, "alnum_prop": 0.7747747747747747, "repo_name": "linq2db/linq2db", "id": "8fe6042321283fe54eb99b53ef62ef9ba0ca4fb8", "size": "113", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Source/LinqToDB/DataProvider/IExecutionScope.cs", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "29203"}, {"name": "C#", "bytes": "18569855"}, {"name": "F#", "bytes": "15865"}, {"name": "PLSQL", "bytes": "29278"}, {"name": "PLpgSQL", "bytes": "15809"}, {"name": "PowerShell", "bytes": "5130"}, {"name": "SQLPL", "bytes": "10530"}, {"name": "Shell", "bytes": "29373"}, {"name": "Smalltalk", "bytes": "11"}, {"name": "TSQL", "bytes": "104099"}, {"name": "Visual Basic .NET", "bytes": "3871"}]}} {"text": "\ufeffusing System;\nusing System.Collections.Generic;\nusing System.ComponentModel.Design;\nusing Cirrious.MvvmCross.Plugins.Email;\nusing Cirrious.MvvmCross.Plugins.WebBrowser;\nusing codestuffers.MvvmCross.Plugins.FeedbackDialog.OpenCriteria;\nusing codestuffers.MvvmCross.Plugins.UserInteraction;\nusing FluentAssertions;\nusing Moq;\nusing NUnit.Framework;\n\nnamespace 
codestuffers.MvvmCross.Plugins.FeedbackDialog.Tests\n{\n [TestFixture]\n public class MvxFeedbackDialogTests\n {\n private Mock _userInteraction;\n private Mock _dataService;\n private Mock _emailTask;\n private Mock _webBrowser;\n private Mock _data;\n private FeedbackDialogConfiguration _configuration;\n private List _criteria;\n private MvxFeedbackDialog _testObject;\n\n [SetUp]\n public void Setup()\n {\n _criteria = new List();\n _data = new Mock();\n _data.SetupProperty(x => x.DialogWasShown);\n\n _userInteraction = new Mock();\n _dataService = new Mock();\n _dataService.Setup(x => x.GetData()).Returns(() => _data.Object);\n\n _emailTask = new Mock();\n _webBrowser = new Mock();\n _configuration = new FeedbackDialogConfiguration {OpenCriteria = _criteria};\n\n _testObject = new MvxFeedbackDialog(_userInteraction.Object, \n _dataService.Object, _emailTask.Object, _webBrowser.Object);\n _testObject.SetConfiguration(_configuration);\n }\n\n [Test]\n public void RecordAppStart_ShouldNotRecordAppStart_WhenDialogHasAlreadyBeenOpened()\n {\n _data.Setup(x => x.DialogWasShown).Returns(true);\n _testObject.RecordAppStart();\n _data.Verify(x => x.AppHasOpened(), Times.Never());\n }\n\n [Test]\n public void RecordAppStart_ShouldRecordAppStart_WhenDialogHasNotBeenOpened()\n {\n _data.Setup(x => x.DialogWasShown).Returns(false);\n _testObject.RecordAppStart();\n _data.Verify(x => x.AppHasOpened(), Times.Once());\n }\n\n [Test]\n public void RecordAppStart_ShouldMarkDialogAsOpened_WhenAllCriteriaIsTrue()\n {\n var criteria1 = new Mock();\n criteria1.Setup(x => x.ShouldOpen(It.IsAny())).Returns(true);\n var criteria2 = new Mock();\n criteria2.Setup(x => x.ShouldOpen(It.IsAny())).Returns(true);\n\n _criteria.Add(criteria1.Object);\n _criteria.Add(criteria2.Object);\n\n _data.SetupSet(x => x.DialogWasShown = true);\n _testObject.RecordAppStart();\n _data.VerifySet(x => x.DialogWasShown = true);\n }\n\n [TestCase(false, true)]\n [TestCase(true, false)]\n [TestCase(false, 
false)]\n public void RecordAppStart_ShouldNotMarkDialogAsOpened_WhenAnyCriteriaIsFalse(bool crit1, bool crit2)\n {\n var criteria1 = new Mock();\n criteria1.Setup(x => x.ShouldOpen(It.IsAny())).Returns(crit1);\n var criteria2 = new Mock();\n criteria2.Setup(x => x.ShouldOpen(It.IsAny())).Returns(crit2);\n\n _criteria.Add(criteria1.Object);\n _criteria.Add(criteria2.Object);\n\n _data.SetupSet(x => x.DialogWasShown = false);\n _testObject.RecordAppStart();\n _data.VerifySet(x => x.DialogWasShown = false);\n }\n\n [Test]\n public void RecordAppStart_ShouldNotMarkDialogAsOpened_WhenThereAreNoCriteria()\n {\n _data.SetupProperty(x => x.DialogWasShown, false);\n _testObject.RecordAppStart();\n _data.Object.DialogWasShown.Should().BeFalse();\n }\n\n [Test]\n public void RecordAppStart_ShouldSaveData_WhenDialogHasNotBeenOpened()\n {\n _data.Setup(x => x.DialogWasShown).Returns(false);\n _testObject.RecordAppStart();\n _dataService.Verify(x => x.SaveData(_data.Object), Times.Once());\n }\n\n [Test]\n public void RecordAppStart_ShouldOpenDialog_WhenAllCriteriaIsTrue()\n {\n var criteria1 = new Mock();\n criteria1.Setup(x => x.ShouldOpen(It.IsAny())).Returns(true);\n _criteria.Add(criteria1.Object);\n\n _testObject.RecordAppStart();\n _userInteraction.Verify(x => x.ShowDialog(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()));\n }\n }\n}\n", "meta": {"content_hash": "02e350f8aaf8f3dc50af6531bac879ca", "timestamp": "", "source": "github", "line_count": 123, "max_line_length": 175, "avg_line_length": 39.8780487804878, "alnum_prop": 0.6332313965341488, "repo_name": "codestuffers/MvvmCrossPlugins", "id": "5f6368e674642a4bf1562e9743ab9f29696b71a6", "size": "4907", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/FeedbackDialog/Tests/codestuffers.MvvmCross.Plugins.FeedbackDialog.Tests/MvxFeedbackDialogTests.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "124864"}, {"name": "PowerShell", "bytes": "203"}, 
{"name": "Puppet", "bytes": "906"}]}} {"text": "/*\n * cs170 -- Rich wolski\n * producer-consumer example using kthreads\n * uses condition variables for full/empty conditions\n * uses condition variable to fulfill order\n * uses separate lock for each stock\n */\n#include \n#include \n#include \n#include \n\n#include \"c-timer.h\"\n#include \"kt.h\"\n\n#define RAND() (drand48())\n\nstruct order\n{\n\tint stock_id;\n\tint quantity;\n\tint action;\t/* buy or sell */\n\tkt_sem fulfilled;\t\n};\n\nstruct order_que\n{\n\tstruct order **orders;\n\tint size;\n\tint head;\n\tint tail;\n\tkt_sem full;\n\tkt_sem empty;\n};\n\nstruct stock\n{\n\tint quantity;\n};\n\nstruct market\n{\n\tstruct stock *stocks;\n\tint count;\n};\n\nstruct order *InitOrder(int id, int quantity, int action)\n{\n\tstruct order *order;\n\n\torder = (struct order *)malloc(sizeof(struct order));\n\tif(order == NULL) {\n\t\treturn(NULL);\n\t}\n\torder->stock_id = id;\n\torder->quantity = quantity;\n\torder->action = action;\n\torder->fulfilled = make_kt_sem(0);\n\treturn(order);\n}\n\nvoid FreeOrder(struct order *order)\n{\n\tkill_kt_sem(order->fulfilled);\n\tfree(order);\n}\n\nstruct order_que *InitOrderQue(int size)\n{\n\tstruct order_que *oq;\n\n\toq = (struct order_que *)malloc(sizeof(struct order_que));\n\tif(oq == NULL) {\n\t\treturn(NULL);\n\t}\n\tmemset(oq,0,sizeof(struct order_que));\n\n\toq->size = size+1; /* empty condition burns a slot */\n\toq->orders = (struct order **)malloc(size*sizeof(struct order *));\n\tif(oq->orders == NULL) {\n\t\tfree(oq);\n\t\treturn(NULL);\n\t}\n\tmemset(oq->orders,0,size*sizeof(struct order *));\n\n\toq->full = make_kt_sem(size);\n\toq->empty = make_kt_sem(0);\n\n\treturn(oq);\n}\n\nvoid FreeOrderQue(struct order_que *oq)\n{\n\twhile(oq->head != oq->tail) {\n\t\tFreeOrder(oq->orders[oq->tail]);\n\t\toq->tail = (oq->tail + 1) % oq->size;\n\t}\n\n\tkill_kt_sem(oq->full);\n\tkill_kt_sem(oq->empty);\n\tfree(oq->orders);\n\tfree(oq);\n\treturn;\n}\n\nstruct market 
*InitMarket(int stock_count, int init_quantity)\n{\n\tstruct market *m;\n\tint i;\n\n\tm = (struct market *)malloc(sizeof(struct market));\n\tif(m == NULL) {\n\t\treturn(NULL);\n\t}\n\tm->count = stock_count;\n\n\tm->stocks = (struct stock *)malloc(stock_count*sizeof(struct stock));\n\tif(m->stocks == NULL) {\n\t\tfree(m);\n\t\treturn(NULL);\n\t}\n\n\tfor(i=0; i < stock_count; i++) {\n\t\tm->stocks[i].quantity = init_quantity;\n\t}\n\n\treturn(m);\n}\n\nvoid FreeMarket(struct market *m)\n{\n\tfree(m->stocks);\n\tfree(m);\n\treturn;\n}\n\nvoid PrintMarket(struct market *m)\n{\n\tint i;\n\tfor(i=0; i < m->count; i++) {\n\t\tprintf(\"stock: %d, quantity: %d\\n\",\n\t\t\ti,m->stocks[i].quantity);\n\t}\n\n\treturn;\n}\n\nstruct client_arg\n{\n\tint id;\n\tint order_count;\n\tstruct order_que *order_que;\n\tint max_stock_id;\n\tint max_quantity;\n\tint verbose;\n};\n\nstruct trader_arg\n{\n\tint id;\n\tstruct order_que *order_que;\n\tstruct market *market;\n\tint *done;\n\tint verbose;\n};\n\nvoid ClientThread(void *arg)\n{\n\tstruct client_arg *ca = (struct client_arg *)arg;\n\tint i;\n\tint next;\n\tstruct order *order;\n\tint stock_id;\n\tint quantity;\n\tint action;\n\tint queued;\n\tdouble now;\n\t\n\n\tfor(i=0; i < ca->order_count; i++) {\n\t\t/*\n\t\t * create an order for a random stock\n\t\t */\n\t\tstock_id = (int)(RAND() * ca->max_stock_id);\n\t\tquantity = (int)(RAND() * ca->max_quantity);\n\t\tif(RAND() > 0.5) {\n\t\t\taction = 0; /* 0 => buy */\n\t\t} else {\n\t\t\taction = 1; /* 1 => sell */\n\t\t}\n\t\torder = InitOrder(stock_id,quantity,action);\n\t\tif(order == NULL) {\n\t\t\tfprintf(stderr,\"no space for order\\n\");\n\t\t\texit(1);\n\t\t}\n\t\t/*\n\t\t * queue it for the traders\n\t\t */\n\t\tP_kt_sem(ca->order_que->full);\n\t\tnext = (ca->order_que->head + 1) % ca->order_que->size;\n\t\t/*\n\t\t * there is space in the queue, add the order and bump\n\t\t * the head\n\t\t */\n\t\tif(ca->verbose == 1) {\n\t\t\tnow = CTimer();\n\t\t\tprintf(\"%10.0f 
client %d: \",now,ca->id);\n\t\t\tprintf(\"queued stock %d, for %d, %s\\n\",\n\t\t\t\torder->stock_id,\n\t\t\t\torder->quantity,\n\t\t\t\t(order->action ? \"SELL\" : \"BUY\")); \n\t\t}\n\t\tca->order_que->orders[next] = order;\n\t\tca->order_que->head = next;\n\t\t/*\n\t\t * signal traders that there is another order\n\t\t */\n\t\tV_kt_sem(ca->order_que->empty);\n\t\t/*\n\t\t * wait until the order is fulfilled\n\t\t */\n\t\tP_kt_sem(order->fulfilled);\n\t\t/*\n\t\t * done, free the order and repeat\n\t\t */\n\t\tFreeOrder(order);\n\t}\n\n\treturn;\n}\n\nvoid TraderThread(void *arg)\n{\n\tstruct trader_arg *ta = (struct trader_arg *)arg;\n\tint dequeued;\n\tstruct order *order;\n\tint tail;\n\tdouble now;\n\tint next;\n\tstruct stock *stock;\n\n\twhile(1) {\n\t\t/*\n\t\t * wait until there is a new order\n\t\t */\n\t\tP_kt_sem(ta->order_que->empty);\n\t\t/*\n\t\t * are we done? Tell the next trader and exit\n\t\t */\n\t\tif(*(ta->done) == 1) {\n\t\t\tV_kt_sem(ta->order_que->empty);\n\t\t\tkt_exit();\n\t\t}\n\t\t/*\n\t\t * get the next order\n\t\t */\n\t\tnext = (ta->order_que->tail + 1) % ta->order_que->size;\n\t\torder = ta->order_que->orders[next];\n\t\tta->order_que->tail = next;\n\t\t/*\n\t\t * tell the clients there is another free slot\n\t\t */\n\t\tV_kt_sem(ta->order_que->full);\n\t\t/*\n\t\t * have an order to process\n\t\t */\n\t\tstock = &(ta->market->stocks[order->stock_id]);\n\t\tif(order->action == 1) { /* BUY */\n\t\t\tstock->quantity -= order->quantity;\n\t\t\tif(stock->quantity < 0) {\n\t\t\t\tstock->quantity = 0;\n\t\t\t}\n\t\t} else {\n\t\t\tstock->quantity += order->quantity;\n\t\t}\n\t\tif(ta->verbose == 1) {\n\t\t\tnow = CTimer();\n\t\t\tprintf(\"%10.0f trader: %d \",now,ta->id);\n\t\t\tprintf(\"fulfilled stock %d for %d\\n\",\n\t\t\t\torder->stock_id,\n\t\t\t\torder->quantity);\n\t\t}\n\t\t/*\n\t\t * tell the client the order is done\n\t\t */\n\t\tV_kt_sem(order->fulfilled);\n\t}\n\n\treturn;\n}\n\n#define ARGS \"c:t:o:q:s:V\"\nchar *Usage = 
\"market1 -c clients -t traders -o orders -q queue-size -s stock-count -V \\n\";\n\n#define INIT_COUNT 5000\n\nint main(int argc, char **argv)\n{\n\tint c;\n\tint client_threads;\n\tint trader_threads;\n\tint orders_per_client;\n\tint que_size;\n\tint max_stock;\n\tint verbose;\n\tstruct client_arg *ca;\n\tstruct trader_arg *ta;\n\tvoid **client_ids;\n\tvoid **trader_ids;\n\tstruct order_que *order_que;\n\tstruct market *market;\n\tint i;\n\tint done;\n\tint err;\n\tdouble start;\n\tdouble end;\n\t\n\n\t/*\n\t * defaults\n\t */\n\tclient_threads = 1;\n\ttrader_threads = 1;\n\torders_per_client = 1;\n\tverbose = 0;\n\tque_size = 1;\n\tmax_stock = 1;\n\n\twhile((c = getopt(argc,argv,ARGS)) != EOF) {\n\t\tswitch(c) {\n\t\t\tcase 'c':\n\t\t\t\tclient_threads = atoi(optarg);\n\t\t\t\tbreak;\n\t\t\tcase 't':\n\t\t\t\ttrader_threads = atoi(optarg);\n\t\t\t\tbreak;\n\t\t\tcase 'o':\n\t\t\t\torders_per_client = atoi(optarg);\n\t\t\t\tbreak;\n\t\t\tcase 'q':\n\t\t\t\tque_size = atoi(optarg);\n\t\t\t\tbreak;\n\t\t\tcase 's':\n\t\t\t\tmax_stock = atoi(optarg);\n\t\t\t\tbreak;\n\t\t\tcase 'V':\n\t\t\t\tverbose = 1;\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\tfprintf(stderr,\n\t\t\t\t\t\"unrecognized command %c\\n\",\n\t\t\t\t\t\t(char)c);\n\t\t\t\tfprintf(stderr,\"usage: %s\",Usage);\n\t\t\t\texit(1);\n\t\t}\n\t}\n\n\tclient_ids = (void **)malloc(client_threads*sizeof(void *));\n\tif(client_ids == NULL) {\n\t\texit(1);\n\t}\n\n\tca = (struct client_arg *)malloc(client_threads*sizeof(struct client_arg));\n\tif(ca == NULL) {\n\t\texit(1);\n\t}\n\n\ttrader_ids = (void **)malloc(trader_threads*sizeof(void *));\n\tif(trader_ids == NULL) {\n\t\texit(1);\n\t}\n\n\tta = (struct trader_arg *)malloc(trader_threads*sizeof(struct trader_arg));\n\tif(ta == NULL) {\n\t\texit(1);\n\t}\n\n\torder_que = InitOrderQue(que_size);\n\tif(order_que == NULL) {\n\t\texit(1);\n\t}\n\n\tmarket = InitMarket(max_stock,INIT_COUNT);\n\tif(market == NULL) {\n\t\texit(1);\n\t}\n\n\tstart = CTimer();\n\tfor(i=0; i 
< client_threads; i++) {\n\t\tca[i].id = i;\n\t\tca[i].order_count = orders_per_client;\n\t\tca[i].max_stock_id = max_stock;\n\t\tca[i].max_quantity = INIT_COUNT;\n\t\tca[i].order_que = order_que;\n\t\tca[i].verbose = verbose;\n\t\tclient_ids[i] = kt_fork(ClientThread,(void *)&ca[i]);\n\t\tif(client_ids[i] == NULL) {\n\t\t\tfprintf(stderr,\"client thread create %d failed\\n\",i);\n\t\t\texit(1);\n\t\t}\n\t}\n\n\tdone = 0;\n\tfor(i=0; i < trader_threads; i++) {\n\t\tta[i].id = i;\n\t\tta[i].order_que = order_que;\n\t\tta[i].market = market;\n\t\tta[i].done = &done;\n\t\tta[i].verbose = verbose;\n\t\ttrader_ids[i] = kt_fork(TraderThread,(void *)&ta[i]);\n\t\tif(trader_ids[i] == NULL) {\n\t\t\tfprintf(stderr,\"trader thread create %d failed\\n\",i);\n\t\t\texit(1);\n\t\t}\n\t}\n\n\t/*\n\t * wait for the clients to finish\n\t */\n\tfor(i=0; i < client_threads; i++) {\n\t\tkt_join(client_ids[i]);\n\t}\n\n\t/*\n\t * tell the traders we are done\n\t */\n\tdone = 1;\n\tV_kt_sem(order_que->empty);\n\n\tfor(i=0; i < trader_threads; i++) {\n\t\tkt_join(trader_ids[i]);\n\t}\n\tend = CTimer();\n\n\tif(verbose == 1) {\n\t\tPrintMarket(market);\n\t}\n\n\tprintf(\"%f transactions / sec\\n\",\n\t\t(double)(orders_per_client*client_threads) / (end-start));\n\n\tfree(ca);\n\tfree(ta);\n\tfree(client_ids);\n\tfree(trader_ids);\n\tFreeMarket(market);\n\tFreeOrderQue(order_que);\n\n\treturn(0);\n}\n\t\t\n\n\n\t\t\n\t\t\n\t\t\n\n\t\n\t\n\n\n\n\t\n\t\n\n", "meta": {"content_hash": "abcc4e4038574450a3e9091fa1b259aa", "timestamp": "", "source": "github", "line_count": 465, "max_line_length": 103, "avg_line_length": 17.974193548387095, "alnum_prop": 0.6057669298875329, "repo_name": "SeraphRoy/SeraphRoy.github.io", "id": "8a8573a8be750ca694d05a0824a8b9b82f9f346f", "size": "8358", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "assets/market-kthreads.c", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "96402"}, {"name": "HTML", "bytes": "36"}, 
{"name": "Ruby", "bytes": "8813"}]}} {"text": "package client\n\nconst (\n\tStatefulSetSpecType = \"statefulSetSpec\"\n\tStatefulSetSpecFieldActiveDeadlineSeconds = \"activeDeadlineSeconds\"\n\tStatefulSetSpecFieldAutomountServiceAccountToken = \"automountServiceAccountToken\"\n\tStatefulSetSpecFieldContainers = \"containers\"\n\tStatefulSetSpecFieldDNSConfig = \"dnsConfig\"\n\tStatefulSetSpecFieldDNSPolicy = \"dnsPolicy\"\n\tStatefulSetSpecFieldEnableServiceLinks = \"enableServiceLinks\"\n\tStatefulSetSpecFieldEphemeralContainers = \"ephemeralContainers\"\n\tStatefulSetSpecFieldFSGroupChangePolicy = \"fsGroupChangePolicy\"\n\tStatefulSetSpecFieldFsgid = \"fsgid\"\n\tStatefulSetSpecFieldGids = \"gids\"\n\tStatefulSetSpecFieldHostAliases = \"hostAliases\"\n\tStatefulSetSpecFieldHostIPC = \"hostIPC\"\n\tStatefulSetSpecFieldHostNetwork = \"hostNetwork\"\n\tStatefulSetSpecFieldHostPID = \"hostPID\"\n\tStatefulSetSpecFieldHostname = \"hostname\"\n\tStatefulSetSpecFieldImagePullSecrets = \"imagePullSecrets\"\n\tStatefulSetSpecFieldNodeID = \"nodeId\"\n\tStatefulSetSpecFieldObjectMeta = \"metadata\"\n\tStatefulSetSpecFieldOverhead = \"overhead\"\n\tStatefulSetSpecFieldPreemptionPolicy = \"preemptionPolicy\"\n\tStatefulSetSpecFieldReadinessGates = \"readinessGates\"\n\tStatefulSetSpecFieldRestartPolicy = \"restartPolicy\"\n\tStatefulSetSpecFieldRunAsGroup = \"runAsGroup\"\n\tStatefulSetSpecFieldRunAsNonRoot = \"runAsNonRoot\"\n\tStatefulSetSpecFieldRuntimeClassName = \"runtimeClassName\"\n\tStatefulSetSpecFieldScale = \"scale\"\n\tStatefulSetSpecFieldScheduling = \"scheduling\"\n\tStatefulSetSpecFieldSeccompProfile = \"seccompProfile\"\n\tStatefulSetSpecFieldSelector = \"selector\"\n\tStatefulSetSpecFieldServiceAccountName = \"serviceAccountName\"\n\tStatefulSetSpecFieldSetHostnameAsFQDN = \"setHostnameAsFQDN\"\n\tStatefulSetSpecFieldShareProcessNamespace = \"shareProcessNamespace\"\n\tStatefulSetSpecFieldStatefulSetConfig = 
\"statefulSetConfig\"\n\tStatefulSetSpecFieldSubdomain = \"subdomain\"\n\tStatefulSetSpecFieldSysctls = \"sysctls\"\n\tStatefulSetSpecFieldTerminationGracePeriodSeconds = \"terminationGracePeriodSeconds\"\n\tStatefulSetSpecFieldTopologySpreadConstraints = \"topologySpreadConstraints\"\n\tStatefulSetSpecFieldUid = \"uid\"\n\tStatefulSetSpecFieldVolumes = \"volumes\"\n\tStatefulSetSpecFieldWindowsOptions = \"windowsOptions\"\n)\n\ntype StatefulSetSpec struct {\n\tActiveDeadlineSeconds *int64 `json:\"activeDeadlineSeconds,omitempty\" yaml:\"activeDeadlineSeconds,omitempty\"`\n\tAutomountServiceAccountToken *bool `json:\"automountServiceAccountToken,omitempty\" yaml:\"automountServiceAccountToken,omitempty\"`\n\tContainers []Container `json:\"containers,omitempty\" yaml:\"containers,omitempty\"`\n\tDNSConfig *PodDNSConfig `json:\"dnsConfig,omitempty\" yaml:\"dnsConfig,omitempty\"`\n\tDNSPolicy string `json:\"dnsPolicy,omitempty\" yaml:\"dnsPolicy,omitempty\"`\n\tEnableServiceLinks *bool `json:\"enableServiceLinks,omitempty\" yaml:\"enableServiceLinks,omitempty\"`\n\tEphemeralContainers []EphemeralContainer `json:\"ephemeralContainers,omitempty\" yaml:\"ephemeralContainers,omitempty\"`\n\tFSGroupChangePolicy string `json:\"fsGroupChangePolicy,omitempty\" yaml:\"fsGroupChangePolicy,omitempty\"`\n\tFsgid *int64 `json:\"fsgid,omitempty\" yaml:\"fsgid,omitempty\"`\n\tGids []int64 `json:\"gids,omitempty\" yaml:\"gids,omitempty\"`\n\tHostAliases []HostAlias `json:\"hostAliases,omitempty\" yaml:\"hostAliases,omitempty\"`\n\tHostIPC bool `json:\"hostIPC,omitempty\" yaml:\"hostIPC,omitempty\"`\n\tHostNetwork bool `json:\"hostNetwork,omitempty\" yaml:\"hostNetwork,omitempty\"`\n\tHostPID bool `json:\"hostPID,omitempty\" yaml:\"hostPID,omitempty\"`\n\tHostname string `json:\"hostname,omitempty\" yaml:\"hostname,omitempty\"`\n\tImagePullSecrets []LocalObjectReference `json:\"imagePullSecrets,omitempty\" yaml:\"imagePullSecrets,omitempty\"`\n\tNodeID string 
`json:\"nodeId,omitempty\" yaml:\"nodeId,omitempty\"`\n\tObjectMeta *ObjectMeta `json:\"metadata,omitempty\" yaml:\"metadata,omitempty\"`\n\tOverhead map[string]string `json:\"overhead,omitempty\" yaml:\"overhead,omitempty\"`\n\tPreemptionPolicy string `json:\"preemptionPolicy,omitempty\" yaml:\"preemptionPolicy,omitempty\"`\n\tReadinessGates []PodReadinessGate `json:\"readinessGates,omitempty\" yaml:\"readinessGates,omitempty\"`\n\tRestartPolicy string `json:\"restartPolicy,omitempty\" yaml:\"restartPolicy,omitempty\"`\n\tRunAsGroup *int64 `json:\"runAsGroup,omitempty\" yaml:\"runAsGroup,omitempty\"`\n\tRunAsNonRoot *bool `json:\"runAsNonRoot,omitempty\" yaml:\"runAsNonRoot,omitempty\"`\n\tRuntimeClassName string `json:\"runtimeClassName,omitempty\" yaml:\"runtimeClassName,omitempty\"`\n\tScale *int64 `json:\"scale,omitempty\" yaml:\"scale,omitempty\"`\n\tScheduling *Scheduling `json:\"scheduling,omitempty\" yaml:\"scheduling,omitempty\"`\n\tSeccompProfile *SeccompProfile `json:\"seccompProfile,omitempty\" yaml:\"seccompProfile,omitempty\"`\n\tSelector *LabelSelector `json:\"selector,omitempty\" yaml:\"selector,omitempty\"`\n\tServiceAccountName string `json:\"serviceAccountName,omitempty\" yaml:\"serviceAccountName,omitempty\"`\n\tSetHostnameAsFQDN *bool `json:\"setHostnameAsFQDN,omitempty\" yaml:\"setHostnameAsFQDN,omitempty\"`\n\tShareProcessNamespace *bool `json:\"shareProcessNamespace,omitempty\" yaml:\"shareProcessNamespace,omitempty\"`\n\tStatefulSetConfig *StatefulSetConfig `json:\"statefulSetConfig,omitempty\" yaml:\"statefulSetConfig,omitempty\"`\n\tSubdomain string `json:\"subdomain,omitempty\" yaml:\"subdomain,omitempty\"`\n\tSysctls []Sysctl `json:\"sysctls,omitempty\" yaml:\"sysctls,omitempty\"`\n\tTerminationGracePeriodSeconds *int64 `json:\"terminationGracePeriodSeconds,omitempty\" yaml:\"terminationGracePeriodSeconds,omitempty\"`\n\tTopologySpreadConstraints []TopologySpreadConstraint `json:\"topologySpreadConstraints,omitempty\" 
yaml:\"topologySpreadConstraints,omitempty\"`\n\tUid *int64 `json:\"uid,omitempty\" yaml:\"uid,omitempty\"`\n\tVolumes []Volume `json:\"volumes,omitempty\" yaml:\"volumes,omitempty\"`\n\tWindowsOptions *WindowsSecurityContextOptions `json:\"windowsOptions,omitempty\" yaml:\"windowsOptions,omitempty\"`\n}\n", "meta": {"content_hash": "2c8148bdf5839f1bfa3f8340ae67bd26", "timestamp": "", "source": "github", "line_count": 88, "max_line_length": 157, "avg_line_length": 89.73863636363636, "alnum_prop": 0.609725212105863, "repo_name": "rancherio/rancher", "id": "f6c34d4ef8c3521031a7be0be14f8c275599d81f", "size": "7897", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "pkg/client/generated/project/v3/zz_generated_stateful_set_spec.go", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Python", "bytes": "6795"}, {"name": "Shell", "bytes": "25328"}]}} {"text": "$LOAD_PATH.unshift './lib'\n\nrequire 'tilia/dav'\nrequire 'rack'\n\nTime.zone = 'Berlin'\n\ntestserver_root = File.join(File.dirname(__FILE__), 'testserver_root')\nDir.mkdir(testserver_root) unless File.exist?(testserver_root)\n\nfail \"could not create root directory #{testserver_root}\" unless File.directory?(testserver_root)\n\napp = proc do |env|\n root = Tilia::Dav::Fs::Directory.new(testserver_root)\n server = Tilia::Dav::Server.new(env, [root])\n\n server.add_plugin(Tilia::Dav::Browser::Plugin.new)\n\n server.exec\nend\n\nRack::Handler::WEBrick.run app\n", "meta": {"content_hash": "cc738eb13c043629a7de5cdba34238a4", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 97, "avg_line_length": 24.90909090909091, "alnum_prop": 0.7226277372262774, "repo_name": "tilia/tilia-dav", "id": "5777ca23fc049c5001c704b5f1a705f423fa19ca", "size": "627", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "examples/minimal.rb", "mode": "33261", "license": "bsd-3-clause", "language": [{"name": "CSS", "bytes": "17084"}, {"name": "Ruby", "bytes": 
"1797621"}]}} {"text": "\ufeffusing System;\nusing System.Collections.Generic;\nusing System.Linq;\nusing System.Web;\nusing iTextSharp.text;\nusing iTextSharp.text.pdf;\nusing SysAgropec.Class;\nusing SysAgropec.Models;\n\nnamespace SysAgropec.Class\n{\n public class Relatorio : RelatorioHelper\n {\n\n public override void MontaCorpoDados()\n {\n base.MontaCorpoDados();\n \n PdfPTable table = new PdfPTable(5);\n BaseColor preto = new BaseColor(0, 0, 0);\n BaseColor fundo = new BaseColor(200, 200, 200);\n Font font = FontFactory.GetFont(\"Verdana\", 8, Font.NORMAL, preto);\n Font titulo = FontFactory.GetFont(\"Verdana\", 8, Font.BOLD, preto);\n\n float[] colsW = { 10, 10, 10, 10, 10 };\n table.SetWidths(colsW);\n table.HeaderRows = 1;\n table.WidthPercentage = 100f;\n\n table.DefaultCell.Border = PdfPCell.BOTTOM_BORDER;\n table.DefaultCell.BorderColor = preto;\n table.DefaultCell.BorderColorBottom = new BaseColor(255, 255, 255);\n table.DefaultCell.Padding = 10;\n\n \n if (TipoRelatorio == 0) {\n\n table.AddCell(getNewCell(\"Animal\", titulo, Element.ALIGN_LEFT, 10, PdfPCell.BOTTOM_BORDER, preto, fundo));\n table.AddCell(getNewCell(\"Registro\", titulo, Element.ALIGN_LEFT, 10, PdfPCell.BOTTOM_BORDER, preto, fundo));\n table.AddCell(getNewCell(\"Tatuagem\", titulo, Element.ALIGN_LEFT, 10, PdfPCell.BOTTOM_BORDER, preto, fundo));\n table.AddCell(getNewCell(\"Sexo\", titulo, Element.ALIGN_LEFT, 10, PdfPCell.BOTTOM_BORDER, preto, fundo));\n table.AddCell(getNewCell(\"Cadastrado Em:\", titulo, Element.ALIGN_LEFT, 10, PdfPCell.BOTTOM_BORDER, preto, fundo));\n\n\n AnimalViewModel a = new AnimalViewModel();\n\n HttpContext httpContext = HttpContext.Current;\n\n int idfazenda =Convert.ToInt16( httpContext.ApplicationInstance.Session[\"idfazenda\"].ToString());\n\n var animais = a.RelatorioAnimais(idfazenda, datIni, datFin);\n\n var animalOLD = 0;\n\n foreach(var an in animais)\n {\n\n if (an.Raca_ID != animalOLD)\n {\n var cell = getNewCell(\"Ra\u00e7a: \" + an.nomeraca, 
titulo, Element.ALIGN_LEFT, 10, PdfPCell.BOTTOM_BORDER);\n cell.Colspan = 5;\n table.AddCell(cell);\n animalOLD = an.Raca_ID;\n }\n\n table.AddCell(getNewCell(an.Descricao, font, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n table.AddCell(getNewCell(an.Registro, font, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n table.AddCell(getNewCell(an.Tatuagem, font, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n table.AddCell(getNewCell(an.descriSexo, font, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n table.AddCell(getNewCell(an.Datacadastro.ToString(\"dd/MM/yyyy\"), font, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n\n }\n\n var cell2 = getNewCell(\"Filtros de Pesquisa \" + Filtros, titulo, Element.ALIGN_LEFT, 10, PdfPCell.BOTTOM_BORDER);\n cell2.Colspan = 5;\n table.AddCell(cell2);\n\n\n }\n else if(TipoRelatorio == 1)\n {\n table.AddCell(getNewCell(\"Medicamento\", titulo, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER, preto, fundo));\n table.AddCell(getNewCell(\"Qtd M\u00ednima\", titulo, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER, preto, fundo));\n table.AddCell(getNewCell(\"Qtd Atual\", titulo, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER, preto, fundo));\n table.AddCell(getNewCell(\"Estocado Em:\", titulo, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER, preto, fundo));\n table.AddCell(getNewCell(\"\", titulo, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER, preto, fundo));\n\n Estoque_MedicamentoViewModel a = new Estoque_MedicamentoViewModel();\n\n var es = a.RelatorioEstoque(datIni, datFin);\n \n foreach (var an in es)\n {\n \n table.AddCell(getNewCell(an.nomeMedicamento, font, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n table.AddCell(getNewCell(Convert.ToString(an.Quantidademinima), font, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n table.AddCell(getNewCell(Convert.ToString(an.Quantidadeatual), font, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n table.AddCell(getNewCell(an.Data_Estocado.ToString(\"dd/MM/yyyy\"), font, 
Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n table.AddCell(getNewCell(\"\", font, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n \n }\n\n var cell2 = getNewCell(\"Filtros de Pesquisa \" + Filtros, titulo, Element.ALIGN_LEFT, 10, PdfPCell.BOTTOM_BORDER);\n cell2.Colspan = 5;\n table.AddCell(cell2);\n }\n else\n {\n table.AddCell(getNewCell(\"Data de Produ\u00e7\u00e3o\", titulo, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER, preto, fundo));\n table.AddCell(getNewCell(\"Qtd (KG)\", titulo, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER, preto, fundo));\n table.AddCell(getNewCell(\"Observa\u00e7\u00e3o\", titulo, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER, preto, fundo));\n table.AddCell(getNewCell(\"\", titulo, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER, preto, fundo));\n table.AddCell(getNewCell(\"\", titulo, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER, preto, fundo));\n\n ProducaoViewModel prod = new ProducaoViewModel();\n\n var prods = prod.RelatorioAnimais(datIni, datFin);\n\n var prodOLD = 0;\n\n double totalProduzido = 0;\n double totalGeral = 0;\n int contador = 0;\n foreach (var pr in prods)\n {\n int quantidadeRegistros = prods.Count;\n contador++;\n\n if (pr.Animail_ID != prodOLD)\n {\n if (totalProduzido > 0)\n {\n var cell1 = getNewCell(\"Total produzido: \" + totalProduzido + \" KG\", titulo, Element.ALIGN_LEFT, 10, PdfPCell.BOTTOM_BORDER);\n cell1.Colspan = 5;\n table.AddCell(cell1);\n totalGeral = totalGeral + totalProduzido;\n\n }\n\n var cell = getNewCell(\"Animal: \" + pr.nomeAnimal, titulo, Element.ALIGN_LEFT, 10, PdfPCell.BOTTOM_BORDER);\n cell.Colspan = 5;\n table.AddCell(cell);\n prodOLD = pr.Animail_ID;\n\n totalProduzido = 0;\n }\n\n table.AddCell(getNewCell(pr.Datarealizada.ToShortDateString(), font, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n table.AddCell(getNewCell(Convert.ToString(pr.Quantidade), font, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n table.AddCell(getNewCell(pr.Observacao, font, 
Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n table.AddCell(getNewCell(\"\", font, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n table.AddCell(getNewCell(\"\", font, Element.ALIGN_LEFT, 5, PdfPCell.BOTTOM_BORDER));\n totalProduzido = totalProduzido + pr.Quantidade;\n\n if (contador == quantidadeRegistros)\n {\n var cell1 = getNewCell(\"Total produzido: \" + totalProduzido + \" KG\", titulo, Element.ALIGN_LEFT, 10, PdfPCell.BOTTOM_BORDER);\n cell1.Colspan = 5;\n table.AddCell(cell1);\n\n totalGeral = totalGeral + totalProduzido;\n }\n\n }\n\n var cell2 = getNewCell(\"Total Geral: \" + totalGeral + \" KG\", titulo, Element.ALIGN_LEFT, 10, PdfPCell.BOTTOM_BORDER);\n cell2.Colspan = 5;\n table.AddCell(cell2);\n\n var cell3 = getNewCell(\"Filtros de Pesquisa \" + Filtros, titulo, Element.ALIGN_LEFT, 10, PdfPCell.BOTTOM_BORDER);\n cell3.Colspan = 5;\n table.AddCell(cell3);\n \n }\n \n doc.Add(table);\n }\n\n \n }\n}", "meta": {"content_hash": "72caf857cf70fdea2f0fc8a19565d814", "timestamp": "", "source": "github", "line_count": 181, "max_line_length": 153, "avg_line_length": 48.049723756906076, "alnum_prop": 0.5604231344141658, "repo_name": "ThamiresMancilio/TCC", "id": "e9583029604166bff26f1623faab440c4cf337f6", "size": "8705", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SysAgropec/SysAgropec/Class/Relatorio.cs", "mode": "33188", "license": "mit", "language": [{"name": "ASP", "bytes": "202"}, {"name": "C#", "bytes": "456820"}, {"name": "CSS", "bytes": "289439"}, {"name": "JavaScript", "bytes": "1398046"}, {"name": "PowerShell", "bytes": "177592"}]}} {"text": "\n\n\n\n\n\nUses of Class org.apache.poi.hwpf.model.types.TBDAbstractType (POI API Documentation)\n\n\n\n\n\n\n\n\n\n\n\n\n
\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
\n\n\n \n \n \n \n \n \n \n \n \n \n
Overview  Package  Class   Use  Tree  Deprecated  Index  Help 
\n
\n\n
\n PREV \n NEXT\n FRAMES  \n NO FRAMES  \n \n\n\n\n
\n\n\n\n
\n
\n

\nUses of Class
org.apache.poi.hwpf.model.types.TBDAbstractType

\n
\n\n\n\n\n\n\n\n\n\n
\nPackages that use TBDAbstractType
org.apache.poi.hwpf.model  
\n \n

\n\n\n\n\n\n
\nUses of TBDAbstractType in org.apache.poi.hwpf.model
\n \n

\n\n\n\n\n\n\n\n\n\n
Subclasses of TBDAbstractType in org.apache.poi.hwpf.model
\n classTabDescriptor\n\n
\n          Tab descriptor.
\n \n

\n


\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
\n\n\n \n \n \n \n \n \n \n \n \n \n
Overview  Package  Class   Use  Tree  Deprecated  Index  Help 
\n
\n\n
\n PREV \n NEXT\n FRAMES  \n NO FRAMES  \n \n\n\n\n
\n\n\n\n
\n\n Copyright 2012 The Apache Software Foundation or\n its licensors, as applicable.\n \n\n\n", "meta": {"content_hash": "a5471c70fcd296c6061c079a61600013", "timestamp": "", "source": "github", "line_count": 183, "max_line_length": 318, "avg_line_length": 44.43715846994535, "alnum_prop": 0.6158386620757501, "repo_name": "Stephania16/ProductDesignGame", "id": "d17b86608108f73dd5441560fa2b281c0fad0e56", "size": "8132", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "MinimaxAlgorithm/poi-3.8/docs/apidocs/org/apache/poi/hwpf/model/types/class-use/TBDAbstractType.html", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "15337"}, {"name": "HTML", "bytes": "74071650"}, {"name": "Java", "bytes": "37924"}]}} {"text": "\n\n\n\n\n\nV8 API Reference Guide for node.js v8.6.0: Member List\n\n\n\n\n\n\n\n\n\n\n
\n
\n\n \n \n \n \n \n
\n
V8 API Reference Guide for node.js v8.6.0\n
\n
\n
\n\n\n\n
\n \n
\n \n\n
\n
\n\n\n
\n\n
\n\n
\n \n
\n
\n
\n
\n
v8::Task Member List
\n
\n
\n\n

This is the complete list of members for v8::Task, including all inherited members.

\n\n \n \n
Run()=0 (defined in v8::Task)v8::Taskpure virtual
~Task()=default (defined in v8::Task)v8::Taskvirtual
\n\n
\nGenerated by  \n\"doxygen\"/\n 1.8.11\n
\n\n\n", "meta": {"content_hash": "39c70ece54199c42a55df6c4f32976e1", "timestamp": "", "source": "github", "line_count": 108, "max_line_length": 287, "avg_line_length": 45.583333333333336, "alnum_prop": 0.6416819012797075, "repo_name": "v8-dox/v8-dox.github.io", "id": "deb53b715e952cd65334a43479530468d409c8ba", "size": "4923", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "2e75ac0/html/classv8_1_1Task-members.html", "mode": "33188", "license": "mit", "language": []}} {"text": "title: Telerik.Web.UI.BreadcrumbClientEvents\npage_title: Telerik.Web.UI.BreadcrumbClientEvents\ndescription: Telerik.Web.UI.BreadcrumbClientEvents\n---\n\n# Telerik.Web.UI.BreadcrumbClientEvents\n\nDefines the client events handlers.\n\n## Inheritance Hierarchy\n\n* System.Object\n* Telerik.Web.StateManager : IMarkableStateManager, IStateManager\n* Telerik.Web.UI.BreadcrumbClientEvents : IDefaultCheck\n\n## Properties\n\n### OnChange `String`\n\nFires when the value of the Breadcrumb is changed.\n\n### OnClick `String`\n\nFires when an item or a rootitem is clicked.\n\n### OnInitialize `String`\n\nFired when the control is initialized.\n\n### OnLoad `String`\n\nFired when the control is loaded on the page.\n\n", "meta": {"content_hash": "539d82a9eace8e95fd39b98e2a7a51de", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 65, "avg_line_length": 20.939393939393938, "alnum_prop": 0.7771345875542692, "repo_name": "telerik/ajax-docs", "id": "9ed81c353cab7bd85a80dae4e117517da68b1dcc", "size": "695", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "api/server/Telerik.Web.UI/BreadcrumbClientEvents.md", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ASP.NET", "bytes": "2253"}, {"name": "C#", "bytes": "6026"}, {"name": "HTML", "bytes": "2240"}, {"name": "JavaScript", "bytes": "3052"}, {"name": "Ruby", "bytes": "3275"}]}} {"text": "
\n
\n
\n
\n
\n
\n

Confirm Alumni Request Fulfillment for {{ orgName }}

\n
\n \n
\n
\n \n
\n \n
\n
\n
\n
\n
", "meta": {"content_hash": "37b1b71329d7140fa7e3398826ce9590", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 181, "avg_line_length": 54.333333333333336, "alnum_prop": 0.5705521472392638, "repo_name": "Srokit/UMStudentOrgAlumniRequestGW", "id": "f30aa6f95d353d63fd15f114f38adc2643c62c84", "size": "1141", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ng_app/comps/fulfillrequest/fulfillrequest.view.html", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "185"}, {"name": "HTML", "bytes": "34247"}, {"name": "JavaScript", "bytes": "52993"}]}} {"text": "package site.zhangqun.lanagina;\n\nimport org.junit.Test;\n\nimport static org.junit.Assert.*;\n\n/**\n * Example local unit test, which will execute on the development machine (host).\n *\n * @see Testing documentation\n */\npublic class ExampleUnitTest {\n @Test\n public void addition_isCorrect() throws Exception {\n assertEquals(4, 2 + 2);\n }\n}", "meta": {"content_hash": "e86e6b6ab3a8a1123bf2670896f0a35a", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 81, "avg_line_length": 23.529411764705884, "alnum_prop": 0.6925, "repo_name": "uffuff/LSMS", "id": "9f5f92f824d0df4c8377375ec92eb1f17b931b70", "size": "400", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/test/java/site/zhangqun/lanagina/ExampleUnitTest.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "61834"}, {"name": "Groovy", "bytes": "11701"}, {"name": "HTML", "bytes": "255867"}, {"name": "Java", "bytes": "149374"}, {"name": "JavaScript", "bytes": "186353"}, {"name": "PHP", "bytes": "2333"}]}} {"text": "\"\"\"\n relief.schema.meta\n ~~~~~~~~~~~~~~~~~~\n\n :copyright: 2013 by Daniel Neuh\u00e4user\n :license: BSD, see LICENSE.rst for details\n\"\"\"\nfrom relief.utils import class_cloner\nfrom relief.constants import Unspecified\nfrom relief.schema.core import BaseElement\n\n\nclass 
Maybe(BaseElement):\n \"\"\"\n A meta element that represents an element that is optional. The value of\n this element will be `None` unless the contained element's value is not\n `Unspecified`.\n\n >>> from relief import Unicode\n >>> Maybe.of(Unicode)().value\n None\n >>> Maybe.of(Unicode)(u'foobar').value\n u'foobar\n\n .. versionadded:: 2.1.0\n \"\"\"\n member_schema = None\n\n @class_cloner\n def of(cls, schema):\n cls.member_schema = schema\n return cls\n\n def __init__(self, value=Unspecified):\n self.member = self.member_schema()\n super(Maybe, self).__init__(value)\n if self.member_schema is None:\n raise TypeError('member_schema is unknown')\n\n @property\n def value(self):\n return None if self.member.value is Unspecified else self.member.value\n\n @value.setter\n def value(self, new_value):\n if new_value is not Unspecified:\n raise AttributeError(\"can't set attribute\")\n\n def serialize(self, value):\n if value is None:\n return Unspecified\n return self.member.serialize(value)\n\n def unserialize(self, raw_value):\n value = self.member.unserialize(raw_value)\n return None if value is Unspecified else value\n\n def set_from_raw(self, raw_value):\n self.raw_value = raw_value\n value = self.unserialize(raw_value)\n if value is None:\n self.member.set_from_raw(Unspecified)\n else:\n self.member.set_from_raw(raw_value)\n self.is_valid = None\n\n def set_from_native(self, value):\n self.member.set_from_native(value)\n self.raw_value = self.member.raw_value\n self.is_valid = None\n\n def validate(self, context=None):\n if context is None:\n context = {}\n self.is_valid = self.member.validate() or self.value is None\n return self.is_valid\n", "meta": {"content_hash": "bbc965975be60db1f6c19f3ada479c22", "timestamp": "", "source": "github", "line_count": 76, "max_line_length": 78, "avg_line_length": 28.539473684210527, "alnum_prop": 0.6251728907330567, "repo_name": "DasIch/relief", "id": "3587702365831b464512bf75714095614e3c9290", "size": "2186", "binary": 
false, "copies": "1", "ref": "refs/heads/master", "path": "relief/schema/meta.py", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Python", "bytes": "117437"}, {"name": "Shell", "bytes": "6701"}]}} {"text": "package serialization\n\nimport (\n\t\"github.com/cloudfoundry-incubator/receptor\"\n\t\"github.com/cloudfoundry-incubator/runtime-schema/models\"\n)\n\nfunc ActualLRPToResponse(actualLRP models.ActualLRP, evacuating bool) receptor.ActualLRPResponse {\n\treturn receptor.ActualLRPResponse{\n\t\tProcessGuid: actualLRP.ProcessGuid,\n\t\tInstanceGuid: actualLRP.InstanceGuid,\n\t\tCellID: actualLRP.CellID,\n\t\tDomain: actualLRP.Domain,\n\t\tIndex: actualLRP.Index,\n\t\tAddress: actualLRP.Address,\n\t\tPorts: PortMappingFromModel(actualLRP.Ports),\n\t\tState: actualLRPStateToResponseState(actualLRP.State),\n\t\tPlacementError: actualLRP.PlacementError,\n\t\tSince: actualLRP.Since,\n\t\tCrashCount: actualLRP.CrashCount,\n\t\tCrashReason: actualLRP.CrashReason,\n\t\tEvacuating: evacuating,\n\t\tModificationTag: actualLRPModificationTagToResponseModificationTag(actualLRP.ModificationTag),\n\t}\n}\n\nfunc actualLRPStateToResponseState(state models.ActualLRPState) receptor.ActualLRPState {\n\tswitch state {\n\tcase models.ActualLRPStateUnclaimed:\n\t\treturn receptor.ActualLRPStateUnclaimed\n\tcase models.ActualLRPStateClaimed:\n\t\treturn receptor.ActualLRPStateClaimed\n\tcase models.ActualLRPStateRunning:\n\t\treturn receptor.ActualLRPStateRunning\n\tcase models.ActualLRPStateCrashed:\n\t\treturn receptor.ActualLRPStateCrashed\n\tdefault:\n\t\treturn receptor.ActualLRPStateInvalid\n\t}\n}\n\nfunc actualLRPModificationTagToResponseModificationTag(modificationTag models.ModificationTag) receptor.ModificationTag {\n\treturn receptor.ModificationTag{\n\t\tEpoch: modificationTag.Epoch,\n\t\tIndex: modificationTag.Index,\n\t}\n}\n", "meta": {"content_hash": "081f0633d2a532b022420bf0e8d0a3ee", "timestamp": "", "source": "github", "line_count": 47, 
"max_line_length": 121, "avg_line_length": 34.59574468085106, "alnum_prop": 0.7878228782287823, "repo_name": "mikegehard/lattice", "id": "f902410a95f756fb23089e415597384a13835119", "size": "1626", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ltc/Godeps/_workspace/src/github.com/cloudfoundry-incubator/receptor/serialization/actual_lrps.go", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Go", "bytes": "345389"}, {"name": "Shell", "bytes": "43093"}]}} {"text": "\n\npackage org.apache.webapp.admin.realm;\n\nimport java.io.IOException;\nimport java.net.URLEncoder;\nimport java.util.Locale;\nimport java.util.ArrayList;\nimport javax.servlet.ServletException;\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport javax.servlet.http.HttpSession;\nimport org.apache.struts.action.Action;\nimport org.apache.struts.action.ActionErrors;\nimport org.apache.struts.action.ActionForm;\nimport org.apache.struts.action.ActionForward;\nimport org.apache.struts.action.ActionMapping;\nimport org.apache.struts.util.MessageResources;\nimport org.apache.webapp.admin.LabelValueBean;\nimport org.apache.webapp.admin.Lists;\n\n/**\n * The Action that sets up Add Realm transactions.\n *\n * @author Manveen Kaur\n * @version $Revision: 1.5 $ $Date: 2002/08/13 22:35:20 $\n */\n\npublic class AddRealmAction extends Action {\n\n /**\n * The MessageResources we will be retrieving messages from.\n */\n private MessageResources resources = null;\n\n // the list for types of realms\n private ArrayList types = null;\n\n // --------------------------------------------------------- Public Methods\n\n /**\n * Process the specified HTTP request, and create the corresponding HTTP\n * response (or forward to another web component that will create it).\n * Return an ActionForward instance describing where and how\n * control should be forwarded, or null if the response has\n * already been completed.\n *\n * @param 
mapping The ActionMapping used to select this instance\n * @param actionForm The optional ActionForm bean for this request (if any)\n * @param request The HTTP request we are processing\n * @param response The HTTP response we are creating\n *\n * @exception IOException if an input/output error occurs\n * @exception ServletException if a servlet exception occurs\n */\n public ActionForward perform(ActionMapping mapping,\n ActionForm form,\n HttpServletRequest request,\n HttpServletResponse response)\n throws IOException, ServletException {\n\n // Acquire the resources that we need\n HttpSession session = request.getSession();\n Locale locale = (Locale) session.getAttribute(Action.LOCALE_KEY);\n if (resources == null) {\n resources = getServlet().getResources();\n }\n\n // Fill in the form values for display and editing\n\n String realmTypes[] = new String[4];\n realmTypes[0] = \"UserDatabaseRealm\";\n realmTypes[1] = \"JNDIRealm\";\n realmTypes[2] = \"MemoryRealm\";\n realmTypes[3] = \"JDBCRealm\";\n\n String parent = request.getParameter(\"parent\");\n String type = request.getParameter(\"type\");\n if (type == null)\n type = \"UserDatabaseRealm\"; // default type is UserDatabaseRealm\n\n types = new ArrayList();\n // the first element in the select list should be the type selected\n types.add(new LabelValueBean(type,\n \"/admin/AddRealm.do?parent=\" + URLEncoder.encode(parent)\n + \"&type=\" + type));\n for (int i=0; i< realmTypes.length; i++) {\n if (!type.equalsIgnoreCase(realmTypes[i])) {\n types.add(new LabelValueBean(realmTypes[i],\n \"/admin/AddRealm.do?parent=\" + URLEncoder.encode(parent)\n + \"&type=\" + realmTypes[i]));\n }\n }\n\n if (\"UserDatabaseRealm\".equalsIgnoreCase(type)) {\n createUserDatabaseRealm(session, parent);\n } else if (\"JNDIRealm\".equalsIgnoreCase(type)) {\n createJNDIRealm(session, parent);\n } else if (\"MemoryRealm\".equalsIgnoreCase(type)) {\n createMemoryRealm(session, parent);\n } else {\n //JDBC\n createJDBCRealm(session, 
parent);\n }\n // Forward to the realm display page\n return (mapping.findForward(type));\n\n }\n\n private void createUserDatabaseRealm(HttpSession session, String parent) {\n\n UserDatabaseRealmForm realmFm = new UserDatabaseRealmForm();\n session.setAttribute(\"userDatabaseRealmForm\", realmFm);\n realmFm.setAdminAction(\"Create\");\n realmFm.setObjectName(\"\");\n realmFm.setParentObjectName(parent);\n String realmType = \"UserDatabaseRealm\";\n realmFm.setNodeLabel(\"Realm (\" + realmType + \")\");\n realmFm.setRealmType(realmType);\n realmFm.setDebugLvl(\"0\");\n realmFm.setResource(\"\");\n realmFm.setDebugLvlVals(Lists.getDebugLevels());\n realmFm.setRealmTypeVals(types);\n }\n\n private void createJNDIRealm(HttpSession session, String parent) {\n\n JNDIRealmForm realmFm = new JNDIRealmForm();\n session.setAttribute(\"jndiRealmForm\", realmFm);\n realmFm.setAdminAction(\"Create\");\n realmFm.setObjectName(\"\");\n realmFm.setParentObjectName(parent);\n String realmType = \"JNDIRealm\";\n realmFm.setNodeLabel(\"Realm (\" + realmType + \")\");\n realmFm.setRealmType(realmType);\n realmFm.setDebugLvl(\"0\");\n realmFm.setDigest(\"\");\n realmFm.setRoleBase(\"\");\n realmFm.setUserSubtree(\"false\");\n realmFm.setRoleSubtree(\"false\");\n realmFm.setRolePattern(\"\");\n realmFm.setUserRoleName(\"\");\n realmFm.setRoleName(\"\");\n realmFm.setRoleBase(\"\");\n realmFm.setContextFactory(\"\");\n realmFm.setUserBase(\"\");\n realmFm.setUserPattern(\"\");\n realmFm.setUserSearch(\"\");\n realmFm.setUserPassword(\"\");\n realmFm.setConnectionName(\"\");\n realmFm.setConnectionPassword(\"\");\n realmFm.setConnectionURL(\"\");\n realmFm.setDebugLvlVals(Lists.getDebugLevels());\n realmFm.setSearchVals(Lists.getBooleanValues());\n realmFm.setRealmTypeVals(types);\n }\n\n private void createMemoryRealm(HttpSession session, String parent) {\n\n MemoryRealmForm realmFm = new MemoryRealmForm();\n session.setAttribute(\"memoryRealmForm\", realmFm);\n 
realmFm.setAdminAction(\"Create\");\n realmFm.setObjectName(\"\");\n realmFm.setParentObjectName(parent);\n String realmType = \"MemoryRealm\";\n realmFm.setNodeLabel(\"Realm (\" + realmType + \")\");\n realmFm.setRealmType(realmType);\n realmFm.setDebugLvl(\"0\");\n realmFm.setPathName(\"\");\n realmFm.setDebugLvlVals(Lists.getDebugLevels());\n realmFm.setRealmTypeVals(types);\n }\n\n private void createJDBCRealm(HttpSession session, String parent) {\n\n JDBCRealmForm realmFm = new JDBCRealmForm();\n session.setAttribute(\"jdbcRealmForm\", realmFm);\n realmFm.setAdminAction(\"Create\");\n realmFm.setObjectName(\"\");\n realmFm.setParentObjectName(parent);\n String realmType = \"JDBCRealm\";\n realmFm.setNodeLabel(\"Realm (\" + realmType + \")\");\n realmFm.setRealmType(realmType);\n realmFm.setDebugLvl(\"0\");\n realmFm.setDigest(\"\");\n realmFm.setDriver(\"\");\n realmFm.setRoleNameCol(\"\");\n realmFm.setPasswordCol(\"\");\n realmFm.setUserTable(\"\");\n realmFm.setRoleTable(\"\");\n realmFm.setConnectionName(\"\");\n realmFm.setConnectionPassword(\"\");\n realmFm.setConnectionURL(\"\");\n realmFm.setDebugLvlVals(Lists.getDebugLevels());\n realmFm.setRealmTypeVals(types);\n }\n\n\n\n}\n", "meta": {"content_hash": "9f59cebd16b0faf2a91280e5b5d50f57", "timestamp": "", "source": "github", "line_count": 200, "max_line_length": 79, "avg_line_length": 37.285, "alnum_prop": 0.6403379375083814, "repo_name": "devjin24/howtomcatworks", "id": "f653b9093fbad4c95195360a324d63168ce11c8e", "size": "10327", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "bookrefer/jakarta-tomcat-4.1.12-src/webapps/admin/WEB-INF/classes/org/apache/webapp/admin/realm/AddRealmAction.java", "mode": "33261", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "52224"}, {"name": "C", "bytes": "2123859"}, {"name": "C++", "bytes": "5454"}, {"name": "CSS", "bytes": "4762"}, {"name": "HTML", "bytes": "232523"}, {"name": "Java", "bytes": "25349050"}, {"name": 
"Makefile", "bytes": "2331"}, {"name": "NSIS", "bytes": "25933"}, {"name": "Perl", "bytes": "100975"}, {"name": "Shell", "bytes": "49871"}, {"name": "Visual Basic", "bytes": "9998"}, {"name": "XSLT", "bytes": "27566"}]}} {"text": "\nUTF-8\n\n AboutDialog\n \n About Lambocoin\n Over Lambocoin\n \n \n <b>Lambocoin</b> version\n <html><head/><body><p><span style=\" font-weight:600;\">Lambocoin</span></p></body></html>\n \n \n\n AddressBookPage\n \n Address Book\n Adresboek\n \n \n These are your Lambocoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.\n Dit zijn je Lambocoin adressen om bedragen te ontvangen. Je kan een verschillend adres opgeven voor iedere geaddresseerde zodat je kan achterhalen wie jouw betaalt.\n \n \n Double-click to edit address or label\n Dubbelklik om adres of label te wijzigen\n \n \n Create a new address\n Maak een nieuw adres aan\n \n \n Copy the currently selected address to the system clipboard\n Kopieer het huidig geselecteerde adres naar het klembord\n \n \n &New Address\n &Nieuw Adres\n \n \n &Copy Address\n &Kopieer Adres\n \n \n Show &QR Code\n Toon &QR-Code\n \n \n Sign a message to prove you own this address\n Onderteken een bericht om te bewijzen dat u dit adres bezit\n \n \n &Sign Message\n &Onderteken Bericht\n \n \n Delete the currently selected address from the list. Only sending addresses can be deleted.\n Verwijder het huidige geselecteerde adres van de lijst. 
Alleen verzend-adressen kunnen verwijderd worden, niet uw ontvangstadressen.\n \n \n &Delete\n &Verwijder\n \n \n Copy &Label\n Kopieer &Label\n \n \n &Edit\n &Bewerken\n \n \n Export Address Book Data\n Exporteer Gegevens van het Adresboek\n \n \n Comma separated file (*.csv)\n Kommagescheiden bestand (*.csv)\n \n \n Error exporting\n Fout bij exporteren\n \n \n Could not write to file %1.\n Kon niet schrijven naar bestand %1.\n \n\n\n AddressTableModel\n \n Label\n Label\n \n \n Address\n Adres\n \n \n (no label)\n (geen label)\n \n \n\n AskPassphraseDialog\n \n Enter passphrase\n Huidige wachtwoordzin\n \n \n New passphrase\n Nieuwe wachtwoordzin\n \n \n Repeat new passphrase\n Herhaal wachtwoordzin\n \n \n Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.\n Vul een nieuw wachtwoord in voor uw portemonnee. <br/> Gebruik een wachtwoord van <b>10 of meer lukrake karakters</b>, of <b> acht of meer woorden</b> . 
\n \n \n Encrypt wallet\n Versleutel portemonnee\n \n \n This operation needs your wallet passphrase to unlock the wallet.\n Deze operatie vereist uw portemonnee wachtwoordzin om de portemonnee te openen.\n \n \n Unlock wallet\n Open portemonnee\n \n \n This operation needs your wallet passphrase to decrypt the wallet.\n Deze operatie vereist uw portemonnee wachtwoordzin om de portemonnee te ontsleutelen\n \n \n Decrypt wallet\n Ontsleutel portemonnee\n \n \n Change passphrase\n Wijzig de wachtwoordzin\n \n \n Confirm wallet encryption\n Bevestig versleuteling van de portemonnee\n \n \n Wallet encrypted\n Portemonnee versleuteld\n \n \n Warning: The Caps Lock key is on.\n Waarschuwing: De Caps-Lock-toets staat aan.\n \n \n Wallet encryption failed\n Portemonneeversleuteling mislukt\n \n \n WARNING: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR PAYCOINS</b>!\nAre you sure you wish to encrypt your wallet?\n WAARSCHUWING: Indien je de portemonnee versleutelt en je wachtwoordzin verliest, dan verlies je <b> AL JE PAYCOINS</b>!\nWeet je zeker dat je de portemonee wilt versleutelen?\n \n \n Lambocoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your Lambocoins from being stolen by malware infecting your computer.\n Lambocoin sluit nu af om het versleutelings proces te beeindigen. Onthoud dat het versleutelen van de portemonnee je Lambocoins niet volledig kan beschermen tegen schadelijke software op een geinfecteerde computer\n \n \n Wallet encryption failed due to an internal error. Your wallet was not encrypted.\n Portemonneeversleuteling mislukt door een interne fout. 
Uw portemonnee is niet versleuteld.\n \n \n The supplied passphrases do not match.\n De opgegeven wachtwoordzin is niet correct\n \n \n Wallet unlock failed\n Portemonnee openen mislukt\n \n \n The passphrase entered for the wallet decryption was incorrect.\n De opgegeven wachtwoordzin voor de portemonnee-ontsleuteling is niet correct.\n \n \n Wallet decryption failed\n Portemonnee-ontsleuteling mislukt\n \n \n Wallet passphrase was successfully changed.\n Portemonnee wachtwoordzin is succesvol gewijzigd\n \n\n\n BitcoinGUI\n \n &Overview\n &Overzicht\n \n \n Show general overview of wallet\n Toon algemeen overzicht van de portemonnee\n \n \n &Transactions\n &Transacties\n \n \n Browse transaction history\n Blader door transactieverleden\n \n \n &Address Book\n &Adresboek\n \n \n Edit the list of stored addresses and labels\n Bewerk de lijst van opgeslagen adressen en labels\n \n \n &Receive coins\n &Ontvang munten\n \n \n Show the list of addresses for receiving payments\n Toon lijst van betalingsadressen\n \n \n &Send coins\n &Verstuur munten\n \n \n Prove you control an address\n Bewijs dat u een adres bezit\n \n \n E&xit\n &Afsluiten\n \n \n Quit application\n Programma afsluiten\n \n \n Show information about Lambocoin\n Toon informatie over Lambocoin\n \n \n About &Qt\n Over &Qt\n \n \n Show information about Qt\n Toon informatie over Qt\n \n \n &Options...\n &Opties...\n \n \n &Export...\n &Exporteer...\n \n \n Export the data in the current tab to a file\n Exporteer de data in de huidige tab naar een bestand\n \n \n Encrypt or decrypt wallet\n Versleutel of ontsleutel portemonnee\n \n \n &Unlock Wallet for Minting Only\n &Ontsleutel portemonnee alleen om te minten\n \n \n Unlock wallet only for minting. Sending coins will still require the passphrase.\n Ontsleutel portemonnee alleen om te minten. 
Voor het versturen van munten is nog een wachtwoordzin nodig\n \n \n Backup wallet to another location\n &Backup portemonnee naar een andere locatie\n \n \n Change the passphrase used for wallet encryption\n Wijzig de wachtwoordzin voor uw portemonneversleuteling\n \n \n &Debug window\n &Debug scherm\n \n \n Open debugging and diagnostic console\n Open debugging en diagnostische console\n \n \n &File\n &Bestand\n \n \n &Settings\n &Instellingen\n \n \n &Help\n &Hulp\n \n \n Tabs toolbar\n Tab-werkbalk\n \n \n Actions toolbar\n Actie-werkbalk\n \n \n [testnet]\n [testnetwerk]\n \n \n Lambocoin Wallet\n Lambocoin portemonnee\n \n \n Send coins to a Lambocoin address\n Zend munten naar een Lambocoin adres\n \n \n &About Lambocoin\n Over Lambocoin\n \n \n Modify configuration options for Lambocoin\n Wijzig configuratie opties voor Lambocoin\n \n \n Show/Hide &Lambocoin\n Toon/Verberg &Lambocoin\n \n \n Show or hide the Lambocoin window\n Toon of verberg het Lambocoin scherm\n \n \n &Encrypt Wallet...\n &Versleutel Portemonnee\n \n \n &Backup Wallet...\n &Backup Portemonnee\n \n \n Lambocoin client\n Lambocoin client\n \n \n Synchronizing with network...\n Synchroniseren met netwerk...\n \n \n Downloaded %1 blocks of transaction history.\n %1 blokken van transactiehistorie opgehaald.\n \n \n %n second(s) ago\n %n seconde geleden%n seconde geleden\n \n \n %n minute(s) ago\n %n minuut geleden%n minuten geleden\n \n \n %n hour(s) ago\n %n uur geleden%n uur geleden\n \n \n %n day(s) ago\n %n dag geleden%n dagen geleden\n \n \n Up to date\n Bijgewerkt\n \n \n Catching up...\n Aan het bijwerken...\n \n \n Last received block was generated %1.\n Laatst ontvangen blok is %1 gegenereerd.\n \n \n Sent transaction\n Verzonden transactie\n \n \n Incoming transaction\n Binnenkomende transactie\n \n \n Date: %1\nAmount: %2\nType: %3\nAddress: %4\n\n Datum: %1\nBedrag: %2\nType: %3\nAdres: %4\n\n \n \n Wallet is <b>encrypted</b> and currently <b>unlocked for block minting only</b>\n 
Portemonnee is <b>versleuteld</b> en momenteel <b>geopend om blokken te minten</b>\n \n \n Wallet is <b>encrypted</b> and currently <b>unlocked</b>\n Portemonnee is <b>versleuteld</b> en momenteel <b>geopend</b>\n \n \n Wallet is <b>encrypted</b> and currently <b>locked</b>\n Portemonnee is <b>versleuteld</b> en momenteel <b>gesloten</b>\n \n \n Backup Wallet\n Backup Portemonnee\n \n \n Wallet Data (*.dat)\n Portemonnee-data (*.dat)\n \n \n Backup Failed\n Backup Mislukt\n \n \n There was an error trying to save the wallet data to the new location.\n Er is een fout opgetreden bij het wegschrijven van de portemonnee-data naar de nieuwe locatie.\n \n \n A fatal error occurred. Lambocoin can no longer continue safely and will quit.\n Een fatale fout heeft plaatsgevonden. Lambocoin kan niet langer veilig doorgaan en zal afsluiten.\n \n \n\n CoinControlDialog\n \n Coin Control\n Munt Controle\n \n \n Quantity:\n Aantal:\n \n \n 0\n 0\n \n \n Bytes:\n Bytes:\n \n \n Amount:\n Bedrag:\n \n \n 0.00 BTC\n 123.456 BTC {0.00 ?}\n \n \n Priority:\n Prioriteit:\n \n \n Fee:\n Transactiekosten:\n \n \n Low Output:\n Kopieer lage uitvoer\n \n \n no\n nee\n \n \n After Fee:\n Na aftrek kosten:\n \n \n Change:\n Teruggave:\n \n \n (un)select all\n Alles (de)selecteren\n \n \n Tree mode\n Toon boomstructuur\n \n \n List mode\n Lijst modus\n \n \n Amount\n Bedrag\n \n \n Address\n Adres\n \n \n Date\n Datum\n \n \n Confirmations\n Bevestigingen\n \n \n Confirmed\n Bevestigd\n \n \n Priority\n Prioriteit\n \n \n Copy address\n Kopieer adres\n \n \n Copy label\n Kopieer label\n \n \n Copy amount\n Kopieer bedrag\n \n \n Copy transaction ID\n Kopieer transactie ID\n \n \n Copy quantity\n Kopieer aantal\n \n \n Copy fee\n Kopieer kosten\n \n \n Copy after fee\n Kopieer na kosten\n \n \n Copy bytes\n Kopieer bytes\n \n \n Copy priority\n Kopieer prioriteit\n \n \n Copy low output\n Kopieer lage uitvoer\n \n \n Copy change\n Kopieer teruggave\n \n \n highest\n hoogste\n \n \n high\n 
Hoogste\n \n \n medium-high\n medium-hoog\n \n \n medium\n medium\n \n \n low-medium\n laag-medium\n \n \n low\n laag\n \n \n lowest\n laagste\n \n \n DUST\n STOF\n \n \n yes\n ja\n \n \n (no label)\n (geen label)\n \n \n change from %1 (%2)\n gewijzigd van %1 (%2)\n \n \n (change)\n (wijzig)\n \n \n\n DisplayOptionsPage\n \n &Unit to show amounts in: \n &Eenheid om bedrag in te tonen:\n \n \n Choose the default subdivision unit to show in the interface, and when sending coins\n Kies de standaard onderverdelingseenheid om weer te geven in uw programma, en voor het versturen van munten\n \n \n &Display addresses in transaction list\n Toon adressen in transactielijst\n \n \n Whether to show Lambocoin addresses in the transaction list\n Toon Lambocoinadressen in transactielijst\n \n \n Display coin control features (experts only!)\n Laat muntcontrole functies zien (alleen voor experts!)\n \n \n Whether to show coin control features or not\n Munt controle mogelijkheden of niet\n \n \n Warning\n Waarschuwing\n \n \n\n EditAddressDialog\n \n Edit Address\n Bewerk Adres\n \n \n &Label\n &Label\n \n \n The label associated with this address book entry\n Het label dat geassocieerd is met dit adres\n \n \n &Address\n &Adres\n \n \n The address associated with this address book entry. This can only be modified for sending addresses.\n Het adres dat geassocieerd is met deze adresboek-opgave. 
Dit kan alleen worden veranderd voor zend-adressen.\n \n \n New receiving address\n Nieuw ontvangstadres\n \n \n New sending address\n Nieuw adres om naar te versturen\n \n \n Edit receiving address\n Bewerk ontvangstadres\n \n \n Edit sending address\n Bewerk verzendadres\n \n \n The entered address \"%1\" is already in the address book.\n Het opgegeven adres \"%1\" bestaat al in uw adresboek.\n \n \n The entered address \"%1\" is not a valid Lambocoin address.\n Het ingevoerde adres \"%1\" is geen geldig Lambocoin adres.\n \n \n Could not unlock wallet.\n Kon de portemonnee niet openen.\n \n \n New key generation failed.\n Genereren nieuwe sleutel mislukt.\n \n\n\n GUIUtil::HelpMessageBox\n \n\n MainOptionsPage\n \n Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.\n Ontkoppel blok en adres database tijdens afsluiten. Dit betekent dat ze kunnen worden verplaatst naar een andere bestandslokatie, maar het afsluiten is daardoor langzamer. 
De portomonnee is altijd ontkoppeld.\n \n \n\n MintingTableModel\n \n Transaction\n Transactie\n \n \n Address\n Adres\n \n \n Age\n Leeftijd\n \n \n Balance\n Balans\n \n \n minutes\n minuten\n \n \n hours\n uren\n \n \n days\n dagen\n \n \n\n MintingView\n \n 10 min\n 10 min\n \n \n 24 hours\n 24 uur\n \n \n 30 days\n 30 dagen\n \n \n 90 days\n 90 dagen\n \n \n Comma separated file (*.csv)\n Kommagescheiden bestand (*.csv)\n \n \n Address\n Adres\n \n \n Transaction\n Transactie\n \n \n Age\n Leeftijd\n \n \n Balance\n Balans\n \n \n Error exporting\n Fout bij exporteren\n \n \n Could not write to file %1.\n Kon niet schrijven naar bestand %1.\n \n \n\n MultisigAddressEntry\n \n Form\n Vorm\n \n \n Alt+P\n Alt+P\n \n \n &Address:\n &Adres\n \n \n Choose address from address book\n Kies adres uit adresboek\n \n \n Alt+A\n Alt+A\n \n \n Label:\n Label:\n \n\n\n MultisigDialog\n \n Clear all\n Verwijder alles\n \n \n Fee:\n Transactiekosten:\n \n \n Paste address from clipboard\n Plak adres vanuit klembord\n \n \n Alt+P\n Alt+P\n \n \n\n MultisigInputEntry\n \n Form\n Vorm\n \n \n Alt+P\n Alt+P\n \n \n Alt+A\n Alt+A\n \n\n\n NetworkOptionsPage\n \n Network\n Netwerk\n \n \n\n OptionsDialog\n \n Options\n Opties\n \n\n\n OverviewPage\n \n Form\n Vorm\n \n \n Balance:\n Saldo:\n \n \n Number of transactions:\n Aantal transacties:\n \n \n Unconfirmed:\n Onbevestigd:\n \n \n Stake:\n Inzet:\n \n \n Wallet\n Portemonnee\n \n \n <b>Recent transactions</b>\n <b>Recente transacties</b>\n \n \n Your current balance\n Uw huidige saldo\n \n \n Your current stake\n Huidige inzet\n \n \n Total of transactions that have yet to be confirmed, and do not yet count toward the current balance\n Totaal aantal transacties dat nog moet worden bevestigd, en nog niet is meegeteld in uw huidige saldo \n \n \n Total number of transactions in wallet\n Totaal aantal transacties in uw portemonnee\n \n \n\n QRCodeDialog\n \n QR Code\n QR-code\n \n \n Request Payment\n Vraag betaling aan\n \n \n 
Amount:\n Bedrag:\n \n \n Label:\n Label:\n \n \n Message:\n Bericht:\n \n \n &Save As...\n &Opslaan Als...\n \n \n Error encoding URI into QR Code.\n Fout in codering URI naar QR-code\n \n \n Resulting URI too long, try to reduce the text for label / message.\n Resultaat URI te lang, probeer de tekst in te korten.\n \n \n PNG Images (*.png)\n PNG-Afbeeldingen (*.png)\n \n\n\n RPCConsole\n \n Client name\n Client naam\n \n \n N/A\n N.v.t.\n \n \n Client version\n Client versie\n \n \n Network\n Netwerk\n \n \n Number of connections\n Aantal connecties\n \n \n On testnet\n Op testnetwerk\n \n \n Block chain\n Blokkenketen\n \n \n Current number of blocks\n Huidig aantal blokken\n \n \n Estimated total blocks\n Geschat aantal blokken\n \n \n Last block time\n Laaste bloktijd\n \n \n Build date\n Bouwdatum\n \n \n Clear console\n Console opschonen\n \n \n Welcome to the Lambocoin RPC console.<br>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.<br>Type <b>help</b> for an overview of available commands.\n Welkom bij de Lambocoin RPC console. 
<br>Gebruik pijltjes naar boven en naar beneden om de geschiedenis te navigeren, en <b>Ctrl-L</b> om het scherm te wissen.<br>Typ <b>help</b> voor een overzicht met commandos.\n \n \n\n SendCoinsDialog\n \n Send Coins\n Verstuur munten\n \n \n Coin Control Features\n Munt controle opties\n \n \n Inputs...\n Toevoer...\n \n \n automatically selected\n automatisch geselecteerd\n \n \n Insufficient funds!\n Onvoldoende fondsen!\n \n \n Quantity:\n Aantal:\n \n \n 0\n 0\n \n \n Bytes:\n Bytes:\n \n \n Amount:\n Bedrag:\n \n \n 0.00 BTC\n 123.456 BTC {0.00 ?}\n \n \n Priority:\n Prioriteit:\n \n \n medium\n medium\n \n \n Fee:\n Transactiekosten:\n \n \n Low Output:\n Kopieer lage uitvoer\n \n \n no\n nee\n \n \n After Fee:\n Na aftrek kosten:\n \n \n Change\n Teruggave\n \n \n custom change address\n zelfopgegeven teruggaveadres\n \n \n Send to multiple recipients at once\n Verstuur aan verschillende ontvangers tegelijkertijd\n \n \n Remove all transaction fields\n Verwijder alle transactievelden\n \n \n Balance:\n Saldo:\n \n \n 123.456 BTC\n 123.456 BTC\n \n \n Confirm the send action\n Bevestig de verstuuractie\n \n \n &Send\n &Verstuur\n \n \n Copy quantity\n Kopieer aantal\n \n \n Copy amount\n Kopieer bedrag\n \n \n Copy fee\n Kopieer kosten\n \n \n Copy after fee\n Kopieer na kosten\n \n \n Copy bytes\n Kopieer bytes\n \n \n Copy priority\n Kopieer prioriteit\n \n \n Copy low output\n Kopieer lage uitvoer\n \n \n Copy change\n Kopieer teruggave\n \n \n <b>%1</b> to %2 (%3)\n <b>%1</b> aan %2 (%3)\n \n \n Confirm send coins\n Bevestig versturen munten\n \n \n Are you sure you want to send %1?\n Weet u zeker dat u %1 wil versturen?\n \n \n and \n en \n \n \n The amount to pay must be at least one cent (0.01).\n Het te betalen bedrag moet minimaal een cent zijn (0.01).\n \n \n Warning: Invalid Lambocoin address\n Waarschuwing: Ongeldig Lambocoin adres\n \n \n Warning: Unknown change address\n Waarschuwing: onbekend teruggave adres\n \n \n (no label)\n (geen 
label)\n \n \n Enter a Lambocoin address\n Voer een Lambocoin adres in\n \n \n\n SendCoinsEntry\n \n Form\n Vorm\n \n \n A&mount:\n Bedra&g:\n \n \n Pay &To:\n Betaal &Aan:\n \n \n Enter a label for this address to add it to your address book\n Vul een label in voor dit adres om het toe te voegen aan uw adresboek\n \n \n &Label:\n &Label:\n \n \n The address to send the payment to\n Het verzendadres voor de betaling\n \n \n Choose address from address book\n Kies adres uit adresboek\n \n \n Alt+A\n Alt+A\n \n \n Paste address from clipboard\n Plak adres vanuit klembord\n \n \n Alt+P\n Alt+P\n \n \n Remove this recipient\n Verwijder deze ontvanger\n \n\n\n SignVerifyMessageDialog\n \n Signatures - Sign / Verify a Message\n Ondertekeningen - Onderteken / verifieer een bericht\n \n \n &Sign Message\n &Onderteken Bericht\n \n \n You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.\n U kunt berichten ondertekenen met een van uw adressen om te bewijzen dat u dit adres bezit. Pas op dat u geen onduidelijke dingen ondertekent, want phishingaanvallen zouden u voor de gek kunnen houden om zo uw identiteit te stelen. 
Onderteken alleen berichten waarmee u het volledig eens bent.\n \n \n The address to sign the message with\n Het ondertekenings adres voor het bericht\n \n \n Choose previously used address\n Kies eerder gebruikt adres\n \n \n Alt+A\n Alt+A\n \n \n Paste address from clipboard\n Plak adres vanuit klembord\n \n \n Alt+P\n Alt+P\n \n \n Enter the message you want to sign here\n Typ hier het bericht dat u wilt ondertekenen\n \n \n Signature\n Ondertekening\n \n \n Copy the current signature to the system clipboard\n Kopieer de huidige onderteking naar het systeem klembord\n \n \n Sign the message to prove you own this Lambocoin address\n Bewijs dat je dit Lambocoin adres bezit door het te ondertekenen \n \n \n Sign &Message\n Onderteken &Bericht\n \n \n Reset all sign message fields\n Herinitialiseer alle ondertekende bericht velden\n \n \n Clear &All\n &Alles wissen\n \n \n &Verify Message\n &Verifieer bericht\n \n \n Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.\n Voer hieronder het ondertekeningsadres in, het bericht (vergeet niet precies alle regelafbrekingen, spaties, tabulaties, etc. in te voeren) en ondertekening om het bericht te verifieren. 
Wees voorzichtig om verder te lezen waar de tekst niet deel uitmaakt van het ondertekende bericht, dit om te voorkomen dat je verleid wordt door een man-in-het-midden aanval.\n \n \n The address the message was signed with\n Het adres waarmee het bericht getekend was\n \n \n Verify the message to ensure it was signed with the specified Lambocoin address\n Verifieer het bericht om vast te stellen dat het bericht ondertekend was met het gespecificeerde Lambocoin adres\n \n \n Verify &Message\n &Verifieer bericht\n \n \n Reset all verify message fields\n Herinitialiseer alle geverifieerde bericht velden\n \n \n Click \"Sign Message\" to generate signature\n Klik 'Onderteken bericht' om te ondertekenen\n \n \n Enter the signature of the message\n Voer de ondertekening in voor het bericht\n \n \n Enter a Lambocoin address\n Voer een Lambocoin adres in\n \n \n The entered address is invalid.\n Het ingevoerde adres is onjuist\n \n \n Please check the address and try again.\n Controleer het adres and probeer opnieuw.\n \n \n The entered address does not refer to a key.\n Het ingevoerde adres refereert niet naar een sleutel\n \n \n Wallet unlock was cancelled.\n Portemonnee ontsleuteling is afgebroken.\n \n \n Private key for the entered address is not available.\n Geheime sleutel voor het ingevoerde adres is niet beschikbaar.\n \n \n Message signing failed.\n Bericht ondertekening mislukt.\n \n \n Message signed.\n Bericht ontedertekend.\n \n \n The signature could not be decoded.\n De ondertekeing kan niet worden ontcijferd.\n \n \n Please check the signature and try again.\n Controleer de ondertekening and probeer opnieuw.\n \n \n The signature did not match the message digest.\n De ondertekening komt niet overeen met de bericht samenvatting.\n \n \n Message verification failed.\n Bericht verficatie mislukt.\n \n \n Message verified.\n Bericht geverifieerd.\n \n\n\n SplashScreen\n \n [testnet]\n [testnetwerk]\n \n\n\n TransactionDesc\n \n Open until %1\n Open tot %1\n \n 
\n %1/unconfirmed\n %1/onbevestigd\n \n \n %1 confirmations\n %1 bevestigingen\n \n \n Date\n Datum\n \n \n Transaction\n Transactie\n \n \n Amount\n Bedrag\n \n \n , has not been successfully broadcast yet\n , is nog niet succesvol uitgezonden\n \n \n unknown\n onbekend\n \n\n\n TransactionDescDialog\n \n Transaction details\n Transactiedetails\n \n \n This pane shows a detailed description of the transaction\n Dit venster laat een uitgebreide beschrijving van de transactie zien\n \n\n\n TransactionTableModel\n \n Date\n Datum\n \n \n Type\n Type\n \n \n Address\n Adres\n \n \n Amount\n Bedrag\n \n \n Open for %n block(s)\n Open gedurende %n blokOpen gedurende %n blokken\n \n \n Open until %1\n Open tot %1\n \n \n Offline (%1 confirmations)\n Niet verbonden (%1 bevestigingen)\n \n \n Unconfirmed (%1 of %2 confirmations)\n Onbevestigd (%1 van %2 bevestigd)\n \n \n Confirmed (%1 confirmations)\n Bevestigd (%1 bevestigingen)\n \n \n Mined balance will be available in %n more blocks\n Ontgonnen saldo komt beschikbaar na %n blokOntgonnen saldo komt beschikbaar na %n blokken\n \n \n This block was not received by any other nodes and will probably not be accepted!\n Dit blok is niet ontvangen bij andere nodes en zal waarschijnlijk niet worden geaccepteerd!\n \n \n Generated but not accepted\n Gegenereerd maar niet geaccepteerd\n \n \n Received with\n Ontvangen met\n \n \n Received from\n Ontvangen van\n \n \n Sent to\n Verzonden aan\n \n \n Payment to yourself\n Betaling aan uzelf\n \n \n Mined\n Ontgonnen\n \n \n Mint by stake\n Minten met inzet\n \n \n (n/a)\n (nvt)\n \n \n Transaction status. Hover over this field to show number of confirmations.\n Transactiestatus. 
Houd de muiscursor boven dit veld om het aantal bevestigingen te laten zien.\n \n \n Date and time that the transaction was received.\n Datum en tijd waarop deze transactie is ontvangen.\n \n \n Type of transaction.\n Type transactie.\n \n \n Destination address of transaction.\n Ontvangend adres van transactie\n \n \n Amount removed from or added to balance.\n Bedrag verwijderd van of toegevoegd aan saldo\n \n \n\n TransactionView\n \n All\n Alles\n \n \n Today\n Vandaag\n \n \n This week\n Deze week\n \n \n This month\n Deze maand\n \n \n Last month\n Vorige maand\n \n \n This year\n Dit jaar\n \n \n Range...\n Bereik...\n \n \n Received with\n Ontvangen met\n \n \n Sent to\n Verzonden aan\n \n \n To yourself\n Aan uzelf\n \n \n Mined\n Ontgonnen\n \n \n Mint by stake\n Minten met inzet\n \n \n Other\n Anders\n \n \n Enter address or label to search\n Vul adres of label in om te zoeken\n \n \n Min amount\n Min. bedrag\n \n \n Copy address\n Kopieer adres\n \n \n Copy label\n Kopieer label\n \n \n Copy amount\n Kopieer bedrag\n \n \n Edit label\n Bewerk label\n \n \n Export Transaction Data\n Exporteer transactiegegevens\n \n \n Comma separated file (*.csv)\n Kommagescheiden bestand (*.csv)\n \n \n Confirmed\n Bevestigd\n \n \n Date\n Datum\n \n \n Type\n Type\n \n \n Label\n Label\n \n \n Address\n Adres\n \n \n Amount\n Bedrag\n \n \n ID\n ID\n \n \n Error exporting\n Fout bij exporteren\n \n \n Could not write to file %1.\n Kon niet schrijven naar bestand %1.\n \n \n Range:\n Bereik:\n \n \n to\n naar\n \n\n\n WalletModel\n \n Sending...\n Versturen...\n \n\n\n WindowOptionsPage\n \n\n bitcoin-core\n \n Usage:\n Gebruik:\n \n \n Lambocoin version\n Lambocoin versie\n \n \n List commands\n List van commando's\n\n \n \n Get help for a command\n Toon hulp voor een commando\n\n \n \n Options:\n Opties:\n\n \n \n Specify configuration file (default: Lambocoin.conf)\n Configuratiebestand specificeren (standaard: Lambocoin.conf)\n \n \n Specify pid file (default: 
Lambocoind.pid)\n Specifieer pid-bestand (standaard: Lambocoind.pid)\n\n \n \n Generate coins\n Genereer munten\n\n \n \n Don't generate coins\n Genereer geen munten\n \n \n Specify data directory\n Stel datamap in\n\n \n \n Set database cache size in megabytes (default: 25)\n Database cache instellen in Mb (standaard: 25)\n \n \n Set database disk log size in megabytes (default: 100)\n Database logbestandgrootte instellen in Mb (standaard: 100)\n \n \n Specify connection timeout (in milliseconds)\n Specificeer de time-out tijd (in milliseconden)\n\n \n \n Maintain at most <n> connections to peers (default: 125)\n Onderhoud maximaal <n> verbindingen naar peers (standaard: 125)\n \n \n Add a node to connect to and attempt to keep the connection open\n Maak connectie met een node en houd deze open\n \n \n Connect only to the specified node\n Verbind alleen met deze node\n\n \n \n Accept connections from outside (default: 1)\n Connecties van buiten accepteren (standaard: 1)\n \n \n Find peers using DNS lookup (default: 1)\n Zoek anderen via DNS (standaard: 1)\n \n \n Threshold for disconnecting misbehaving peers (default: 100)\n Drempel om verbinding te verbreken naar zich misdragende peers (standaard: 100)\n \n \n Number of seconds to keep misbehaving peers from reconnecting (default: 86400)\n Aantal seconden dat zich misdragende peers niet opnieuw mogen verbinden (standaard: 86400)\n \n \n Maximum per-connection receive buffer, <n>*1000 bytes (default: 10000)\n Maximale ontvangstbuffer per connectie, <n>*1000 bytes (standaard: 10000)\n \n \n Maximum per-connection send buffer, <n>*1000 bytes (default: 10000)\n Maximale zendbuffer per connectie, <n>*1000 bytes (standaard: 10000)\n \n \n Use Universal Plug and Play to map the listening port (default: 1)\n Gebruik uPNP om de netwerk poort in te delen (standaard 1)\n \n \n Use Universal Plug and Play to map the listening port (default: 0)\n Gebruik uPNP om de netwerkpoort in te delen (standaard 0)\n \n \n Fee per KB to 
add to transactions you send\n Kosten per kB voor te versturen transacties\n \n \n Run in the background as a daemon and accept commands\n Draai in de achtergrond als daemon en aanvaard commando's\n\n \n \n Use the test network\n Gebruik het testnetwerk\n\n \n \n Prepend debug output with timestamp\n Voorzie de debuggingsuitvoer van een tijdsaanduiding\n \n \n Send trace/debug info to console instead of debug.log file\n Stuur opsporing/debug-info naar de console in plaats van het debug.log bestand\n \n \n Send trace/debug info to debugger\n Stuur opsporings/debug-info naar debugger\n \n \n Username for JSON-RPC connections\n Gebruikersnaam voor JSON-RPC verbindingen\n\n \n \n Password for JSON-RPC connections\n Wachtwoord voor JSON-RPC verbindingen\n\n \n \n Allow JSON-RPC connections from specified IP address\n Sta JSON-RPC verbindingen van opgegeven IP adres toe\n\n \n \n Send commands to node running on <ip> (default: 127.0.0.1)\n Verstuur commando's naar proces dat op <ip> draait (standaard: 127.0.0.1)\n\n \n \n Execute command when the best block changes (%s in cmd is replaced by block hash)\n Voer commando uit indien het hoogste blok verandert (%s in cmd is herplaats met blok hash)\n \n \n Upgrade wallet to latest format\n Opwaardeer portemonnee naar laatste formaat\n \n \n Set key pool size to <n> (default: 100)\n Stel sleutelpoelgrootte in op <n> (standaard: 100)\n\n \n \n Rescan the block chain for missing wallet transactions\n Doorzoek de blokkenketen op ontbrekende portemonnee-transacties\n \n \n How many blocks to check at startup (default: 2500, 0 = all)\n Aantal blokken bij opstarten controleren (standaard: 2500, 0 = alle)\n \n \n How thorough the block verification is (0-6, default: 1)\n Grondigheid blok verificatie (0-6, standaard: 1)\n \n \n Use OpenSSL (https) for JSON-RPC connections\n Gebruik OpenSSL (https) voor JSON-RPC verbindingen\n\n \n \n Server certificate file (default: server.cert)\n Certificaat-bestand voor server (standaard: 
server.cert)\n\n \n \n Server private key (default: server.pem)\n Geheime sleutel voor server (standaard: server.pem)\n\n \n \n Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)\n Aanvaardbare sleuteltypen (standaard: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)\n \n \n Cannot obtain a lock on data directory %s. Lambocoin is probably already running.\n Blokkeren van data folder %s is niet gelukt. Lambocoin is mogelijk al opgestart.\n \n \n Lambocoin\n Lambocoin\n \n \n Error loading wallet.dat: Wallet requires newer version of Lambocoin\n Fout geconstateerd bij het laden van wallet.dat: Portemonnee vereist een nieuwere versie van Lambocoin\n \n \n Wallet needed to be rewritten: restart Lambocoin to complete\n Portemonnee dient opnieuw bewerkt te worden: start Lambocoin opnieuw op om te voltooien\n \n \n Warning: Please check that your computer's date and time are correct. If your clock is wrong Lambocoin will not work properly.\n Waarschuwing: controleer of de datum en tijd op uw computer correct zijn. Indien uw klok verkeerd staat, zal Lambocoin niet goed werken\n \n \n Loading addresses...\n Adressen aan het laden...\n \n \n Loading block index...\n Blokindex aan het laden...\n \n \n Error loading blkindex.dat\n Fout bij laden blkindex.dat\n \n \n Loading wallet...\n Portemonnee aan het laden...\n \n \n Error loading wallet.dat: Wallet corrupted\n Fout bij laden wallet.dat: Portemonnee corrupt\n \n \n Error loading wallet.dat\n Fout bij laden wallet.dat\n \n \n Cannot downgrade wallet\n Kan portemonnee niet degraderen\n \n \n Cannot initialize keypool\n Kan sleutelpoel niet initaliseren\n \n \n Cannot write default address\n Kan niet schrijven naar standaard adres\n \n \n Rescanning...\n Opnieuw aan het scannen ...\n \n \n Done loading\n Klaar met laden\n \n \n Warning: -paytxfee is set very high. This is the transaction fee you will pay if you send a transaction.\n Waarschuwing: -paytxfee is zeer hoog ingesteld. 
Dit zijn de transactiekosten die u betaalt bij het versturen van een transactie.\n \n \n To use the %s option\n Gebruik de %s optie\n \n \n Error\n Fout\n \n \n An error occurred while setting up the RPC port %i for listening: %s\n Een fout geconstateerd met het opzetten van RPC port %i om te luisteren: %s\n \n \n You must set rpcpassword=<password> in the configuration file:\n%s\nIf the file does not exist, create it with owner-readable-only file permissions.\n Je moet rpcpassword=<password> in het configuratie bestand instellen:\n%s\nIndien het bestand niet bestaat, maak het aan met alleen gebruikers leesrechten op het bestand.\n \n \n Error: Wallet locked, unable to create transaction \n Fout: Portemonnee is gesloten, kan geen nieuwe transactie aanmaken\n \n \n Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds \n Fout: Deze transactie vereist minstens %s transactie kosten vanwege het bedrag, moeilijkheid, of recent ontvangen fondsen\n \n \n Error: Transaction creation failed \n Fout: Aanmaak transactie mislukt\n \n \n Sending...\n Versturen...\n \n \n Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.\n Fout: De transactie was afgewezen. 
Dit kan gebeuren als u eerder uitgegeven munten opnieuw wilt versturen, zoals wanneer u een kopie van uw wallet.dat heeft gebruikt en in de kopie deze munten zijn gemarkeerd als uitgegeven, maar in de huidige nog niet.\n \n \n Invalid amount\n Onjuist bedrag\n \n \n Insufficient funds\n Onvoldoende fondsen\n \n\n\n", "meta": {"content_hash": "e67a53e9ce08617135ab53d1810ab78b", "timestamp": "", "source": "github", "line_count": 1984, "max_line_length": 396, "avg_line_length": 36.482358870967744, "alnum_prop": 0.6512897030988796, "repo_name": "LambocoinFoundation/Lambocoin", "id": "606708bd1411301b8af658afd71f1adb0d717194", "size": "72381", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/qt/locale/paycoin_nl.ts", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "5537"}, {"name": "C++", "bytes": "2381407"}, {"name": "Makefile", "bytes": "9025"}, {"name": "NSIS", "bytes": "6629"}, {"name": "Objective-C", "bytes": "858"}, {"name": "Objective-C++", "bytes": "3537"}, {"name": "Python", "bytes": "54574"}, {"name": "QMake", "bytes": "11762"}, {"name": "Roff", "bytes": "12841"}, {"name": "Shell", "bytes": "5118"}]}} {"text": "\n\n#import \n\n#import \n#import \n#import \n#import \n\nNS_ASSUME_NONNULL_BEGIN\n\n/**\n Protocol for Classes to declare intent to implement responses to commands\n */\n@protocol FBCommandHandler \n\n/**\n * Should return map of FBRouteCommandHandler block with keys as supported routes\n *\n * @return map an NSArray of routes.\n */\n+ (NSArray *)routes;\n\n@optional\n/**\n * @return BOOL deciding if class should be added to route handlers automatically, default (if not implemented) is YES\n */\n+ (BOOL)shouldRegisterAutomatically;\n\n@end\n\nNS_ASSUME_NONNULL_END\n", "meta": {"content_hash": "059fc37c5e0ef04073302605c3d61e3a", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 118, "avg_line_length": 22.84375, "alnum_prop": 0.7688098495212038, "repo_name": "calabash/WebDriverAgent", 
"id": "bde028d68b01d9668b9ea11664fe07bfcf31d522", "size": "1039", "binary": false, "copies": "13", "ref": "refs/heads/master", "path": "WebDriverAgentLib/Routing/FBCommandHandler.h", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "CSS", "bytes": "2413"}, {"name": "HTML", "bytes": "191"}, {"name": "JavaScript", "bytes": "17764"}, {"name": "Objective-C", "bytes": "446149"}, {"name": "Shell", "bytes": "3516"}]}} {"text": "package io.budgetapp.configuration;\n\nimport com.bazaarvoice.dropwizard.assets.AssetsBundleConfiguration;\nimport com.bazaarvoice.dropwizard.assets.AssetsConfiguration;\nimport com.fasterxml.jackson.annotation.JsonProperty;\nimport io.budgetapp.model.Budget;\nimport io.budgetapp.model.Category;\nimport io.dropwizard.Configuration;\nimport io.dropwizard.db.DataSourceFactory;\n\nimport javax.validation.Valid;\nimport javax.validation.constraints.NotNull;\nimport java.util.ArrayList;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\n\n/**\n *\n */\npublic class AppConfiguration extends Configuration implements AssetsBundleConfiguration {\n\n @Valid\n @NotNull\n private DataSourceFactory database = new DataSourceFactory();\n\n @Valid\n @NotNull\n private final AssetsConfiguration assets = new AssetsConfiguration();\n\n @Valid\n @NotNull\n private List categories = new ArrayList<>();\n\n @Valid\n @NotNull\n @JsonProperty(\"budgets\")\n private Map> budgets = new LinkedHashMap<>();\n\n public DataSourceFactory getDataSourceFactory() {\n return database;\n }\n\n @Override\n public AssetsConfiguration getAssetsConfiguration() {\n return assets;\n }\n\n @JsonProperty(\"assets\")\n public AssetsConfiguration getAssets() {\n return assets;\n }\n\n @JsonProperty(\"database\")\n public void setDatabase(DataSourceFactory database) {\n this.database = database;\n }\n\n @JsonProperty(\"categories\")\n public void setCategories(List categories) {\n this.categories = categories;\n }\n\n public void setBudgets(Map> 
budgets) {\n this.budgets = budgets;\n }\n\n public List getCategories() {\n return categories;\n }\n\n public Map> getBudgets() {\n return budgets;\n }\n}", "meta": {"content_hash": "6ac792ce4188a0e6b9d778c0cd448972", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 90, "avg_line_length": 25.0, "alnum_prop": 0.7168, "repo_name": "navneetkarnani/budgetapp", "id": "1728522cce034a4b84e09529b2528e03d9cc1287", "size": "1875", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/main/java/io/budgetapp/configuration/AppConfiguration.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "86255"}, {"name": "HTML", "bytes": "53279"}, {"name": "Java", "bytes": "154498"}, {"name": "JavaScript", "bytes": "61073"}, {"name": "Shell", "bytes": "174"}]}} {"text": "Usage Examples\n==============\n\nPika has various methods of use, between the synchronous BlockingConnection adapter and the various asynchronous connection adapter. The following examples illustrate the various ways that you can use Pika in your projects.\n\nPika\u6709\u5f88\u591a\u79cd\u4f7f\u7528\u65b9\u6cd5\uff0c\u5305\u62ecBlockingConnection\u9002\u914d\u5668\u548c\u5404\u79cd\u5f02\u6b65\u8fde\u63a5\u9002\u914d\u5668\u4e4b\u95f4\u3002\u4e0b\u9762\u7684\u4f8b\u5b50\u8bf4\u660e\uff0c\u4f60\u53ef\u4ee5\u5728\u4f60\u7684\u9879\u76ee\u4e2d\u4f7f\u7528Pika\u7684\u5404\u79cd\u65b9\u6cd5\u3002\n\n.. 
toctree::\n :glob:\n :maxdepth: 1\n\n examples/using_urlparameters\n examples/connecting_async\n examples/blocking_basic_get\n examples/blocking_consume\n examples/blocking_consumer_generator\n examples/comparing_publishing_sync_async\n examples/blocking_delivery_confirmations\n examples/blocking_publish_mandatory\n examples/asynchronous_consumer_example\n examples/asynchronous_publisher_example\n examples/twisted_example\n examples/tornado_consumer\n", "meta": {"content_hash": "784056a4f2ea0648a9a07b9fabd915e1", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 223, "avg_line_length": 35.04347826086956, "alnum_prop": 0.8014888337468983, "repo_name": "zixiliuyue/pika", "id": "1505adedff05b78446494407599aca3a2773def9", "size": "910", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/examples.rst", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Python", "bytes": "842302"}]}} {"text": "## m\n\n---\n\n- [Usage](#usage)\n- [Binding to data](#binding-to-data)\n- [Using HTML entities](#using-html-entities)\n- [Accessing the real DOM element](#accessing-the-real-dom-element)\n- [Persisting config data](#persisting-config-data)\n- [Destructors](#destructors)\n- [Persisting DOM elements across route changes](#persisting-dom-elements-across-route-changes)\n- [SVG](#svg)\n- [Dealing with focus](#dealing-with-focus)\n- [Dealing with sorting and deleting in lists](#dealing-with-sorting-and-deleting-in-lists)\n- [Component shorthand](#component-shorthand)\n- [Signature](#signature)\n- [The `config` attribute](#the-config-attribute)\n---\n\nThis is a convenience method to compose virtual elements that can be rendered via [`m.render()`](mithril.render.md).\n\nYou are encouraged to use CSS selectors to define virtual elements. 
See \"Signature\" section for details.\n\n---\n\n### Usage\n\nYou can use simple tag selectors to make templates resemble HTML:\n\n```javascript\nm(\"br\"); //yields a virtual element that represents
\n\nm(\"div\", \"Hello\"); //yields
Hello
\n\nm(\"div\", {class: \"container\"}, \"Hello\"); //yields
Hello
\n```\n\nNote that the output value from `m()` is not an actual DOM element. In order to turn the virtual element into a real DOM element, you must call [`m.render()`](mithril.render.md).\n\n```javascript\nm.render(document.body, m(\"br\")); //puts a
in \n```\n\nYou can also use more complex CSS selectors:\n\n```javascript\nm(\".container\"); //yields
\n\nm(\"#layout\"); //yields
\n\nm(\"a[name=top]\"); //yields \n\nm(\"[contenteditable]\"); //yields
\n\nm(\"a#google.external[href='http://google.com']\", \"Google\"); //yields Google\n```\n\nEach `m()` call creates a virtual DOM element, that is, a Javascript object that represents a DOM element, and which is eventually converted into one.\n\nYou can, of course, nest virtual elements:\n\n```javascript\nm(\"ul\", [\n\tm(\"li\", \"item 1\"),\n\tm(\"li\", \"item 2\"),\n]);\n\n/*\nyields\n
    \n\t
  • item 1
  • \n\t
  • item 2
  • \n
\n*/\n```\n\n---\n\nThe CSS selector syntax (e.g. `a#google.external[href='http://google.com']`) is meant to be used for declaring static attributes in the element, i.e. attribute values that don't change dynamically when the user interacts with the app.\n\nThe `attributes` argument (i.e. the second parameter in the `m(\"div\", {class: \"container\"}, \"Hello\")` example) is meant to be used for attributes whose values we want to dynamically populate.\n\nFor example, let's say that you're generating a link from an entry that comes from a web service:\n\n```javascript\n//assume the variable `link` came from a web service\nvar link = {url: \"http://google.com\", title: \"Google\"}\n\nm(\"a\", {href: link.url}, link.title); //yields Google\n```\n\nHere's a less trivial example:\n\n```javascript\nvar links = [\n {title: \"item 1\", url: \"/item1\"},\n {title: \"item 2\", url: \"/item2\"},\n {title: \"item 3\", url: \"/item3\"}\n];\n\nm.render(document.body, [\n m(\"ul.nav\", \n links.map(function(link) {\n return m(\"li\",\n m(\"a\", {href: link.url}, link.title) \n );\n })\n )\n]);\n```\n\nyields:\n\n```markup\n\n \n\n```\n\nAs you can see, flow control is done with vanilla Javascript. This allows the developer to abstract away any aspect of the template at will.\n\n---\n\nNote that you can use both Javascript property names and HTML attribute names to set values in the `attributes` argument, but you should pass a value of appropriate type. If an attribute has the same name in Javascript and in HTML, then Mithril assumes you're setting the Javascript property.\n\n```javascript\nm(\"div\", {class: \"widget\"}); //yields
\n\nm(\"div\", {className: \"widget\"}); //yields
\n\nm(\"button\", {onclick: alert}); //yields , which alerts its event argument when clicked\n\n//note this uses the Javascript syntax (uppercase \"O\") for `readonly`\n//in order to set the boolean javascript property instead of the HTML attribute\nm(\"input\", {readOnly: true}); //yields \n\n//using the HTML attribute name will call `setAttribute`, which may not be what you want\nm(\"input\", {readonly: false}); //yields , which is still readonly\n```\n\n---\n\nNote that you can use JSON syntax if the attribute name you are setting has non-alphanumeric characters:\n\n```javascript\nm(\"div\", {\"data-index\": 1}); //yields
\n```\n\nYou can set inline styles like this:\n\n```javascript\nm(\"div\", {style: {border: \"1px solid red\"}}); //yields
\n```\n\nNote that in order to keep the framework lean, Mithril does not auto-append units like `px` or `%` to any values. Typically, you should not even be using inline styles to begin with (unless you are dynamically changing them).\n\nMithril also does not auto-camel-case CSS properties on inline style attributes, so you should use the Javascript syntax when setting them via Javascript objects:\n\n```javascript\nm(\"div\", {style: {textAlign: \"center\"}}); //yields
\nm(\"div\", {style: {cssFloat: \"left\"}}); //yields
\n\n//this does not work\nm(\"div\", {style: {\"text-align\": \"center\"}});\nm(\"div\", {style: {float: \"left\"}});\n```\n\nYou can find the [Javascript syntax for all the CSS rules here](https://developer.mozilla.org/en-US/docs/Web/CSS/CSS_Properties_Reference).\n\nYou can, however, use CSS syntax when defining style rules as inline strings:\n\n```javascript\nm(\"div[style='text-align:center']\"); //yields
\n```\n\nOne caveat of using the CSS syntax is that it clobbers the `style` attribute in the DOM element on redraws, so this syntax is not appropriate if you need to use it in conjunction with 3rd party tools that modify the element's style outside of Mithril's templates (e.g. via `config`, which is explained below)\n\n---\n\n### Binding to data\n\nIn order to stay flexible, Mithril doesn't provide helpers for bi-directional bindings out of the box. However, bindings can be implemented easily:\n\n```javascript\n//a data store\nvar name = m.prop(\"\")\n\n//binding the data store in a view\nm(\"input\", {oninput: m.withAttr(\"value\", name), value: name()})\n```\n\nIn the code above, the `oninput` event handler updates the `name` getter-setter, and the Mithril auto-redrawing system redraws the template in order to update the displayed value. You can read more about the [`m.prop` getter-setter utility here](mithril.prop.md) and the [`m.withAttr` event handler factory here](mithril.withAttr.md). You can also [learn how the redrawing system works here](auto-redrawing.md).\n\nNote that Mithril always considers the model layer data to be canonical. 
This means that in the code below, the input on screen will overwritten by the model data any time a redraw happens:\n\n```javascript\n//note that we are not updating the value of the `name` getter-setter via an event handler\n//redraws will always overwrite the current UI value with the value of `name()`\nm(\"input\", {value: name()})\n```\n\nExpressiveness can be achieved using standard refactoring techniques:\n\n```javascript\n//refactor the binding to a simple helper\nvar binds = function(prop) {\n\treturn {oninput: m.withAttr(\"value\", prop), value: prop()}\n}\n\n//a data store\nvar name = m.prop(\"\")\n\n//binding the data store in a view\nm(\"input\", binds(name))\n```\n\nHere's an example of a more aggressive refactor:\n\n```javascript\n//refactor the binding to a simple helper\nvar input = function(prop) {\n\treturn m(\"input\", {oninput: m.withAttr(\"value\", prop), value: prop()})\n}\n\n//a data store\nvar name = m.prop(\"\")\n\n//binding the data store in a view\ninput(name)\n```\n\nAlternatively, you can also explore other techniques in order to achieve better [performance](http://lhorie.github.io/mithril-blog/asymmetrical-data-bindings.html) and [expressiveness](http://lhorie.github.io/mithril-blog/extending-the-view-language.html).\n\n---\n\n### Using HTML entities\n\nBy default, Mithril escapes HTML strings in order to help prevent XSS attacks.\n\n```javascript\nm(\"div\", \"×\") //becomes
&times;
\n```\n\nYou can unescape trusted HTML strings by using [`m.trust`](mithril.trust.md)\n\n```javascript\nm(\"div\", m.trust(\"×\")) //becomes
×
\n```\n\n---\n\n#### Accessing the real DOM element\n\nYou can define a non-HTML-standard attribute called `config`. This special parameter allows you to call methods on the DOM element after it gets created.\n\nThis is useful, for example, if you declare a `canvas` element and want to use the Javascript API to draw:\n\n```javascript\nfunction draw(element, isInitialized, context) {\n\t//don't redraw if we did once already\n\tif (isInitialized) return;\n\n\tvar ctx = element.getContext(\"2d\");\n\t/* draws stuff */\n}\n\nvar view = [\n\tm(\"canvas\", {config: draw})\n]\n\n//this creates the canvas element, and therefore, `isInitialized` is false\nm.render(document.body, view);\n\n//here, isInitialized is `true`\nm.render(document.body, view);\n```\n\nOne common way of using `config` is in conjunction with [`m.route`](mithril.route.md), which is an unobtrusive extension to links that allow Mithril's routing system to work transparently regardless of which routing mode is used.\n\n```javascript\n//this link can use any of Mithril's routing system modes\n//(i.e. it can use either the hash, the querystring or the pathname as the router implementation)\n//without needing to hard-code any syntax (`#` or `?`) in the `href` attribute.\nm(\"a[href='/dashboard']\", {config: m.route}, \"Dashboard\");\n```\n\nThe `config` mechanism can also be used to put focus on form inputs, and call methods that would not be possible to execute via the regular attribute syntax.\n\nAlso note that the `config` callback only runs after a rendering lifecycle is done. Therefore, you should not use `config` to modify controller and model values, if you expect these changes to render immediately. 
Changes to controller and model values in this fashion will only render on the next `m.render` or `m.mount` call.\n\nYou can use this mechanism to attach custom event listeners to controller methods (for example, when integrating with third party libraries), but you are responsible for making sure the integration with Mithril's autoredrawing system is in place. See the [integration guide](integration.md) for more information.\n\nYou can also use it to attach events to other elements (for example, `window.onresize`), but you should remove such event handlers via `ctx.onunload` to avoid surprises.\n\n---\n\n#### Persisting config data\n\nThe third argument for `config` allows you to map data to a virtual DOM element in a way that persists across redraws. This is useful when a `config` instantiates 3rd party classes and accesses the instance on redraws.\n\nThe example below shows a contrived redraw counter. In it, the count is stored in the context object and re-accessed on each redraw.\n\n```javascript\nfunction alertsRedrawCount(element, isInit, context) {\n\tif (!isInit) context.count = 0\n\talert(++context.count)\n}\n\nm(\"div\", {config: alertsRedrawCount})\n```\n\n---\n\n#### Destructors\n\nIf the `context` object that is passed to a `config` function has a property called `onunload`, this function will be called when the element gets detached from the document by Mithril's diff engine.\n\nThis is useful if there are cleanup tasks that need to be run when an element is destroyed (e.g. 
clearing `setTimeout`'s, etc)\n\n```javascript\nfunction unloadable(element, isInit, context) {\n\tcontext.timer = setTimeout(function() {\n\t\talert(\"timed out!\");\n\t}, 1000);\n\n\tcontext.onunload = function() {\n\t\tclearTimeout(context.timer);\n\t\tconsole.log(\"unloaded the div\");\n\t}\n};\n\nm.render(document, m(\"div\", {config: unloadable}));\n\nm.render(document, m(\"a\")); //logs `unloaded the div` and `alert` never gets called\n```\n\n---\n\n#### Persisting DOM elements across route changes\n\nWhen using the [router](mithril.route.md), a route change recreates the DOM tree from scratch in order to unload plugins from the previous page. If you want to keep a DOM element intact across a route change, you can set the `retain` flag in the config's context object.\n\nIn the example below, there are two routes, each of which loads a component when a user navigates to their respective URLs. Both components use a `menu` template, which contains links for navigation between the two components, and an expensive-to-reinitialize element. 
Setting `context.retain = true` in the element's config function allows the span to stay intact after a route change.\n\n```javascript\n//a menu template\nvar menu = function() {\n\treturn m(\"div\", [\n\t\tm(\"a[href='/']\", {config: m.route}, \"Home\"),\n\t\tm(\"a[href='/contact']\", {config: m.route}, \"Contact\"),\n\t\t//an expensive-to-initialize DOM element\n\t\tm(\"span\", {config: persistent})\n\t])\n}\n//a configuration that persists across route changes\nfunction persistent(el, isInit, context) {\n\tcontext.retain = true\n\t\n\tif (!isInit) {\n\t\t//only runs once, even if you move back and forth between `/` and `/contact`\n\t\tdoSomethingExpensive(el)\n\t}\n}\n\n//components that use the menu above\nvar Home = {\n\tcontroller: function() {},\n\tview: function() {\n\t\treturn m(\"div\", [\n\t\t\tmenu(),\n\t\t\tm(\"h1\", \"Home\")\n\t\t])\n\t}\n}\nvar Contact = {\n\tview: function() {\n\t\treturn m(\"div\", [\n\t\t\tmenu(),\n\t\t\tm(\"h2\", \"Contact\")\n\t\t])\n\t}\n}\n\nm.route(document.body, \"/\", {\n\t\"/\": Home,\n\t\"/contact\": Contact\n})\n```\n\nNote that even if you set `context.retain = true`, the element will still be destroyed and recreated if it is different enough from the existing element. 
An element is considered \"different enough\" if:\n\n- the tag name changes, or\n- the list of HTML attributes changes, or\n- the value of the element's id attribute changes\n\nIn addition, setting `context.retain = false` will also cause the element to be recreated, even if it is not considered different enough.\n\n---\n\n#### SVG\n\nYou can use Mithril to create SVG documents (as long as you don't need to support browsers that don't support SVG natively).\n\nMithril automatically figures out the correct XML namespaces when it sees an SVG island in the virtual DOM tree.\n\n```javascript\nm(\"svg[height='200px'][width='200px']\", [\n\tm(\"image[href='foo.jpg'][height='200px'][width='200px']\")\n])\n```\n\n---\n\n#### Dealing with focus\n\nThe virtual DOM diffing algorithm has a weakness: a naive diff is not aware of the identity of DOM elements. In practice, this means performing operations like shifting an item from the beginning of a list would cause every element in the list to be diffed and potentially recreated. Another side-effect is that UI state like input focus is not tracked correctly if the focused element moves around, and likewise, state for 3rd party plugins that are added via `config` can also end up in the wrong element.\n\nFortunately, with Mithril, it's possible for developers to attach an identity key to elements so that array operations like shift, splice and sort only affect the minimum amount of elements required, leaving the rest of the DOM elements untouched when a redraw happens. This allows us to maintain input focus and plugin state correctly.\n\nTo maintain the identities of DOM elements, you need to add a `key` property to the direct children of the array that you're planning to modify. The key for each child must be unique among a list of sibling DOM elements, but it does not need to be globally unique. 
Also, keys must be either strings or numbers.\n\n```javascript\nm(\"ul\", [\n\titems.map(function(item) {\n\t\treturn m(\"li\", {key: item.id}, [\n\t\t\tm(\"input\")\n\t\t]);\n\t})\n]);\n```\n\nIn the example above, input focus would be maintained correctly after a redraw even if `items` got sorted or reversed. The key is defined in the `li`, which is the closest element to the `items` array, not directly on the `input`, even though we want to track focus on the input.\n\nNote that in addition to the presence of the `key` attribute, diffing rules also apply in determining whether an element is recreated. Elements are recreated if either their node name changes, or if the list of attribute names change, or if the ID attribute changes. To avoid surprises, be sure to change only attribute values, using `undefined` or `null` as values if appropriate, rather than conditionally substituting attribute dictionaries altogether.\n\n```javascript\n//avoid using this idiom\nm(\"li\", selected ? {class: \"active\"} : {})\n\n//use this idiom instead\nm(\"li\", {class: selected ? 
\"active\" : \"\"})\n```\n\n---\n\n### Dealing with sorting and deleting in lists\n\nAs with input focus, we can maintain referential integrity between data in a list and the respective DOM representation by using keys.\n\n```javascript\nm(\"ul\", [\n\titems.map(function(item) {\n\t\treturn m(\"li\", {key: item.id}, [\n\t\t\tm(\"input\")\n\t\t]);\n\t})\n]);\n```\n\nYou should always use keys if you need to sort lists, remove items from them or splice them in any way.\n\n---\n\n### Component Shorthand\n\nIf the first argument to `m()` is a component, it acts as an alias of `m.component()`\n\n```javascript\nvar MyComponent = {\n\tcontroller: function() {\n\t\treturn {greeting: \"hello\"}\n\t},\n\tview: function(ctrl, args) {\n\t\treturn m(\"h1\", ctrl.greeting + \" \" + args.data)\n\t}\n}\n\nm.render(document.body, [\n\t//the two lines below are equivalent\n\tm(MyComponent, {data: \"world\"}),\n\tm.component(MyComponent, {data: \"world\"})\n])\n```\n\nSee [components](mithril.component.md) for more information.\n\n---\n\n### Signature\n\n[How to read signatures](how-to-read-signatures.md)\n\n```clike\nVirtualElement m(String selector [, Attributes attributes] [, Children... children])\n\nwhere:\n\tVirtualElement :: Object { String tag, Attributes attributes, Children children }\n Attributes :: Object\n\tChildren :: String text | VirtualElement virtualElement | Component | SubtreeDirective directive | Array\n\tComponent :: Object { Function? 
controller, Function view }\n\tSubtreeDirective :: Object { String subtree }\n```\n\n-\t**String selector**\n\n\tThis string should be a CSS rule that represents a DOM element.\n\n\tOnly tag, id, class and attribute selectors are supported.\n\n\tIf the tag selector is omitted, it defaults to `div`.\n\n\tNote that if the same attribute is defined in the both `selector` and `attributes` parameters, the value in `attributes` is used.\n\n\tFor developer convenience, Mithril makes an exception for the `class` attribute: if there are classes defined in both parameters, they are concatenated as a space separated list. It does not, however, de-dupe classes if the same class is declared twice.\n\n\t*Examples:*\n\n\t`\"div\"`\n\n\t`\"#container\"`\n\n\t`\".active\"`\n\n\t`\"[title='Application']\"`\n\n\t`\"div#container.active[title='Application']\"`\n\n\t`\".active#container\"`\n\n-\t**Attributes attributes** (optional)\n\n\tThis key-value map should define a list of HTML attributes and their respective values.\n\n\tYou can use both HTML and Javascript attribute names. 
For example, both `class` and `className` are valid.\n\n\tValues' types should match the expected type for the respective attribute.\n\n\tFor example, the value for `className` should be a string.\n\n\tWhen a attribute name expects different types for the value in HTML and Javascript, the Javascript type should be used.\n\n\tFor example, the value for the `onclick` attribute should be a function.\n\n\tSimilar, setting the value of attribute `readonly` to `false` is equivalent to removing the attribute in HTML.\n\n\tIt's also possible to set values to Javascript-only properties, such as `hash` in a `` element.\n\n\tNote that if the same attribute is defined in the both `selector` and `attributes` parameters, the value in `attributes` is used.\n\n\tFor developer convenience, Mithril makes an exception for the `class` attribute: if there are classes defined in both parameters, they are concatenated as a space separated list. It does not, however, de-dupe classes if the same class is declared twice.\n\n\t*Examples:*\n\n\t`{ title: \"Application\" }`\n\n\t`{ onclick: function(e) { /*do stuff*/ } }`\n\n\t`{ style: {border: \"1px solid red\"} }`\n\n-\t#### The `config` attribute\n\n\t**void config(DOMElement element, Boolean isInitialized, Object context, VirtualElement vdom)** (optional)\n\n\tYou can define a non-HTML-standard attribute called `config`. 
This special parameter allows you to call methods on the DOM element after it gets created.\n\n\tThis is useful, for example, if you declare a `canvas` element and want to use the Javascript API to draw:\n\n\t```javascript\n\tfunction draw(element, isInitialized) {\n\t\t//don't redraw if we did once already\n\t\tif (isInitialized) return;\n\n\t\tvar ctx = element.getContext(\"2d\");\n\t\t/* draws stuff */\n\t}\n\n\tvar view = [\n\t\tm(\"canvas\", {config: draw})\n\t]\n\n\t//this creates the canvas element, and therefore, `isInitialized` is false\n\tm.render(document.body, view);\n\n\t//here, isInitialized is `true`\n\tm.render(document.body, view);\n\t```\n\n\tOne common way of using `config` is in conjunction with [`m.route`](mithril.route.md), which is an unobtrusive extension to links that allow Mithril's routing system to work transparently regardless of which routing mode is used.\n\n\t```javascript\n\t//this link can use any of Mithril's routing system modes\n\t//(i.e. it can use either the hash, the querystring or the pathname as the router implementation)\n\t//without needing to hard-code any syntax (`#` or `?`) in the `href` attribute.\n\tm(\"a[href='/dashboard']\", {config: m.route}, \"Dashboard\");\n\t```\n\n\tThe `config` mechanism can also be used to put focus on form inputs, and call methods that would not be possible to execute via the regular attribute syntax.\n\n\tAlso note that the `config` callback only runs after a rendering lifecycle is done. Therefore, you should not use `config` to modify controller and model values, if you expect these changes to render immediately. Changes to controller and model values in this fashion will only render on the next `m.render` or `m.mount` call.\n\n\tYou can use this mechanism to attach custom event listeners to controller methods (for example, when integrating with third party libraries), but you are responsible for making sure the integration with Mithril's autoredrawing system is in place. 
See the [integration guide](integration.md) for more information.\n\t\n\tYou can also use it to attach events to other elements (for example, `window.onresize`), but you should remove such event handlers via `ctx.onunload` to avoid surprises.\n\n\t-\t**DOMElement element**\n\n\tThe DOM element that corresponds to virtual element defined by the `m()` call.\n\n\t-\t**Boolean isInitialized**\n\n\tWhether this is the first time we are running this function on this element. This flag is false the first time it runs on an element, and true on redraws that happen after the element has been created.\n\n\t-\t**Object context**\n\n\tAn object that retains its state across redraws. It can be used to store instances of 3rd party classes that need to be accessed more than one time throughout the lifecycle of a page.\n\n\tThe example below shows a contrived redraw counter. In it, the count is stored in the context object and re-accessed on each redraw.\n\n\t```javascript\n\tfunction alertsRedrawCount(element, isInit, context) {\n\t\tif (!isInit) context.count = 0\n\t\talert(++context.count)\n\t}\n\n\tm(\"div\", {config: alertsRedrawCount})\n\t```\n\n\tIf the `context` object that is passed to a `config` function has a property called `onunload`, this function will be called when the element gets detached from the document by Mithril's diff engine.\n\n\tThis is useful if there are cleanup tasks that need to be run when an element is destroyed (e.g. 
clearing `setTimeout`'s, etc)\n\n\t```javascript\n\tfunction unloadable(element, isInit, context) {\n\t\tcontext.timer = setTimeout(function() {\n\t\t\talert(\"timed out!\");\n\t\t}, 1000);\n\n\t\tcontext.onunload = function() {\n\t\t\tclearTimeout(context.timer);\n\t\t\tconsole.log(\"unloaded the div\");\n\t\t}\n\t};\n\n\tm.render(document, m(\"div\", {config: unloadable}));\n\n\tm.render(document, m(\"a\")); //logs `unloaded the div` and `alert` never gets called\n\t```\n\t\n\t-\t**VirtualElement vdom**\n\t\n\tThe virtual DOM element to which the `config` function is attached\n\n-\t**Children children** (optional)\n\n\tIf this argument is a string, it will be rendered as a text node. To render a string as HTML, see [`m.trust`](mithril.trust.md)\n\n\tIf it's a VirtualElement, it will be rendered as a DOM Element.\n\n\tIf it's a [component](mithril.component.md), the component will be instantiated and managed internally by Mithril\n\t\n\tIf it's a list, its contents will recursively be rendered as appropriate and appended as children of the element being created.\n\n\tIf it's a SubtreeDirective with the value \"retain\", it will retain the existing DOM tree in place, if any. 
See [subtree directives.md](mithril.render.md#subtree-directives) for more information.\n\n-\t**returns** VirtualElement\n\n\tThe returned VirtualElement is a Javascript data structure that represents the DOM element to be rendered by [`m.render`](mithril.render.md)\n", "meta": {"content_hash": "019c130827cf9dff2398348ed5027c90", "timestamp": "", "source": "github", "line_count": 671, "max_line_length": 507, "avg_line_length": 37.58867362146051, "alnum_prop": 0.7245262072793592, "repo_name": "futurist/mithril_0.2.x", "id": "10fa215b7b93b08321e418fc6db337e613bbfb72", "size": "25222", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/mithril.md", "mode": "33261", "license": "mit", "language": [{"name": "CSS", "bytes": "36786"}, {"name": "HTML", "bytes": "2914489"}, {"name": "JavaScript", "bytes": "925918"}]}} {"text": " env('APP_ENV', 'production'),\n\n /*\n |--------------------------------------------------------------------------\n | Application Debug Mode\n |--------------------------------------------------------------------------\n |\n | When your application is in debug mode, detailed error messages with\n | stack traces will be shown on every error that occurs within your\n | application. If disabled, a simple generic error page is shown.\n |\n */\n\n 'debug' => env('APP_DEBUG', true),\n\n /*\n |--------------------------------------------------------------------------\n | Application URL\n |--------------------------------------------------------------------------\n |\n | This URL is used by the console to properly generate URLs when using\n | the Artisan command line tool. 
You should set this to the root of\n | your application so that it is used when running Artisan tasks.\n |\n */\n\n 'url' => env('APP_URL', 'http://localhost'),\n\n /*\n |--------------------------------------------------------------------------\n | Application Timezone\n |--------------------------------------------------------------------------\n |\n | Here you may specify the default timezone for your application, which\n | will be used by the PHP date and date-time functions. We have gone\n | ahead and set this to a sensible default for you out of the box.\n |\n */\n\n 'timezone' => 'UTC',\n\n /*\n |--------------------------------------------------------------------------\n | Application Locale Configuration\n |--------------------------------------------------------------------------\n |\n | The application locale determines the default locale that will be used\n | by the translation service provider. You are free to set this value\n | to any of the locales which will be supported by the application.\n |\n */\n\n 'locale' => 'en',\n\n /*\n |--------------------------------------------------------------------------\n | Application Fallback Locale\n |--------------------------------------------------------------------------\n |\n | The fallback locale determines the locale to use when the current one\n | is not available. You may change the value to correspond to any of\n | the language folders that are provided through your application.\n |\n */\n\n 'fallback_locale' => 'en',\n\n /*\n |--------------------------------------------------------------------------\n | Encryption Key\n |--------------------------------------------------------------------------\n |\n | This key is used by the Illuminate encrypter service and should be set\n | to a random, 32 character string, otherwise these encrypted strings\n | will not be safe. 
Please do this before deploying an application!\n |\n */\n\n 'key' => env('APP_KEY'),\n\n 'cipher' => 'AES-256-CBC',\n\n /*\n |--------------------------------------------------------------------------\n | Logging Configuration\n |--------------------------------------------------------------------------\n |\n | Here you may configure the log settings for your application. Out of\n | the box, Laravel uses the Monolog PHP logging library. This gives\n | you a variety of powerful log handlers / formatters to utilize.\n |\n | Available Settings: \"single\", \"daily\", \"syslog\", \"errorlog\"\n |\n */\n\n 'log' => env('APP_LOG', 'daily'),\n\n /*\n |--------------------------------------------------------------------------\n | Autoloaded Service Providers\n |--------------------------------------------------------------------------\n |\n | The service providers listed here will be automatically loaded on the\n | request to your application. Feel free to add your own services to\n | this array to grant expanded functionality to your applications.\n |\n */\n\n 'providers' => [\n\n /*\n * Laravel Framework Service Providers...\n */\n Illuminate\\Auth\\AuthServiceProvider::class,\n Illuminate\\Broadcasting\\BroadcastServiceProvider::class,\n Illuminate\\Bus\\BusServiceProvider::class,\n Illuminate\\Cache\\CacheServiceProvider::class,\n Illuminate\\Foundation\\Providers\\ConsoleSupportServiceProvider::class,\n Illuminate\\Cookie\\CookieServiceProvider::class,\n Illuminate\\Database\\DatabaseServiceProvider::class,\n Illuminate\\Encryption\\EncryptionServiceProvider::class,\n Illuminate\\Filesystem\\FilesystemServiceProvider::class,\n Illuminate\\Foundation\\Providers\\FoundationServiceProvider::class,\n Illuminate\\Hashing\\HashServiceProvider::class,\n Illuminate\\Mail\\MailServiceProvider::class,\n Illuminate\\Pagination\\PaginationServiceProvider::class,\n Illuminate\\Pipeline\\PipelineServiceProvider::class,\n Illuminate\\Queue\\QueueServiceProvider::class,\n 
Illuminate\\Redis\\RedisServiceProvider::class,\n Illuminate\\Auth\\Passwords\\PasswordResetServiceProvider::class,\n Illuminate\\Session\\SessionServiceProvider::class,\n Illuminate\\Translation\\TranslationServiceProvider::class,\n Illuminate\\Validation\\ValidationServiceProvider::class,\n Illuminate\\View\\ViewServiceProvider::class,\n\n /*\n * Application Service Providers...\n */\n App\\Providers\\AppServiceProvider::class,\n App\\Providers\\AuthServiceProvider::class,\n App\\Providers\\EventServiceProvider::class,\n App\\Providers\\RouteServiceProvider::class,\n\n /*\n * Entrust Service Provider\n */\n Zizaco\\Entrust\\EntrustServiceProvider::class,\n\n /*\n * LaravelCollective Html Form & Html\n */\n Collective\\Html\\HtmlServiceProvider::class,\n Laravel\\Socialite\\SocialiteServiceProvider::class,\n ],\n\n /*\n |--------------------------------------------------------------------------\n | Class Aliases\n |--------------------------------------------------------------------------\n |\n | This array of class aliases will be registered when this application\n | is started. 
However, feel free to register as many as you wish as\n | the aliases are \"lazy\" loaded so they don't hinder performance.\n |\n */\n\n 'aliases' => [\n\n 'App' => Illuminate\\Support\\Facades\\App::class,\n 'Artisan' => Illuminate\\Support\\Facades\\Artisan::class,\n 'Auth' => Illuminate\\Support\\Facades\\Auth::class,\n 'Blade' => Illuminate\\Support\\Facades\\Blade::class,\n 'Cache' => Illuminate\\Support\\Facades\\Cache::class,\n 'Config' => Illuminate\\Support\\Facades\\Config::class,\n 'Cookie' => Illuminate\\Support\\Facades\\Cookie::class,\n 'Crypt' => Illuminate\\Support\\Facades\\Crypt::class,\n 'DB' => Illuminate\\Support\\Facades\\DB::class,\n 'Eloquent' => Illuminate\\Database\\Eloquent\\Model::class,\n 'Event' => Illuminate\\Support\\Facades\\Event::class,\n 'File' => Illuminate\\Support\\Facades\\File::class,\n 'Gate' => Illuminate\\Support\\Facades\\Gate::class,\n 'Hash' => Illuminate\\Support\\Facades\\Hash::class,\n 'Lang' => Illuminate\\Support\\Facades\\Lang::class,\n 'Log' => Illuminate\\Support\\Facades\\Log::class,\n 'Mail' => Illuminate\\Support\\Facades\\Mail::class,\n 'Password' => Illuminate\\Support\\Facades\\Password::class,\n 'Queue' => Illuminate\\Support\\Facades\\Queue::class,\n 'Redirect' => Illuminate\\Support\\Facades\\Redirect::class,\n 'Redis' => Illuminate\\Support\\Facades\\Redis::class,\n 'Request' => Illuminate\\Support\\Facades\\Request::class,\n 'Response' => Illuminate\\Support\\Facades\\Response::class,\n 'Route' => Illuminate\\Support\\Facades\\Route::class,\n 'Schema' => Illuminate\\Support\\Facades\\Schema::class,\n 'Session' => Illuminate\\Support\\Facades\\Session::class,\n 'Storage' => Illuminate\\Support\\Facades\\Storage::class,\n 'URL' => Illuminate\\Support\\Facades\\URL::class,\n 'Validator' => Illuminate\\Support\\Facades\\Validator::class,\n 'View' => Illuminate\\Support\\Facades\\View::class,\n\n /*\n * Entrust alias\n */\n 'Entrust' => Zizaco\\Entrust\\EntrustFacade::class,\n\n /*\n * LaravelCollective 
Html Form & Html\n */\n 'Form' => Collective\\Html\\FormFacade::class,\n 'Html' => Collective\\Html\\HtmlFacade::class,\n\n 'Socialite' => Laravel\\Socialite\\Facades\\Socialite::class,\n\n ],\n];\n", "meta": {"content_hash": "2d5ee0dc643fd6d7e0737c071b27f42b", "timestamp": "", "source": "github", "line_count": 229, "max_line_length": 79, "avg_line_length": 38.88209606986899, "alnum_prop": 0.5367250673854448, "repo_name": "pviswanath/New-Cassel-first-code-base", "id": "b273d4c18f15bc1087bbb33b74a20ef06029c369", "size": "8904", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "config/app.php", "mode": "33261", "license": "mit", "language": [{"name": "ApacheConf", "bytes": "553"}, {"name": "CSS", "bytes": "4027"}, {"name": "HTML", "bytes": "67156"}, {"name": "JavaScript", "bytes": "15056"}, {"name": "PHP", "bytes": "162829"}]}} {"text": "_mapFileToCase($file, $category, $throwOnMissingFile);\n\t}\n\n\tpublic function mapFileToCategory($file) {\n\t\treturn $this->_mapFileToCategory($file);\n\t}\n\n}\n\n/**\n * TestShellTest\n *\n * @package Cake.Test.Case.Console.Command\n */\nclass TestShellTest extends CakeTestCase {\n\n/**\n * setUp test case\n *\n * @return void\n */\n\tpublic function setUp() {\n\t\tparent::setUp();\n\t\t$out = $this->getMock('ConsoleOutput', array(), array(), '', false);\n\t\t$in = $this->getMock('ConsoleInput', array(), array(), '', false);\n\n\t\t$this->Shell = $this->getMock(\n\t\t\t'TestTestShell',\n\t\t\tarray('in', 'out', 'hr', 'help', 'error', 'err', '_stop', 'initialize', '_run', 'clear'),\n\t\t\tarray($out, $out, $in)\n\t\t);\n\t\t$this->Shell->OptionParser = $this->getMock('ConsoleOptionParser', array(), array(null, false));\n\t}\n\n/**\n * tearDown method\n *\n * @return void\n */\n\tpublic function tearDown() {\n\t\tparent::tearDown();\n\t\tunset($this->Dispatch, $this->Shell);\n\t}\n\n/**\n * testMapCoreFileToCategory\n *\n * @return void\n */\n\tpublic function testMapCoreFileToCategory() 
{\n\t\t$this->Shell->startup();\n\n\t\t$return = $this->Shell->mapFileToCategory('lib/Cake/basics.php');\n\t\t$this->assertSame('core', $return);\n\n\t\t$return = $this->Shell->mapFileToCategory('lib/Cake/Core/App.php');\n\t\t$this->assertSame('core', $return);\n\n\t\t$return = $this->Shell->mapFileToCategory('lib/Cake/Some/Deeply/Nested/Structure.php');\n\t\t$this->assertSame('core', $return);\n\t}\n\n/**\n * testMapCoreFileToCase\n *\n * basics.php is a slightly special case - it's the only file in the core with a test that isn't Capitalized\n *\n * @return void\n */\n\tpublic function testMapCoreFileToCase() {\n\t\t$this->Shell->startup();\n\n\t\t$return = $this->Shell->mapFileToCase('lib/Cake/basics.php', 'core');\n\t\t$this->assertSame('Basics', $return);\n\n\t\t$return = $this->Shell->mapFileToCase('lib/Cake/Core/App.php', 'core');\n\t\t$this->assertSame('Core/App', $return);\n\n\t\t$return = $this->Shell->mapFileToCase('lib/Cake/Some/Deeply/Nested/Structure.php', 'core', false);\n\t\t$this->assertSame('Some/Deeply/Nested/Structure', $return);\n\t}\n\n/**\n * testMapAppFileToCategory\n *\n * @return void\n */\n\tpublic function testMapAppFileToCategory() {\n\t\t$this->Shell->startup();\n\n\t\t$return = $this->Shell->mapFileToCategory(APP . 'Controller/ExampleController.php');\n\t\t$this->assertSame('app', $return);\n\n\t\t$return = $this->Shell->mapFileToCategory(APP . 'My/File/Is/Here.php');\n\t\t$this->assertSame('app', $return);\n\t}\n\n/**\n * testMapAppFileToCase\n *\n * @return void\n */\n\tpublic function testMapAppFileToCase() {\n\t\t$this->Shell->startup();\n\n\t\t$return = $this->Shell->mapFileToCase(APP . 'Controller/ExampleController.php', 'app', false);\n\t\t$this->assertSame('Controller/ExampleController', $return);\n\n\t\t$return = $this->Shell->mapFileToCase(APP . 
'My/File/Is/Here.php', 'app', false);\n\t\t$this->assertSame('My/File/Is/Here', $return);\n\t}\n\n/**\n * testMapPluginFileToCategory\n *\n * @return void\n */\n\tpublic function testMapPluginFileToCategory() {\n\t\t$this->Shell->startup();\n\n\t\t$return = $this->Shell->mapFileToCategory(APP . 'Plugin/awesome/Controller/ExampleController.php');\n\t\t$this->assertSame('awesome', $return);\n\n\t\t$return = $this->Shell->mapFileToCategory(dirname(CAKE) . 'plugins/awesome/Controller/ExampleController.php');\n\t\t$this->assertSame('awesome', $return);\n\t}\n\n/**\n * testMapPluginFileToCase\n *\n * @return void\n */\n\tpublic function testMapPluginFileToCase() {\n\t\t$this->Shell->startup();\n\n\t\t$return = $this->Shell->mapFileToCase(APP . 'Plugin/awesome/Controller/ExampleController.php', 'awesome', false);\n\t\t$this->assertSame('Controller/ExampleController', $return);\n\n\t\t$return = $this->Shell->mapFileToCase(dirname(CAKE) . 'plugins/awesome/Controller/ExampleController.php', 'awesome', false);\n\t\t$this->assertSame('Controller/ExampleController', $return);\n\t}\n\n/**\n * testMapCoreTestToCategory\n *\n * @return void\n */\n\tpublic function testMapCoreTestToCategory() {\n\t\t$this->Shell->startup();\n\n\t\t$return = $this->Shell->mapFileToCategory('lib/Cake/Test/Case/BasicsTest.php');\n\t\t$this->assertSame('core', $return);\n\n\t\t$return = $this->Shell->mapFileToCategory('lib/Cake/Test/Case/BasicsTest.php');\n\t\t$this->assertSame('core', $return);\n\n\t\t$return = $this->Shell->mapFileToCategory('lib/Cake/Test/Case/Some/Deeply/Nested/StructureTest.php');\n\t\t$this->assertSame('core', $return);\n\t}\n\n/**\n * testMapCoreTestToCase\n *\n * basics.php is a slightly special case - it's the only file in the core with a test that isn't Capitalized\n *\n * @return void\n */\n\tpublic function testMapCoreTestToCase() {\n\t\t$this->Shell->startup();\n\n\t\t$return = $this->Shell->mapFileToCase('lib/Cake/Test/Case/BasicsTest.php', 
'core');\n\t\t$this->assertSame('Basics', $return);\n\n\t\t$return = $this->Shell->mapFileToCase('lib/Cake/Test/Case/Core/AppTest.php', 'core');\n\t\t$this->assertSame('Core/App', $return);\n\n\t\t$return = $this->Shell->mapFileToCase('lib/Cake/Test/Case/Some/Deeply/Nested/StructureTest.php', 'core', false);\n\t\t$this->assertSame('Some/Deeply/Nested/Structure', $return);\n\t}\n\n/**\n * testMapAppTestToCategory\n *\n * @return void\n */\n\tpublic function testMapAppTestToCategory() {\n\t\t$this->Shell->startup();\n\n\t\t$return = $this->Shell->mapFileToCategory(APP . 'Test/Case/Controller/ExampleControllerTest.php');\n\t\t$this->assertSame('app', $return);\n\n\t\t$return = $this->Shell->mapFileToCategory(APP . 'Test/Case/My/File/Is/HereTest.php');\n\t\t$this->assertSame('app', $return);\n\t}\n\n/**\n * testMapAppTestToCase\n *\n * @return void\n */\n\tpublic function testMapAppTestToCase() {\n\t\t$this->Shell->startup();\n\n\t\t$return = $this->Shell->mapFileToCase(APP . 'Test/Case/Controller/ExampleControllerTest.php', 'app', false);\n\t\t$this->assertSame('Controller/ExampleController', $return);\n\n\t\t$return = $this->Shell->mapFileToCase(APP . 'Test/Case/My/File/Is/HereTest.php', 'app', false);\n\t\t$this->assertSame('My/File/Is/Here', $return);\n\t}\n\n/**\n * testMapPluginTestToCategory\n *\n * @return void\n */\n\tpublic function testMapPluginTestToCategory() {\n\t\t$this->Shell->startup();\n\n\t\t$return = $this->Shell->mapFileToCategory(APP . 'Plugin/awesome/Test/Case/Controller/ExampleControllerTest.php');\n\t\t$this->assertSame('awesome', $return);\n\n\t\t$return = $this->Shell->mapFileToCategory(dirname(CAKE) . 'plugins/awesome/Test/Case/Controller/ExampleControllerTest.php');\n\t\t$this->assertSame('awesome', $return);\n\t}\n\n/**\n * testMapPluginTestToCase\n *\n * @return void\n */\n\tpublic function testMapPluginTestToCase() {\n\t\t$this->Shell->startup();\n\n\t\t$return = $this->Shell->mapFileToCase(APP . 
'Plugin/awesome/Test/Case/Controller/ExampleControllerTest.php', 'awesome', false);\n\t\t$this->assertSame('Controller/ExampleController', $return);\n\n\t\t$return = $this->Shell->mapFileToCase(dirname(CAKE) . 'plugins/awesome/Test/Case/Controller/ExampleControllerTest.php', 'awesome', false);\n\t\t$this->assertSame('Controller/ExampleController', $return);\n\t}\n\n/**\n * testMapNotTestToNothing\n *\n * @return void\n */\n\tpublic function testMapNotTestToNothing() {\n\t\t$this->Shell->startup();\n\n\t\t$return = $this->Shell->mapFileToCategory(APP . 'Test/Case/NotATestFile.php');\n\t\t$this->assertSame('app', $return);\n\n\t\t$return = $this->Shell->mapFileToCase(APP . 'Test/Case/NotATestFile.php', false, false);\n\t\t$this->assertFalse($return);\n\n\t\t$return = $this->Shell->mapFileToCategory(APP . 'Test/Fixture/SomeTest.php');\n\t\t$this->assertSame('app', $return);\n\n\t\t$return = $this->Shell->mapFileToCase(APP . 'Test/Fixture/SomeTest.php', false, false);\n\t\t$this->assertFalse($return);\n\t}\n\n/**\n * test available list of test cases for an empty category\n *\n * @return void\n */\n\tpublic function testAvailableWithEmptyList() {\n\t\t$this->Shell->startup();\n\t\t$this->Shell->args = array('unexistant-category');\n\t\t$this->Shell->expects($this->at(0))->method('out')->with(__d('cake_console', \"No test cases available \\n\\n\"));\n\t\t$this->Shell->OptionParser->expects($this->once())->method('help');\n\t\t$this->Shell->available();\n\t}\n\n/**\n * test available list of test cases for core category\n *\n * @return void\n */\n\tpublic function testAvailableCoreCategory() {\n\t\t$this->Shell->startup();\n\t\t$this->Shell->args = array('core');\n\t\t$this->Shell->expects($this->at(0))->method('out')->with('Core Test 
Cases:');\n\t\t$this->Shell->expects($this->at(1))->method('out')\n\t\t\t->with($this->stringContains('[1]'));\n\t\t$this->Shell->expects($this->at(2))->method('out')\n\t\t\t->with($this->stringContains('[2]'));\n\n\t\t$this->Shell->expects($this->once())->method('in')\n\t\t\t->with(__d('cake_console', 'What test case would you like to run?'), null, 'q')\n\t\t\t->will($this->returnValue('1'));\n\n\t\t$this->Shell->expects($this->once())->method('_run');\n\t\t$this->Shell->available();\n\t\t$this->assertEquals(array('core', 'AllBehaviors'), $this->Shell->args);\n\t}\n\n/**\n * Tests that correct option for test runner are passed\n *\n * @return void\n */\n\tpublic function testRunnerOptions() {\n\t\t$this->Shell->startup();\n\t\t$this->Shell->args = array('core', 'Basics');\n\t\t$this->Shell->params = array('filter' => 'myFilter', 'colors' => true, 'verbose' => true);\n\n\t\t$this->Shell->expects($this->once())->method('_run')\n\t\t\t->with(\n\t\t\t\tarray('app' => false, 'plugin' => null, 'core' => true, 'output' => 'text', 'case' => 'Basics'),\n\t\t\t\tarray('--filter', 'myFilter', '--colors', '--verbose')\n\t\t\t);\n\t\t$this->Shell->main();\n\t}\n\n/**\n * Tests that the 'quiet' parameter gets swallowed before calling PHPUnit\n *\n * @return void\n */\n\tpublic function testRunnerOptionsQuiet() {\n\t\t$this->Shell->startup();\n\t\t$this->Shell->args = array('core', 'Basics');\n\t\t$this->Shell->params = array('quiet' => true);\n\n\t\t$this->Shell->expects($this->once())->method('_run')\n\t\t\t->with(\n\t\t\t\tarray('app' => false, 'plugin' => null, 'core' => true, 'output' => 'text', 'case' => 'Basics'),\n\t\t\t\tarray('--colors')\n\t\t\t);\n\t\t$this->Shell->main();\n\t}\n\n/**\n * Tests that the '--directive' parameter change to '-d' before calling PHPUnit\n *\n * @return void\n */\n\tpublic function testRunnerOptionsDirective() {\n\t\t$this->Shell->startup();\n\t\t$this->Shell->args = array('core', 'Basics');\n\t\t$this->Shell->params = array('directive' => 
'memory_limit=128M');\n\n\t\t$this->Shell->expects($this->once())->method('_run')\n\t\t\t->with(\n\t\t\t\tarray('app' => false, 'plugin' => null, 'core' => true, 'output' => 'text', 'case' => 'Basics'),\n\t\t\t\tarray('-d', 'memory_limit=128M', '--colors')\n\t\t\t);\n\t\t$this->Shell->main();\n\t}\n}\n", "meta": {"content_hash": "6a24b1b49f2293b93258f70726a16841", "timestamp": "", "source": "github", "line_count": 365, "max_line_length": 140, "avg_line_length": 28.854794520547944, "alnum_prop": 0.66473604253703, "repo_name": "ibrsp/comanage-registry", "id": "7dfe63c27f2aa80931528d4553f4728ad271f522", "size": "11209", "binary": false, "copies": "25", "ref": "refs/heads/master", "path": "lib/Cake/Test/Case/Console/Command/TestShellTest.php", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "2824"}, {"name": "CSS", "bytes": "480345"}, {"name": "HTML", "bytes": "32588"}, {"name": "JavaScript", "bytes": "267012"}, {"name": "PHP", "bytes": "13272572"}, {"name": "Shell", "bytes": "5430"}, {"name": "XSLT", "bytes": "808"}]}} {"text": " array());\n\n\t/**\n\t * IO channel listeners\n\t * @var array\n\t */\n\tprotected $ioListeners = array();\n\n\t#region \"Event channel\"\n\n\t/**\n\t * Dispatch new event through Event channel\n\t * @param $eventName\n\t * @param Event $event\n\t */\n\tpublic function dispatch($eventName, Event $event) {\n\t\t$listeners = $this->listeners['*'];\n\n\t\tif (array_key_exists($eventName, $this->listeners)) {\n\t\t\t$listeners = array_merge($listeners, $this->listeners[$eventName]);\n\t\t}\n\n\t\tif (!$listeners)\n\t\t\treturn;\n\n\t\t$event->setName($eventName);\n\n\t\tforeach ($listeners as $listener) {\n\t\t\tcall_user_func($listener, $event);\n\n\t\t\tif ($event->isPropagationStopped()) {\n\t\t\t\tbreak;\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Add new listener to Event channel\n\t * @param $eventName\n\t * @param $callback\n\t */\n\tpublic function addListener($eventName, $callback) {\n\t\t$eventName = 
(string) $eventName;\n\t\tif (!array_key_exists($eventName, $this->listeners))\n\t\t\t$this->listeners[$eventName] = array();\n\n\t\t$this->listeners[$eventName][] = $callback;\n\t}\n\n\t#endregion\n\n\t#region \"IO channel\"\n\n\t/**\n\t * Dispatch event through IO channel\n\t * @param $ioId\n\t */\n\tpublic function dispatchIO($ioId) {\n\t\tif (!isset($this->ioListeners[$ioId]))\n\t\t\treturn;\n\n\t\tforeach($this->ioListeners[$ioId] as $item) {\n\t\t\t/** @var $item ActiveConfigurationItem */\n\t\t\t$item->receiveInput($ioId);\n\t\t}\n\t}\n\n\t/**\n\t * Add new listener to IO channel\n\t * @param $ioId\n\t * @param ActiveConfigurationItem $item\n\t */\n\tpublic function addIOListener($ioId, ActiveConfigurationItem $item) {\n\t\tif (!isset($this->ioListeners[$ioId]))\n\t\t\t$this->ioListeners[$ioId] = array();\n\n\t\t$this->ioListeners[$ioId][] = $item;\n\t}\n\n\t#endregion\n}", "meta": {"content_hash": "430c779bfaabb90858ed23d6b6db9551", "timestamp": "", "source": "github", "line_count": 102, "max_line_length": 74, "avg_line_length": 20.41176470588235, "alnum_prop": 0.6575408261287223, "repo_name": "petrolep/itilsimulator", "id": "610590efe72606475569b94a8140a9b99b1b66e9", "size": "2082", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "application/app/model/Runtime/Events/EventManager.php", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "JavaScript", "bytes": "1336953"}, {"name": "PHP", "bytes": "697825"}, {"name": "Shell", "bytes": "68"}]}} {"text": "/*\n * This file is derived from various .h and .c files from the zlib-1.0.4\n * distribution by Jean-loup Gailly and Mark Adler, with some additions\n * by Paul Mackerras to aid in implementing Deflate compression and\n * decompression for PPP packets. 
See zlib.h for conditions of\n * distribution and use.\n *\n * Changes that have been made include:\n * - added Z_PACKET_FLUSH (see zlib.h for details)\n * - added inflateIncomp and deflateOutputPending\n * - allow strm->next_out to be NULL, meaning discard the output\n *\n * $FreeBSD: soc2013/dpl/head/sys/net/zlib.c 246301 2013-01-06 14:59:59Z peter $\n */\n\n/* \n * ==FILEVERSION 971210==\n *\n * This marker is used by the Linux installation script to determine\n * whether an up-to-date version of this file is already installed.\n */\n\n#define NO_DUMMY_DECL\n#define NO_ZCFUNCS\n#define MY_ZCALLOC\n\n#if defined(__FreeBSD__) && defined(_KERNEL)\n#define\t_tr_init\t\t_zlib104_tr_init\n#define\t_tr_align\t\t_zlib104_tr_align\n#define\t_tr_tally\t\t_zlib104_tr_tally\n#define\t_tr_flush_block\t\t_zlib104_tr_flush_block\n#define\t_tr_stored_block\t_zlib104_tr_stored_block\n#define\tinflate_fast\t\t_zlib104_inflate_fast\n#define\tinflate\t\t\t_zlib104_inflate\n#define\tzlibVersion\t\t_zlib104_Version\n#endif\n\n\n/* +++ zutil.h */\n/*-\n * zutil.h -- internal interface and configuration of the compression library\n * Copyright (C) 1995-1996 Jean-loup Gailly.\n * For conditions of distribution and use, see copyright notice in zlib.h\n */\n\n/* WARNING: this file should *not* be used by applications. It is\n part of the implementation of the compression library and is\n subject to change. 
Applications should only use zlib.h.\n */\n\n/* From: zutil.h,v 1.16 1996/07/24 13:41:13 me Exp $ */\n\n#ifndef _Z_UTIL_H\n#define _Z_UTIL_H\n\n#ifdef _KERNEL\n#include \n#else\n#include \"zlib.h\"\n#endif\n\n#ifdef _KERNEL\n/* Assume this is a *BSD or SVR4 kernel */\n#include \n#include \n#include \n#include \n#include \n#include \n# define HAVE_MEMCPY\n\n#else\n#if defined(__KERNEL__)\n/* Assume this is a Linux kernel */\n#include \n#define HAVE_MEMCPY\n\n#else /* not kernel */\n\n#if defined(MSDOS)||defined(VMS)||defined(CRAY)||defined(WIN32)||defined(RISCOS)\n# include \n# include \n#else\n extern int errno;\n#endif\n#ifdef STDC\n# include \n# include \n#endif\n#endif /* __KERNEL__ */\n#endif /* _KERNEL */\n\n#ifndef local\n# define local static\n#endif\n/* compile with -Dlocal if your debugger can't find static symbols */\n\ntypedef unsigned char uch;\ntypedef uch FAR uchf;\ntypedef unsigned short ush;\ntypedef ush FAR ushf;\ntypedef unsigned long ulg;\n\nstatic const char *z_errmsg[10]; /* indexed by 2-zlib_error */\n/* (size given to avoid silly warnings with Visual C++) */\n\n#define ERR_MSG(err) z_errmsg[Z_NEED_DICT-(err)]\n\n#define ERR_RETURN(strm,err) \\\n return (strm->msg = (const char*)ERR_MSG(err), (err))\n/* To be used only when the state is known to be valid */\n\n /* common constants */\n\n#ifndef DEF_WBITS\n# define DEF_WBITS MAX_WBITS\n#endif\n/* default windowBits for decompression. 
MAX_WBITS is for compression only */\n\n#if MAX_MEM_LEVEL >= 8\n# define DEF_MEM_LEVEL 8\n#else\n# define DEF_MEM_LEVEL MAX_MEM_LEVEL\n#endif\n/* default memLevel */\n\n#define STORED_BLOCK 0\n#define STATIC_TREES 1\n#define DYN_TREES 2\n/* The three kinds of block type */\n\n#define MIN_MATCH 3\n#define MAX_MATCH 258\n/* The minimum and maximum match lengths */\n\n#define PRESET_DICT 0x20 /* preset dictionary flag in zlib header */\n\n /* target dependencies */\n\n#ifdef MSDOS\n# define OS_CODE 0x00\n# ifdef __TURBOC__\n# include \n# else /* MSC or DJGPP */\n# include \n# endif\n#endif\n\n#ifdef OS2\n# define OS_CODE 0x06\n#endif\n\n#ifdef WIN32 /* Window 95 & Windows NT */\n# define OS_CODE 0x0b\n#endif\n\n#if defined(VAXC) || defined(VMS)\n# define OS_CODE 0x02\n# define FOPEN(name, mode) \\\n fopen((name), (mode), \"mbc=60\", \"ctx=stm\", \"rfm=fix\", \"mrs=512\")\n#endif\n\n#ifdef AMIGA\n# define OS_CODE 0x01\n#endif\n\n#if defined(ATARI) || defined(atarist)\n# define OS_CODE 0x05\n#endif\n\n#ifdef MACOS\n# define OS_CODE 0x07\n#endif\n\n#ifdef __50SERIES /* Prime/PRIMOS */\n# define OS_CODE 0x0F\n#endif\n\n#ifdef TOPS20\n# define OS_CODE 0x0a\n#endif\n\n#if defined(_BEOS_) || defined(RISCOS)\n# define fdopen(fd,mode) NULL /* No fdopen() */\n#endif\n\n /* Common defaults */\n\n#ifndef OS_CODE\n# define OS_CODE 0x03 /* assume Unix */\n#endif\n\n#ifndef FOPEN\n# define FOPEN(name, mode) fopen((name), (mode))\n#endif\n\n /* functions */\n\n#ifdef HAVE_STRERROR\n extern char *strerror OF((int));\n# define zstrerror(errnum) strerror(errnum)\n#else\n# define zstrerror(errnum) \"\"\n#endif\n\n#if defined(pyr)\n# define NO_MEMCPY\n#endif\n#if (defined(M_I86SM) || defined(M_I86MM)) && !defined(_MSC_VER)\n /* Use our own functions for small and medium model with MSC <= 5.0.\n * You may have to use the same strategy for Borland C (untested).\n */\n# define NO_MEMCPY\n#endif\n#if defined(STDC) && !defined(HAVE_MEMCPY) && !defined(NO_MEMCPY)\n# define 
HAVE_MEMCPY\n#endif\n#ifdef HAVE_MEMCPY\n# ifdef SMALL_MEDIUM /* MSDOS small or medium model */\n# define zmemcpy _fmemcpy\n# define zmemcmp _fmemcmp\n# define zmemzero(dest, len) _fmemset(dest, 0, len)\n# else\n# define zmemcpy memcpy\n# define zmemcmp memcmp\n# define zmemzero(dest, len) memset(dest, 0, len)\n# endif\n#else\n extern void zmemcpy OF((Bytef* dest, Bytef* source, uInt len));\n extern int zmemcmp OF((Bytef* s1, Bytef* s2, uInt len));\n extern void zmemzero OF((Bytef* dest, uInt len));\n#endif\n\n/* Diagnostic functions */\n#ifdef DEBUG_ZLIB\n# include \n# ifndef verbose\n# define verbose 0\n# endif\n extern void z_error OF((char *m));\n# define Assert(cond,msg) {if(!(cond)) z_error(msg);}\n# define Trace(x) fprintf x\n# define Tracev(x) {if (verbose) fprintf x ;}\n# define Tracevv(x) {if (verbose>1) fprintf x ;}\n# define Tracec(c,x) {if (verbose && (c)) fprintf x ;}\n# define Tracecv(c,x) {if (verbose>1 && (c)) fprintf x ;}\n#else\n# define Assert(cond,msg)\n# define Trace(x)\n# define Tracev(x)\n# define Tracevv(x)\n# define Tracec(c,x)\n# define Tracecv(c,x)\n#endif\n\n\ntypedef uLong (*check_func) OF((uLong check, const Bytef *buf, uInt len));\n\nvoidpf zcalloc OF((voidpf opaque, unsigned items, unsigned size));\nvoid zcfree OF((voidpf opaque, voidpf ptr));\n\n#define ZALLOC(strm, items, size) \\\n (*((strm)->zalloc))((strm)->opaque, (items), (size))\n#define ZFREE(strm, addr) (*((strm)->zfree))((strm)->opaque, (voidpf)(addr))\n#define TRY_FREE(s, p) {if (p) ZFREE(s, p);}\n\n#endif /* _Z_UTIL_H */\n/* --- zutil.h */\n\n/* +++ deflate.h */\n/* deflate.h -- internal compression state\n * Copyright (C) 1995-1996 Jean-loup Gailly\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/* WARNING: this file should *not* be used by applications. It is\n part of the implementation of the compression library and is\n subject to change. 
Applications should only use zlib.h.\n */\n\n/* From: deflate.h,v 1.10 1996/07/02 12:41:00 me Exp $ */\n\n#ifndef _DEFLATE_H\n#define _DEFLATE_H\n\n/* #include \"zutil.h\" */\n\n/* ===========================================================================\n * Internal compression state.\n */\n\n#define LENGTH_CODES 29\n/* number of length codes, not counting the special END_BLOCK code */\n\n#define LITERALS 256\n/* number of literal bytes 0..255 */\n\n#define L_CODES (LITERALS+1+LENGTH_CODES)\n/* number of Literal or Length codes, including the END_BLOCK code */\n\n#define D_CODES 30\n/* number of distance codes */\n\n#define BL_CODES 19\n/* number of codes used to transfer the bit lengths */\n\n#define HEAP_SIZE (2*L_CODES+1)\n/* maximum heap size */\n\n#define MAX_BITS 15\n/* All codes must not exceed MAX_BITS bits */\n\n#define INIT_STATE 42\n#define BUSY_STATE 113\n#define FINISH_STATE 666\n/* Stream status */\n\n\n/* Data structure describing a single value and its code string. */\ntypedef struct ct_data_s {\n union {\n ush freq; /* frequency count */\n ush code; /* bit string */\n } fc;\n union {\n ush dad; /* father node in Huffman tree */\n ush len; /* length of bit string */\n } dl;\n} FAR ct_data;\n\n#define Freq fc.freq\n#define Code fc.code\n#define Dad dl.dad\n#define Len dl.len\n\ntypedef struct static_tree_desc_s static_tree_desc;\n\ntypedef struct tree_desc_s {\n ct_data *dyn_tree; /* the dynamic tree */\n int max_code; /* largest code with non zero frequency */\n static_tree_desc *stat_desc; /* the corresponding static tree */\n} FAR tree_desc;\n\ntypedef ush Pos;\ntypedef Pos FAR Posf;\ntypedef unsigned IPos;\n\n/* A Pos is an index in the character window. We use short instead of int to\n * save space in the various tables. 
IPos is used only for parameter passing.\n */\n\ntypedef struct deflate_state {\n z_streamp strm; /* pointer back to this zlib stream */\n int status; /* as the name implies */\n Bytef *pending_buf; /* output still pending */\n ulg pending_buf_size; /* size of pending_buf */\n Bytef *pending_out; /* next pending byte to output to the stream */\n int pending; /* nb of bytes in the pending buffer */\n int noheader; /* suppress zlib header and adler32 */\n Byte data_type; /* UNKNOWN, BINARY or ASCII */\n Byte method; /* STORED (for zip only) or DEFLATED */\n int last_flush; /* value of flush param for previous deflate call */\n\n /* used by deflate.c: */\n\n uInt w_size; /* LZ77 window size (32K by default) */\n uInt w_bits; /* log2(w_size) (8..16) */\n uInt w_mask; /* w_size - 1 */\n\n Bytef *window;\n /* Sliding window. Input bytes are read into the second half of the window,\n * and move to the first half later to keep a dictionary of at least wSize\n * bytes. With this organization, matches are limited to a distance of\n * wSize-MAX_MATCH bytes, but this ensures that IO is always\n * performed with a length multiple of the block size. Also, it limits\n * the window size to 64K, which is quite useful on MSDOS.\n * To do: use the user input buffer as sliding window.\n */\n\n ulg window_size;\n /* Actual size of window: 2*wSize, except when the user input buffer\n * is directly used as sliding window.\n */\n\n Posf *prev;\n /* Link to older string with same hash index. To limit the size of this\n * array to 64K, this link is maintained only for the last 32K strings.\n * An index in this array is thus a window index modulo 32K.\n */\n\n Posf *head; /* Heads of the hash chains or NIL. */\n\n uInt ins_h; /* hash index of string to be inserted */\n uInt hash_size; /* number of elements in hash table */\n uInt hash_bits; /* log2(hash_size) */\n uInt hash_mask; /* hash_size-1 */\n\n uInt hash_shift;\n /* Number of bits by which ins_h must be shifted at each input\n * step. 
It must be such that after MIN_MATCH steps, the oldest\n * byte no longer takes part in the hash key, that is:\n * hash_shift * MIN_MATCH >= hash_bits\n */\n\n long block_start;\n /* Window position at the beginning of the current output block. Gets\n * negative when the window is moved backwards.\n */\n\n uInt match_length; /* length of best match */\n IPos prev_match; /* previous match */\n int match_available; /* set if previous match exists */\n uInt strstart; /* start of string to insert */\n uInt match_start; /* start of matching string */\n uInt lookahead; /* number of valid bytes ahead in window */\n\n uInt prev_length;\n /* Length of the best match at previous step. Matches not greater than this\n * are discarded. This is used in the lazy match evaluation.\n */\n\n uInt max_chain_length;\n /* To speed up deflation, hash chains are never searched beyond this\n * length. A higher limit improves compression ratio but degrades the\n * speed.\n */\n\n uInt max_lazy_match;\n /* Attempt to find a better match only when the current match is strictly\n * smaller than this value. This mechanism is used only for compression\n * levels >= 4.\n */\n# define max_insert_length max_lazy_match\n /* Insert new strings in the hash table only if the match length is not\n * greater than this length. 
This saves time but degrades compression.\n * max_insert_length is used only for compression levels <= 3.\n */\n\n int level; /* compression level (1..9) */\n int strategy; /* favor or force Huffman coding*/\n\n uInt good_match;\n /* Use a faster search when the previous match is longer than this */\n\n int nice_match; /* Stop searching when current match exceeds this */\n\n /* used by trees.c: */\n /* Didn't use ct_data typedef below to supress compiler warning */\n struct ct_data_s dyn_ltree[HEAP_SIZE]; /* literal and length tree */\n struct ct_data_s dyn_dtree[2*D_CODES+1]; /* distance tree */\n struct ct_data_s bl_tree[2*BL_CODES+1]; /* Huffman tree for bit lengths */\n\n struct tree_desc_s l_desc; /* desc. for literal tree */\n struct tree_desc_s d_desc; /* desc. for distance tree */\n struct tree_desc_s bl_desc; /* desc. for bit length tree */\n\n ush bl_count[MAX_BITS+1];\n /* number of codes at each bit length for an optimal tree */\n\n int heap[2*L_CODES+1]; /* heap used to build the Huffman trees */\n int heap_len; /* number of elements in the heap */\n int heap_max; /* element of largest frequency */\n /* The sons of heap[n] are heap[2*n] and heap[2*n+1]. heap[0] is not used.\n * The same heap array is used to build all trees.\n */\n\n uch depth[2*L_CODES+1];\n /* Depth of each subtree used as tie breaker for trees of equal frequency\n */\n\n uchf *l_buf; /* buffer for literals or lengths */\n\n uInt lit_bufsize;\n /* Size of match buffer for literals/lengths. There are 4 reasons for\n * limiting lit_bufsize to 64K:\n * - frequencies can be kept in 16 bit counters\n * - if compression is not successful for the first block, all input\n * data is still in the window so we can still emit a stored block even\n * when input comes from standard input. 
(This can also be done for\n * all blocks if lit_bufsize is not greater than 32K.)\n * - if compression is not successful for a file smaller than 64K, we can\n * even emit a stored file instead of a stored block (saving 5 bytes).\n * This is applicable only for zip (not gzip or zlib).\n * - creating new Huffman trees less frequently may not provide fast\n * adaptation to changes in the input data statistics. (Take for\n * example a binary file with poorly compressible code followed by\n * a highly compressible string table.) Smaller buffer sizes give\n * fast adaptation but have of course the overhead of transmitting\n * trees more frequently.\n * - I can't count above 4\n */\n\n uInt last_lit; /* running index in l_buf */\n\n ushf *d_buf;\n /* Buffer for distances. To simplify the code, d_buf and l_buf have\n * the same number of elements. To use different lengths, an extra flag\n * array would be necessary.\n */\n\n ulg opt_len; /* bit length of current block with optimal trees */\n ulg static_len; /* bit length of current block with static trees */\n ulg compressed_len; /* total bit length of compressed file */\n uInt matches; /* number of string matches in current block */\n int last_eob_len; /* bit length of EOB code for last block */\n\n#ifdef DEBUG_ZLIB\n ulg bits_sent; /* bit length of the compressed data */\n#endif\n\n ush bi_buf;\n /* Output buffer. bits are inserted starting at the bottom (least\n * significant bits).\n */\n int bi_valid;\n /* Number of valid bits in bi_buf. 
All bits above the last valid bit\n * are always zero.\n */\n\n} FAR deflate_state;\n\n/* Output a byte on the stream.\n * IN assertion: there is enough room in pending_buf.\n */\n#define put_byte(s, c) {s->pending_buf[s->pending++] = (c);}\n\n\n#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)\n/* Minimum amount of lookahead, except at the end of the input file.\n * See deflate.c for comments about the MIN_MATCH+1.\n */\n\n#define MAX_DIST(s) ((s)->w_size-MIN_LOOKAHEAD)\n/* In order to simplify the code, particularly on 16 bit machines, match\n * distances are limited to MAX_DIST instead of WSIZE.\n */\n\n /* in trees.c */\nvoid _tr_init OF((deflate_state *s));\nint _tr_tally OF((deflate_state *s, unsigned dist, unsigned lc));\nulg _tr_flush_block OF((deflate_state *s, charf *buf, ulg stored_len,\n\t\t\t int eof));\nvoid _tr_align OF((deflate_state *s));\nvoid _tr_stored_block OF((deflate_state *s, charf *buf, ulg stored_len,\n int eof));\nvoid _tr_stored_type_only OF((deflate_state *));\n\n#endif\n/* --- deflate.h */\n\n/* +++ deflate.c */\n/* deflate.c -- compress data using the deflation algorithm\n * Copyright (C) 1995-1996 Jean-loup Gailly.\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/*\n * ALGORITHM\n *\n * The \"deflation\" process depends on being able to identify portions\n * of the input text which are identical to earlier input (within a\n * sliding window trailing behind the input currently being processed).\n *\n * The most straightforward technique turns out to be the fastest for\n * most input files: try all possible matches and select the longest.\n * The key feature of this algorithm is that insertions into the string\n * dictionary are very simple and thus fast, and deletions are avoided\n * completely. Insertions are performed at each input character, whereas\n * string matches are performed only when the previous match ends. 
So it\n * is preferable to spend more time in matches to allow very fast string\n * insertions and avoid deletions. The matching algorithm for small\n * strings is inspired from that of Rabin & Karp. A brute force approach\n * is used to find longer strings when a small match has been found.\n * A similar algorithm is used in comic (by Jan-Mark Wams) and freeze\n * (by Leonid Broukhis).\n * A previous version of this file used a more sophisticated algorithm\n * (by Fiala and Greene) which is guaranteed to run in linear amortized\n * time, but has a larger average cost, uses more memory and is patented.\n * However the F&G algorithm may be faster for some highly redundant\n * files if the parameter max_chain_length (described below) is too large.\n *\n * ACKNOWLEDGEMENTS\n *\n * The idea of lazy evaluation of matches is due to Jan-Mark Wams, and\n * I found it in 'freeze' written by Leonid Broukhis.\n * Thanks to many people for bug reports and testing.\n *\n * REFERENCES\n *\n * Deutsch, L.P.,\"DEFLATE Compressed Data Format Specification\".\n * Available in ftp://ds.internic.net/rfc/rfc1951.txt\n *\n * A description of the Rabin and Karp algorithm is given in the book\n * \"Algorithms\" by R. Sedgewick, Addison-Wesley, p252.\n *\n * Fiala,E.R., and Greene,D.H.\n * Data Compression with Finite Windows, Comm.ACM, 32,4 (1989) 490-595\n *\n */\n\n/* From: deflate.c,v 1.15 1996/07/24 13:40:58 me Exp $ */\n\n/* #include \"deflate.h\" */\n\nchar deflate_copyright[] = \" deflate 1.0.4 Copyright 1995-1996 Jean-loup Gailly \";\n/*\n If you use the zlib library in a product, an acknowledgment is welcome\n in the documentation of your product. 
If for some reason you cannot\n include such an acknowledgment, I would appreciate that you keep this\n copyright string in the executable of your product.\n */\n\n/* ===========================================================================\n * Function prototypes.\n */\ntypedef enum {\n need_more, /* block not completed, need more input or more output */\n block_done, /* block flush performed */\n finish_started, /* finish started, need only more output at next deflate */\n finish_done /* finish done, accept no more input or output */\n} block_state;\n\ntypedef block_state (*compress_func) OF((deflate_state *s, int flush));\n/* Compression function. Returns the block state after the call. */\n\nlocal void fill_window OF((deflate_state *s));\nlocal block_state deflate_stored OF((deflate_state *s, int flush));\nlocal block_state deflate_fast OF((deflate_state *s, int flush));\nlocal block_state deflate_slow OF((deflate_state *s, int flush));\nlocal void lm_init OF((deflate_state *s));\nlocal void putShortMSB OF((deflate_state *s, uInt b));\nlocal void flush_pending OF((z_streamp strm));\nlocal int read_buf OF((z_streamp strm, charf *buf, unsigned size));\n#ifdef ASMV\n void match_init OF((void)); /* asm code initialization */\n uInt longest_match OF((deflate_state *s, IPos cur_match));\n#else\nlocal uInt longest_match OF((deflate_state *s, IPos cur_match));\n#endif\n\n#ifdef DEBUG_ZLIB\nlocal void check_match OF((deflate_state *s, IPos start, IPos match,\n int length));\n#endif\n\n/* ===========================================================================\n * Local data\n */\n\n#define NIL 0\n/* Tail of hash chains */\n\n#ifndef TOO_FAR\n# define TOO_FAR 4096\n#endif\n/* Matches of length 3 are discarded if their distance exceeds TOO_FAR */\n\n#define MIN_LOOKAHEAD (MAX_MATCH+MIN_MATCH+1)\n/* Minimum amount of lookahead, except at the end of the input file.\n * See deflate.c for comments about the MIN_MATCH+1.\n */\n\n/* Values for max_lazy_match, good_match 
and max_chain_length, depending on\n * the desired pack level (0..9). The values given below have been tuned to\n * exclude worst case performance for pathological files. Better values may be\n * found for specific files.\n */\ntypedef struct config_s {\n ush good_length; /* reduce lazy search above this match length */\n ush max_lazy; /* do not perform lazy search above this match length */\n ush nice_length; /* quit search above this match length */\n ush max_chain;\n compress_func func;\n} config;\n\nlocal config configuration_table[10] = {\n/* good lazy nice chain */\n/* 0 */ {0, 0, 0, 0, deflate_stored}, /* store only */\n/* 1 */ {4, 4, 8, 4, deflate_fast}, /* maximum speed, no lazy matches */\n/* 2 */ {4, 5, 16, 8, deflate_fast},\n/* 3 */ {4, 6, 32, 32, deflate_fast},\n\n/* 4 */ {4, 4, 16, 16, deflate_slow}, /* lazy matches */\n/* 5 */ {8, 16, 32, 32, deflate_slow},\n/* 6 */ {8, 16, 128, 128, deflate_slow},\n/* 7 */ {8, 32, 128, 256, deflate_slow},\n/* 8 */ {32, 128, 258, 1024, deflate_slow},\n/* 9 */ {32, 258, 258, 4096, deflate_slow}}; /* maximum compression */\n\n/* Note: the deflate() code requires max_lazy >= MIN_MATCH and max_chain >= 4\n * For deflate_fast() (levels <= 3) good is ignored and lazy has a different\n * meaning.\n */\n\n#define EQUAL 0\n/* result of memcmp for equal strings */\n\n#ifndef NO_DUMMY_DECL\nstruct static_tree_desc_s {int dummy;}; /* for buggy compilers */\n#endif\n\n/* ===========================================================================\n * Update a hash value with the given input byte\n * IN assertion: all calls to to UPDATE_HASH are made with consecutive\n * input characters, so that a running hash key can be computed from the\n * previous key instead of complete recalculation each time.\n */\n#define UPDATE_HASH(s,h,c) (h = (((h)<hash_shift) ^ (c)) & s->hash_mask)\n\n\n/* ===========================================================================\n * Insert string str in the dictionary and set match_head to the 
previous head
 * of the hash chain (the most recent string with same hash key). Return
 * the previous length of the hash chain.
 * IN assertion: all calls to INSERT_STRING are made with consecutive
 * input characters and the first MIN_MATCH bytes of str are valid
 * (except for the last MIN_MATCH-1 bytes of the input file).
 */
#define INSERT_STRING(s, str, match_head) \
   (UPDATE_HASH(s, s->ins_h, s->window[(str) + (MIN_MATCH-1)]), \
    s->prev[(str) & s->w_mask] = match_head = s->head[s->ins_h], \
    s->head[s->ins_h] = (Pos)(str))

/* ===========================================================================
 * Initialize the hash table (avoiding 64K overflow for 16 bit systems).
 * prev[] will be initialized on the fly.
 */
#define CLEAR_HASH(s) \
    s->head[s->hash_size-1] = NIL; \
    zmemzero((charf *)s->head, (unsigned)(s->hash_size-1)*sizeof(*s->head));

/* =========================================================================
 * Versioned entry point behind the deflateInit() macro: checks that the
 * caller was compiled against a compatible zlib (same major version digit
 * and same sizeof(z_stream)) and delegates to deflateInit2_ with the
 * documented defaults (MAX_WBITS window, DEF_MEM_LEVEL, default strategy).
 */
int deflateInit_(strm, level, version, stream_size)
    z_streamp strm;
    int level;
    const char *version;
    int stream_size;
{
    return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
                         Z_DEFAULT_STRATEGY, version, stream_size);
    /* To do: ignore strm->next_in if we use it as window */
}

/* =========================================================================
 * Full initializer: validates all parameters, allocates the deflate_state
 * and its buffers (window, prev/head hash arrays, pending/overlay buffer),
 * then resets the stream. On any allocation failure it returns Z_MEM_ERROR
 * after freeing whatever was allocated (via deflateEnd).
 */
int deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
                  version, stream_size)
    z_streamp strm;
    int level;
    int method;
    int windowBits;
    int memLevel;
    int strategy;
    const char *version;
    int stream_size;
{
    deflate_state *s;
    int noheader = 0;
    static char* my_version = ZLIB_VERSION;

    ushf *overlay;
    /* We overlay pending_buf and d_buf+l_buf. This works since the average
     * output size for (length,distance) codes is <= 24 bits.
     */

    /* Version check: only the first (major) character must match. */
    if (version == Z_NULL || version[0] != my_version[0] ||
        stream_size != sizeof(z_stream)) {
        return Z_VERSION_ERROR;
    }
    if (strm == Z_NULL) return Z_STREAM_ERROR;

    strm->msg = Z_NULL;
#ifndef NO_ZCFUNCS
    /* Supply default allocators when the application provided none. */
    if (strm->zalloc == Z_NULL) {
        strm->zalloc = zcalloc;
        strm->opaque = (voidpf)0;
    }
    if (strm->zfree == Z_NULL) strm->zfree = zcfree;
#endif

    if (level == Z_DEFAULT_COMPRESSION) level = 6;

    if (windowBits < 0) { /* undocumented feature: suppress zlib header */
        noheader = 1;
        windowBits = -windowBits;
    }
    if (memLevel < 1 || memLevel > MAX_MEM_LEVEL || method != Z_DEFLATED ||
        windowBits < 9 || windowBits > 15 || level < 0 || level > 9 ||
        strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
        return Z_STREAM_ERROR;
    }
    s = (deflate_state *) ZALLOC(strm, 1, sizeof(deflate_state));
    if (s == Z_NULL) return Z_MEM_ERROR;
    strm->state = (struct internal_state FAR *)s;
    s->strm = strm;

    s->noheader = noheader;
    s->w_bits = windowBits;
    s->w_size = 1 << s->w_bits;
    s->w_mask = s->w_size - 1;

    s->hash_bits = memLevel + 7;
    s->hash_size = 1 << s->hash_bits;
    s->hash_mask = s->hash_size - 1;
    s->hash_shift = ((s->hash_bits+MIN_MATCH-1)/MIN_MATCH);

    /* window is 2*w_size bytes; prev/head are the hash chain arrays. */
    s->window = (Bytef *) ZALLOC(strm, s->w_size, 2*sizeof(Byte));
    s->prev   = (Posf *)  ZALLOC(strm, s->w_size, sizeof(Pos));
    s->head   = (Posf *)  ZALLOC(strm, s->hash_size, sizeof(Pos));

    s->lit_bufsize = 1 << (memLevel + 6); /* 16K elements by default */

    /* Single allocation shared by pending_buf and (d_buf, l_buf). */
    overlay = (ushf *) ZALLOC(strm, s->lit_bufsize, sizeof(ush)+2);
    s->pending_buf = (uchf *) overlay;
    s->pending_buf_size = (ulg)s->lit_bufsize * (sizeof(ush)+2L);

    if (s->window == Z_NULL || s->prev == Z_NULL || s->head == Z_NULL ||
        s->pending_buf == Z_NULL) {
        strm->msg = (const char*)ERR_MSG(Z_MEM_ERROR);
        deflateEnd (strm);
        return Z_MEM_ERROR;
    }
    s->d_buf = overlay + s->lit_bufsize/sizeof(ush);
    s->l_buf = s->pending_buf + (1+sizeof(ush))*s->lit_bufsize;

    s->level = level;
    s->strategy = strategy;
    s->method = (Byte)method;

    return deflateReset(strm);
}

/* =========================================================================
 * Prime the sliding window with a preset dictionary. Must be called right
 * after initialization (enforced by the INIT_STATE check) and before any
 * data is compressed. Only the last MAX_DIST(s) bytes of a long dictionary
 * are used; all strings (except the final two bytes) are inserted into the
 * hash table so the compressor can match against the dictionary.
 */
int deflateSetDictionary (strm, dictionary, dictLength)
    z_streamp strm;
    const Bytef *dictionary;
    uInt dictLength;
{
    deflate_state *s;
    uInt length = dictLength;
    uInt n;
    IPos hash_head = 0;

    if (strm == Z_NULL || strm->state == Z_NULL || dictionary == Z_NULL)
        return Z_STREAM_ERROR;

    s = (deflate_state *) strm->state;
    if (s->status != INIT_STATE) return Z_STREAM_ERROR;

    /* adler over the WHOLE dictionary (even the part we discard below),
     * so deflate() can later emit it for inflateSetDictionary to check. */
    strm->adler = adler32(strm->adler, dictionary, dictLength);

    if (length < MIN_MATCH) return Z_OK;
    if (length > MAX_DIST(s)) {
        length = MAX_DIST(s);
#ifndef USE_DICT_HEAD
        dictionary += dictLength - length; /* use the tail of the dictionary */
#endif
    }
    zmemcpy((charf *)s->window, dictionary, length);
    s->strstart = length;
    s->block_start = (long)length;

    /* Insert all strings in the hash table (except for the last two bytes).
     * s->lookahead stays null, so s->ins_h will be recomputed at the next
     * call of fill_window.
     */
    s->ins_h = s->window[0];
    UPDATE_HASH(s, s->ins_h, s->window[1]);
    for (n = 0; n <= length - MIN_MATCH; n++) {
        INSERT_STRING(s, n, hash_head);
    }
    if (hash_head) hash_head = 0; /* to make compiler happy */
    return Z_OK;
}

/* =========================================================================
 * Reset the stream for a new compression run without reallocating any
 * buffers: clears counters, restores the pending buffer pointers, and
 * reinitializes the Huffman (_tr_init) and longest-match (lm_init) state.
 */
int deflateReset (strm)
    z_streamp strm;
{
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL ||
        strm->zalloc == Z_NULL || strm->zfree == Z_NULL) return Z_STREAM_ERROR;

    strm->total_in = strm->total_out = 0;
    strm->msg = Z_NULL; /* use zfree if we ever allocate msg dynamically */
    strm->data_type = Z_UNKNOWN;

    s = (deflate_state *)strm->state;
    s->pending = 0;
    s->pending_out = s->pending_buf;

    if (s->noheader < 0) {
        s->noheader = 0; /* was set to -1 by deflate(..., Z_FINISH); */
    }
    s->status = s->noheader ? BUSY_STATE : INIT_STATE;
    strm->adler = 1;
    s->last_flush = Z_NO_FLUSH;

    _tr_init(s);
    lm_init(s);

    return Z_OK;
}

/* =========================================================================
 * Change compression level/strategy mid-stream. If the change switches to
 * a different compress function (deflate_stored/fast/slow) while data has
 * already been consumed, the current block is first flushed with
 * Z_PARTIAL_FLUSH so already-buffered data is encoded with the old
 * parameters.
 */
int deflateParams(strm, level, strategy)
    z_streamp strm;
    int level;
    int strategy;
{
    deflate_state *s;
    compress_func func;
    int err = Z_OK;

    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
    s = (deflate_state *) strm->state;

    if (level == Z_DEFAULT_COMPRESSION) {
        level = 6;
    }
    if (level < 0 || level > 9 || strategy < 0 || strategy > Z_HUFFMAN_ONLY) {
        return Z_STREAM_ERROR;
    }
    func = configuration_table[s->level].func;

    if (func != configuration_table[level].func && strm->total_in != 0) {
        /* Flush the last buffer: */
        err = deflate(strm, Z_PARTIAL_FLUSH);
    }
    if (s->level != level) {
        s->level = level;
        s->max_lazy_match   = configuration_table[level].max_lazy;
        s->good_match       = configuration_table[level].good_length;
        s->nice_match       = configuration_table[level].nice_length;
        s->max_chain_length = configuration_table[level].max_chain;
    }
    s->strategy = strategy;
    return err;
}

/* =========================================================================
 * Put a short in the pending buffer. The 16-bit value is put in MSB order.
 * IN assertion: the stream state is correct and there is enough room in
 * pending_buf.
 */
local void putShortMSB (s, b)
    deflate_state *s;
    uInt b;
{
    put_byte(s, (Byte)(b >> 8));
    put_byte(s, (Byte)(b & 0xff));
}

/* =========================================================================
 * Flush as much pending output as possible.
 * All deflate() output goes
 * through this function so some applications may wish to modify it
 * to avoid allocating a large strm->next_out buffer and copying into it.
 * (See also read_buf()).
 */
local void flush_pending(strm)
    z_streamp strm;
{
    deflate_state *s = (deflate_state *) strm->state;
    unsigned len = s->pending;

    /* Copy at most avail_out bytes from the pending buffer to next_out. */
    if (len > strm->avail_out) len = strm->avail_out;
    if (len == 0) return;

    if (strm->next_out != Z_NULL) {
        zmemcpy(strm->next_out, s->pending_out, len);
        strm->next_out += len;
    }
    s->pending_out += len;
    strm->total_out += len;
    strm->avail_out -= len;
    s->pending -= len;
    if (s->pending == 0) {
        s->pending_out = s->pending_buf;
    }
}

/* =========================================================================
 * Main compression entry point: consumes avail_in input, produces up to
 * avail_out output, honoring the flush mode. Writes the zlib header on the
 * first call (unless noheader), dispatches to the configured compress
 * function for the current level, emits flush markers per the flush type,
 * and writes the adler32 trailer on Z_FINISH.
 */
int deflate (strm, flush)
    z_streamp strm;
    int flush;
{
    int old_flush; /* value of flush param for previous deflate call */
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL ||
        flush > Z_FINISH || flush < 0) {
        return Z_STREAM_ERROR;
    }
    s = (deflate_state *) strm->state;

    if ((strm->next_in == Z_NULL && strm->avail_in != 0) ||
        (s->status == FINISH_STATE && flush != Z_FINISH)) {
        ERR_RETURN(strm, Z_STREAM_ERROR);
    }
    if (strm->avail_out == 0) ERR_RETURN(strm, Z_BUF_ERROR);

    s->strm = strm; /* just in case */
    old_flush = s->last_flush;
    s->last_flush = flush;

    /* Write the zlib header */
    if (s->status == INIT_STATE) {

        uInt header = (Z_DEFLATED + ((s->w_bits-8)<<4)) << 8;
        uInt level_flags = (s->level-1) >> 1;

        if (level_flags > 3) level_flags = 3;
        header |= (level_flags << 6);
        if (s->strstart != 0) header |= PRESET_DICT;
        /* FCHECK bits: make the 16-bit header a multiple of 31. */
        header += 31 - (header % 31);

        s->status = BUSY_STATE;
        putShortMSB(s, header);

        /* Save the adler32 of the preset dictionary: */
        if (s->strstart != 0) {
            putShortMSB(s, (uInt)(strm->adler >> 16));
            putShortMSB(s, (uInt)(strm->adler & 0xffff));
        }
        strm->adler = 1L;
    }

    /* Flush as much pending output as possible */
    if (s->pending != 0) {
        flush_pending(strm);
        if (strm->avail_out == 0) {
            /* Since avail_out is 0, deflate will be called again with
             * more output space, but possibly with both pending and
             * avail_in equal to zero. There won't be anything to do,
             * but this is not an error situation so make sure we
             * return OK instead of BUF_ERROR at next call of deflate:
             */
            s->last_flush = -1;
            return Z_OK;
        }

    /* Make sure there is something to do and avoid duplicate consecutive
     * flushes. For repeated and useless calls with Z_FINISH, we keep
     * returning Z_STREAM_END instead of Z_BUFF_ERROR.
     */
    } else if (strm->avail_in == 0 && flush <= old_flush &&
               flush != Z_FINISH) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* User must not provide more input after the first FINISH: */
    if (s->status == FINISH_STATE && strm->avail_in != 0) {
        ERR_RETURN(strm, Z_BUF_ERROR);
    }

    /* Start a new block or continue the current one.
     */
    if (strm->avail_in != 0 || s->lookahead != 0 ||
        (flush != Z_NO_FLUSH && s->status != FINISH_STATE)) {
        block_state bstate;

        bstate = (*(configuration_table[s->level].func))(s, flush);

        if (bstate == finish_started || bstate == finish_done) {
            s->status = FINISH_STATE;
        }
        if (bstate == need_more || bstate == finish_started) {
            if (strm->avail_out == 0) {
                s->last_flush = -1; /* avoid BUF_ERROR next call, see above */
            }
            return Z_OK;
            /* If flush != Z_NO_FLUSH && avail_out == 0, the next call
             * of deflate should use the same flush parameter to make sure
             * that the flush is complete. So we don't have to output an
             * empty block here, this will be done at next call. This also
             * ensures that for a very small output buffer, we emit at most
             * one empty block.
             */
        }
        if (bstate == block_done) {
            if (flush == Z_PARTIAL_FLUSH) {
                _tr_align(s);
            } else if (flush == Z_PACKET_FLUSH) {
                /* Output just the 3-bit `stored' block type value,
                   but not a zero length. */
                _tr_stored_type_only(s);
            } else { /* FULL_FLUSH or SYNC_FLUSH */
                _tr_stored_block(s, (char*)0, 0L, 0);
                /* For a full flush, this empty block will be recognized
                 * as a special marker by inflate_sync().
                 */
                if (flush == Z_FULL_FLUSH) {
                    CLEAR_HASH(s);             /* forget history */
                }
            }
            flush_pending(strm);
            if (strm->avail_out == 0) {
              s->last_flush = -1; /* avoid BUF_ERROR at next call, see above */
              return Z_OK;
            }
        }
    }
    Assert(strm->avail_out > 0, "bug2");

    if (flush != Z_FINISH) return Z_OK;
    if (s->noheader) return Z_STREAM_END;

    /* Write the zlib trailer (adler32) */
    putShortMSB(s, (uInt)(strm->adler >> 16));
    putShortMSB(s, (uInt)(strm->adler & 0xffff));
    flush_pending(strm);
    /* If avail_out is zero, the application will call deflate again
     * to flush the rest.
     */
    s->noheader = -1; /* write the trailer only once! */
    return s->pending != 0 ? Z_OK : Z_STREAM_END;
}

/* =========================================================================
 * Free all state allocated for the stream. Returns Z_DATA_ERROR if the
 * stream was freed while still mid-compression (BUSY_STATE), Z_OK otherwise.
 */
int deflateEnd (strm)
    z_streamp strm;
{
    int status;
    deflate_state *s;

    if (strm == Z_NULL || strm->state == Z_NULL) return Z_STREAM_ERROR;
    s = (deflate_state *) strm->state;

    status = s->status;
    if (status != INIT_STATE && status != BUSY_STATE &&
        status != FINISH_STATE) {
      return Z_STREAM_ERROR;
    }

    /* Deallocate in reverse order of allocations: */
    TRY_FREE(strm, s->pending_buf);
    TRY_FREE(strm, s->head);
    TRY_FREE(strm, s->prev);
    TRY_FREE(strm, s->window);

    ZFREE(strm, s);
    strm->state = Z_NULL;

    return status == BUSY_STATE ? Z_DATA_ERROR : Z_OK;
}

/* =========================================================================
 * Copy the source state to the destination state.
 * Performs a deep copy: fresh window/prev/head/pending buffers are
 * allocated for dest and the contents copied, and internal pointers
 * (pending_out, d_buf, l_buf, tree descriptors) are re-based onto the
 * new allocations.
 */
int deflateCopy (dest, source)
    z_streamp dest;
    z_streamp source;
{
    deflate_state *ds;
    deflate_state *ss;
    ushf *overlay;

    if (source == Z_NULL || dest == Z_NULL || source->state == Z_NULL)
        return Z_STREAM_ERROR;
    ss = (deflate_state *) source->state;

    zmemcpy(dest, source, sizeof(*dest));

    ds = (deflate_state *) ZALLOC(dest, 1, sizeof(deflate_state));
    if (ds == Z_NULL) return Z_MEM_ERROR;
    dest->state = (struct internal_state FAR *) ds;
    zmemcpy(ds, ss, sizeof(*ds));
    ds->strm = dest;

    ds->window = (Bytef *) ZALLOC(dest, ds->w_size, 2*sizeof(Byte));
    ds->prev   = (Posf *)  ZALLOC(dest, ds->w_size, sizeof(Pos));
    ds->head   = (Posf *)  ZALLOC(dest, ds->hash_size, sizeof(Pos));
    overlay = (ushf *) ZALLOC(dest, ds->lit_bufsize, sizeof(ush)+2);
    ds->pending_buf = (uchf *) overlay;

    if (ds->window == Z_NULL || ds->prev == Z_NULL || ds->head == Z_NULL ||
        ds->pending_buf == Z_NULL) {
        deflateEnd (dest);
        return Z_MEM_ERROR;
    }
    /* ??? following zmemcpy doesn't work for 16-bit MSDOS */
    zmemcpy(ds->window, ss->window, ds->w_size * 2 * sizeof(Byte));
    zmemcpy(ds->prev, ss->prev, ds->w_size * sizeof(Pos));
    zmemcpy(ds->head, ss->head, ds->hash_size * sizeof(Pos));
    zmemcpy(ds->pending_buf, ss->pending_buf, (uInt)ds->pending_buf_size);

    ds->pending_out = ds->pending_buf + (ss->pending_out - ss->pending_buf);
    ds->d_buf = overlay + ds->lit_bufsize/sizeof(ush);
    ds->l_buf = ds->pending_buf + (1+sizeof(ush))*ds->lit_bufsize;

    ds->l_desc.dyn_tree = ds->dyn_ltree;
    ds->d_desc.dyn_tree = ds->dyn_dtree;
    ds->bl_desc.dyn_tree = ds->bl_tree;

    return Z_OK;
}

/* ===========================================================================
 * Return the number of bytes of output which are immediately available
 * for output from the compressor (i.e. bytes buffered in pending_buf).
 */
int deflateOutputPending (strm)
    z_streamp strm;
{
    if (strm == Z_NULL || strm->state == Z_NULL) return 0;

    return ((deflate_state *)(strm->state))->pending;
}

/* ===========================================================================
 * Read a new buffer from the current input stream, update the adler32
 * and total number of bytes read. All deflate() input goes through
 * this function so some applications may wish to modify it to avoid
 * allocating a large strm->next_in buffer and copying from it.
 * (See also flush_pending()).
 */
local int read_buf(strm, buf, size)
    z_streamp strm;
    charf *buf;
    unsigned size;
{
    unsigned len = strm->avail_in;

    if (len > size) len = size;
    if (len == 0) return 0;

    strm->avail_in -= len;

    /* adler is only maintained when a zlib header/trailer will be used. */
    if (!((deflate_state *)(strm->state))->noheader) {
        strm->adler = adler32(strm->adler, strm->next_in, len);
    }
    zmemcpy(buf, strm->next_in, len);
    strm->next_in += len;
    strm->total_in += len;

    return (int)len;
}

/* ===========================================================================
 * Initialize the "longest match" routines for a new zlib stream
 */
local void lm_init (s)
    deflate_state *s;
{
    s->window_size = (ulg)2L*s->w_size;

    CLEAR_HASH(s);

    /* Set the default configuration parameters:
     */
    s->max_lazy_match   = configuration_table[s->level].max_lazy;
    s->good_match       = configuration_table[s->level].good_length;
    s->nice_match       = configuration_table[s->level].nice_length;
    s->max_chain_length = configuration_table[s->level].max_chain;

    s->strstart = 0;
    s->block_start = 0L;
    s->lookahead = 0;
    s->match_length = s->prev_length = MIN_MATCH-1;
    s->match_available = 0;
    s->ins_h = 0;
#ifdef ASMV
    match_init(); /* initialize the asm code */
#endif
}

/* ===========================================================================
 * Set match_start to the longest match starting at the given string and
 * return its length.
 * Matches shorter or equal to prev_length are discarded,
 * in which case the result is equal to prev_length and match_start is
 * garbage.
 * IN assertions: cur_match is the head of the hash chain for the current
 *   string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
 * OUT assertion: the match length is not greater than s->lookahead.
 */
#ifndef ASMV
/* For 80x86 and 680x0, an optimized version will be provided in match.asm or
 * match.S. The code will be functionally equivalent.
 */
local uInt longest_match(s, cur_match)
    deflate_state *s;
    IPos cur_match;                             /* current match */
{
    unsigned chain_length = s->max_chain_length;/* max hash chain length */
    register Bytef *scan = s->window + s->strstart; /* current string */
    register Bytef *match;                      /* matched string */
    register int len;                           /* length of current match */
    int best_len = s->prev_length;              /* best match length so far */
    int nice_match = s->nice_match;             /* stop if match long enough */
    IPos limit = s->strstart > (IPos)MAX_DIST(s) ?
        s->strstart - (IPos)MAX_DIST(s) : NIL;
    /* Stop when cur_match becomes <= limit. To simplify the code,
     * we prevent matches with the string of window index 0.
     */
    Posf *prev = s->prev;
    uInt wmask = s->w_mask;

#ifdef UNALIGNED_OK
    /* Compare two bytes at a time. Note: this is not always beneficial.
     * Try with and without -DUNALIGNED_OK to check.
     */
    register Bytef *strend = s->window + s->strstart + MAX_MATCH - 1;
    register ush scan_start = *(ushf*)scan;
    register ush scan_end   = *(ushf*)(scan+best_len-1);
#else
    register Bytef *strend = s->window + s->strstart + MAX_MATCH;
    register Byte scan_end1  = scan[best_len-1];
    register Byte scan_end   = scan[best_len];
#endif

    /* The code is optimized for HASH_BITS >= 8 and MAX_MATCH-2 multiple of 16.
     * It is easy to get rid of this optimization if necessary.
     */
    Assert(s->hash_bits >= 8 && MAX_MATCH == 258, "Code too clever");

    /* Do not waste too much time if we already have a good match: */
    if (s->prev_length >= s->good_match) {
        chain_length >>= 2;
    }
    /* Do not look for matches beyond the end of the input. This is necessary
     * to make deflate deterministic.
     */
    if ((uInt)nice_match > s->lookahead) nice_match = s->lookahead;

    Assert((ulg)s->strstart <= s->window_size-MIN_LOOKAHEAD, "need lookahead");

    do {
        Assert(cur_match < s->strstart, "no future");
        match = s->window + cur_match;

        /* Skip to next match if the match length cannot increase
         * or if the match length is less than 2:
         */
#if (defined(UNALIGNED_OK) && MAX_MATCH == 258)
        /* This code assumes sizeof(unsigned short) == 2. Do not use
         * UNALIGNED_OK if your compiler uses a different size.
         */
        if (*(ushf*)(match+best_len-1) != scan_end ||
            *(ushf*)match != scan_start) continue;

        /* It is not necessary to compare scan[2] and match[2] since they are
         * always equal when the other bytes match, given that the hash keys
         * are equal and that HASH_BITS >= 8. Compare 2 bytes at a time at
         * strstart+3, +5, ... up to strstart+257. We check for insufficient
         * lookahead only every 4th comparison; the 128th check will be made
         * at strstart+257. If MAX_MATCH-2 is not a multiple of 8, it is
         * necessary to put more guard bytes at the end of the window, or
         * to check more often for insufficient lookahead.
         */
        Assert(scan[2] == match[2], "scan[2]?");
        scan++, match++;
        do {
        } while (*(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 *(ushf*)(scan+=2) == *(ushf*)(match+=2) &&
                 scan < strend);
        /* The funny "do {}" generates better code on most compilers */

        /* Here, scan <= window+strstart+257 */
        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");
        if (*scan == *match) scan++;

        len = (MAX_MATCH - 1) - (int)(strend-scan);
        scan = strend - (MAX_MATCH-1);

#else /* UNALIGNED_OK */

        if (match[best_len]   != scan_end  ||
            match[best_len-1] != scan_end1 ||
            *match            != *scan     ||
            *++match          != scan[1])      continue;

        /* The check at best_len-1 can be removed because it will be made
         * again later. (This heuristic is not always a win.)
         * It is not necessary to compare scan[2] and match[2] since they
         * are always equal when the other bytes match, given that
         * the hash keys are equal and that HASH_BITS >= 8.
         */
        scan += 2, match++;
        Assert(*scan == *match, "match[2]?");

        /* We check for insufficient lookahead only every 8th comparison;
         * the 256th check will be made at strstart+258.
         */
        do {
        } while (*++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 *++scan == *++match && *++scan == *++match &&
                 scan < strend);

        Assert(scan <= s->window+(unsigned)(s->window_size-1), "wild scan");

        len = MAX_MATCH - (int)(strend - scan);
        scan = strend - MAX_MATCH;

#endif /* UNALIGNED_OK */

        if (len > best_len) {
            s->match_start = cur_match;
            best_len = len;
            if (len >= nice_match) break;
#ifdef UNALIGNED_OK
            scan_end = *(ushf*)(scan+best_len-1);
#else
            scan_end1  = scan[best_len-1];
            scan_end   = scan[best_len];
#endif
        }
    } while ((cur_match = prev[cur_match & wmask]) > limit
             && --chain_length != 0);

    if ((uInt)best_len <= s->lookahead) return best_len;
    return s->lookahead;
}
#endif /* ASMV */

#ifdef DEBUG_ZLIB
/* ===========================================================================
 * Check that the match at match_start is indeed a match.
 */
local void check_match(s, start, match, length)
    deflate_state *s;
    IPos start, match;
    int length;
{
    /* check that the match is indeed a match */
    if (zmemcmp((charf *)s->window + match,
                (charf *)s->window + start, length) != EQUAL) {
        fprintf(stderr, " start %u, match %u, length %d\n",
                start, match, length);
        do {
            fprintf(stderr, "%c%c", s->window[match++], s->window[start++]);
        } while (--length != 0);
        z_error("invalid match");
    }
    if (z_verbose > 1) {
        fprintf(stderr,"\\[%d,%d]", start-match, length);
        do { putc(s->window[start++], stderr); } while (--length != 0);
    }
}
#else
# define check_match(s, start, match, length)
#endif

/* ===========================================================================
 * Fill the window when the lookahead becomes insufficient.
 * Updates strstart and lookahead.
 *
 * IN assertion: lookahead < MIN_LOOKAHEAD
 * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
 *    At least one byte has been read, or avail_in == 0; reads are
 *    performed for at least two bytes (required for the zip translate_eol
 *    option -- not supported here).
 */
local void fill_window(s)
    deflate_state *s;
{
    register unsigned n, m;
    register Posf *p;
    unsigned more;    /* Amount of free space at the end of the window. */
    uInt wsize = s->w_size;

    do {
        more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);

        /* Deal with !@#$% 64K limit: */
        if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
            more = wsize;

        } else if (more == (unsigned)(-1)) {
            /* Very unlikely, but possible on 16 bit machine if strstart == 0
             * and lookahead == 1 (input done one byte at time)
             */
            more--;

        /* If the window is almost full and there is insufficient lookahead,
         * move the upper half to the lower one to make room in the upper half.
         */
        } else if (s->strstart >= wsize+MAX_DIST(s)) {

            zmemcpy((charf *)s->window, (charf *)s->window+wsize,
                   (unsigned)wsize);
            s->match_start -= wsize;
            s->strstart    -= wsize; /* we now have strstart >= MAX_DIST */
            s->block_start -= (long) wsize;

            /* Slide the hash table (could be avoided with 32 bit values
               at the expense of memory usage). We slide even when level == 0
               to keep the hash table consistent if we switch back to level > 0
               later. (Using level 0 permanently is not an optimal usage of
               zlib, so we don't care about this pathological case.)
             */
            n = s->hash_size;
            p = &s->head[n];
            do {
                m = *--p;
                *p = (Pos)(m >= wsize ? m-wsize : NIL);
            } while (--n);

            n = wsize;
            p = &s->prev[n];
            do {
                m = *--p;
                *p = (Pos)(m >= wsize ? m-wsize : NIL);
                /* If n is not on any hash chain, prev[n] is garbage but
                 * its value will never be used.
                 */
            } while (--n);
            more += wsize;
        }
        if (s->strm->avail_in == 0) return;

        /* If there was no sliding:
         *    strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
         *    more == window_size - lookahead - strstart
         * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
         * => more >= window_size - 2*WSIZE + 2
         * In the BIG_MEM or MMAP case (not yet supported),
         *   window_size == input_size + MIN_LOOKAHEAD  &&
         *   strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
         * Otherwise, window_size == 2*WSIZE so more >= 2.
         * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
         */
        Assert(more >= 2, "more < 2");

        n = read_buf(s->strm, (charf *)s->window + s->strstart + s->lookahead,
                     more);
        s->lookahead += n;

        /* Initialize the hash value now that we have some input: */
        if (s->lookahead >= MIN_MATCH) {
            s->ins_h = s->window[s->strstart];
            UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
#if MIN_MATCH != 3
            Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
        }
        /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
         * but this is not important since only literal bytes will be emitted.
         */

    } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
}

/* ===========================================================================
 * Flush the current block, with given end-of-file flag.
 * IN assertion: strstart is set to the end of the current match.
 */
#define FLUSH_BLOCK_ONLY(s, eof) { \
   _tr_flush_block(s, (s->block_start >= 0L ? \
                   (charf *)&s->window[(unsigned)s->block_start] : \
                   (charf *)Z_NULL), \
                (ulg)((long)s->strstart - s->block_start), \
                (eof)); \
   s->block_start = s->strstart; \
   flush_pending(s->strm); \
   Tracev((stderr,"[FLUSH]")); \
}

/* Same but force premature exit if necessary.
 */
#define FLUSH_BLOCK(s, eof) { \
   FLUSH_BLOCK_ONLY(s, eof); \
   if (s->strm->avail_out == 0) return (eof) ? finish_started : need_more; \
}

/* ===========================================================================
 * Copy without compression as much as possible from the input stream, return
 * the current block state.
 * This function does not insert new strings in the dictionary since
 * uncompressible data is probably not useful. This function is used
 * only for the level=0 compression option.
 * NOTE: this function should be optimized to avoid extra copying from
 * window to pending_buf.
 */
local block_state deflate_stored(s, flush)
    deflate_state *s;
    int flush;
{
    /* Stored blocks are limited to 0xffff bytes, pending_buf is limited
     * to pending_buf_size, and each stored block has a 5 byte header:
     */
    ulg max_block_size = 0xffff;
    ulg max_start;

    if (max_block_size > s->pending_buf_size - 5) {
        max_block_size = s->pending_buf_size - 5;
    }

    /* Copy as much as possible from input to output: */
    for (;;) {
        /* Fill the window as much as possible: */
        if (s->lookahead <= 1) {

            Assert(s->strstart < s->w_size+MAX_DIST(s) ||
                   s->block_start >= (long)s->w_size, "slide too late");

            fill_window(s);
            if (s->lookahead == 0 && flush == Z_NO_FLUSH) return need_more;

            if (s->lookahead == 0) break; /* flush the current block */
        }
        Assert(s->block_start >= 0L, "block gone");

        s->strstart += s->lookahead;
        s->lookahead = 0;

        /* Emit a stored block if pending_buf will be full: */
        max_start = s->block_start + max_block_size;
        if (s->strstart == 0 || (ulg)s->strstart >= max_start) {
            /* strstart == 0 is possible when wraparound on 16-bit machine */
            s->lookahead = (uInt)(s->strstart - max_start);
            s->strstart = (uInt)max_start;
            FLUSH_BLOCK(s, 0);
        }
        /* Flush if we may have to slide, otherwise block_start may become
         * negative and the data will be gone:
         */
        if (s->strstart - (uInt)s->block_start >= MAX_DIST(s)) {
            FLUSH_BLOCK(s, 0);
        }
    }
    FLUSH_BLOCK(s, flush == Z_FINISH);
    return flush == Z_FINISH ? finish_done : block_done;
}

/* ===========================================================================
 * Compress as much as possible from the input stream, return the current
 * block state.
 * This function does not perform lazy evaluation of matches and inserts
 * new strings in the dictionary only for unmatched strings or for short
 * matches. It is used only for the fast compression options.
 */
local block_state deflate_fast(s, flush)
    deflate_state *s;
    int flush;
{
    IPos hash_head = NIL; /* head of the hash chain */
    int bflush;           /* set if current block must be flushed */

    for (;;) {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need MAX_MATCH bytes
         * for the next match, plus MIN_MATCH bytes to insert the
         * string following the next match.
         */
        if (s->lookahead < MIN_LOOKAHEAD) {
            fill_window(s);
            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
                return need_more;
            }
            if (s->lookahead == 0) break; /* flush the current block */
        }

        /* Insert the string window[strstart .. strstart+2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        if (s->lookahead >= MIN_MATCH) {
            INSERT_STRING(s, s->strstart, hash_head);
        }

        /* Find the longest match, discarding those <= prev_length.
         * At this point we have always match_length < MIN_MATCH
         */
        if (hash_head != NIL && s->strstart - hash_head <= MAX_DIST(s)) {
            /* To simplify the code, we prevent matches with the string
             * of window index 0 (in particular we have to avoid a match
             * of the string with itself at the start of the input file).
             */
            if (s->strategy != Z_HUFFMAN_ONLY) {
                s->match_length = longest_match (s, hash_head);
            }
            /* longest_match() sets match_start */
        }
        if (s->match_length >= MIN_MATCH) {
            check_match(s, s->strstart, s->match_start, s->match_length);

            bflush = _tr_tally(s, s->strstart - s->match_start,
                               s->match_length - MIN_MATCH);

            s->lookahead -= s->match_length;

            /* Insert new strings in the hash table only if the match length
             * is not too large. This saves time but degrades compression.
             */
            if (s->match_length <= s->max_insert_length &&
                s->lookahead >= MIN_MATCH) {
                s->match_length--; /* string at strstart already in hash table */
                do {
                    s->strstart++;
                    INSERT_STRING(s, s->strstart, hash_head);
                    /* strstart never exceeds WSIZE-MAX_MATCH, so there are
                     * always MIN_MATCH bytes ahead.
                     */
                } while (--s->match_length != 0);
                s->strstart++;
            } else {
                s->strstart += s->match_length;
                s->match_length = 0;
                s->ins_h = s->window[s->strstart];
                UPDATE_HASH(s, s->ins_h, s->window[s->strstart+1]);
#if MIN_MATCH != 3
                Call UPDATE_HASH() MIN_MATCH-3 more times
#endif
                /* If lookahead < MIN_MATCH, ins_h is garbage, but it does not
                 * matter since it will be recomputed at next deflate call.
                 */
            }
        } else {
            /* No match, output a literal byte */
            Tracevv((stderr,"%c", s->window[s->strstart]));
            bflush = _tr_tally (s, 0, s->window[s->strstart]);
            s->lookahead--;
            s->strstart++;
        }
        if (bflush) FLUSH_BLOCK(s, 0);
    }
    FLUSH_BLOCK(s, flush == Z_FINISH);
    return flush == Z_FINISH ? finish_done : block_done;
}

/* ===========================================================================
 * Same as above, but achieves better compression. We use a lazy
 * evaluation for matches: a match is finally adopted only if there is
 * no better match at the next window position.
 */
local block_state deflate_slow(s, flush)
    deflate_state *s;
    int flush;
{
    IPos hash_head = NIL;    /* head of hash chain */
    int bflush;              /* set if current block must be flushed */

    /* Process the input block. */
    for (;;) {
        /* Make sure that we always have enough lookahead, except
         * at the end of the input file. We need MAX_MATCH bytes
         * for the next match, plus MIN_MATCH bytes to insert the
         * string following the next match.
         */
        if (s->lookahead < MIN_LOOKAHEAD) {
            fill_window(s);
            if (s->lookahead < MIN_LOOKAHEAD && flush == Z_NO_FLUSH) {
                return need_more;
            }
            if (s->lookahead == 0) break; /* flush the current block */
        }

        /* Insert the string window[strstart .. strstart+2] in the
         * dictionary, and set hash_head to the head of the hash chain:
         */
        if (s->lookahead >= MIN_MATCH) {
            INSERT_STRING(s, s->strstart, hash_head);
        }

        /* Find the longest match, discarding those <= prev_length.
         */
        s->prev_length = s->match_length, s->prev_match = s->match_start;
        s->match_length = MIN_MATCH-1;

        if (hash_head != NIL && s->prev_length < s->max_lazy_match &&
            s->strstart - hash_head <= MAX_DIST(s)) {
            /* To simplify the code, we prevent matches with the string
             * of window index 0 (in particular we have to avoid a match
             * of the string with itself at the start of the input file).
             */
            if (s->strategy != Z_HUFFMAN_ONLY) {
                s->match_length = longest_match (s, hash_head);
            }
            /* longest_match() sets match_start */

            if (s->match_length <= 5 && (s->strategy == Z_FILTERED ||
                 (s->match_length == MIN_MATCH &&
                  s->strstart - s->match_start > TOO_FAR))) {

                /* If prev_match is also MIN_MATCH, match_start is garbage
                 * but we will ignore the current match anyway.
                 */
                s->match_length = MIN_MATCH-1;
            }
        }
        /* If there was a match at the previous step and the current
         * match is not better, output the previous match:
         */
        if (s->prev_length >= MIN_MATCH && s->match_length <= s->prev_length) {
            uInt max_insert = s->strstart + s->lookahead - MIN_MATCH;
            /* Do not insert strings in hash table beyond this. */

            check_match(s, s->strstart-1, s->prev_match, s->prev_length);

            bflush = _tr_tally(s, s->strstart -1 - s->prev_match,
                               s->prev_length - MIN_MATCH);

            /* Insert in hash table all strings up to the end of the match.
             * strstart-1 and strstart are already inserted. If there is not
             * enough lookahead, the last two strings are not inserted in
             * the hash table.
             */
            s->lookahead -= s->prev_length-1;
            s->prev_length -= 2;
            do {
                if (++s->strstart <= max_insert) {
                    INSERT_STRING(s, s->strstart, hash_head);
                }
            } while (--s->prev_length != 0);
            s->match_available = 0;
            s->match_length = MIN_MATCH-1;
            s->strstart++;

            if (bflush) FLUSH_BLOCK(s, 0);

        } else if (s->match_available) {
            /* If there was no match at the previous position, output a
             * single literal. If there was a match but the current match
             * is longer, truncate the previous match to a single literal.
             */
            Tracevv((stderr,"%c", s->window[s->strstart-1]));
            if (_tr_tally (s, 0, s->window[s->strstart-1])) {
                FLUSH_BLOCK_ONLY(s, 0);
            }
            s->strstart++;
            s->lookahead--;
            if (s->strm->avail_out == 0) return need_more;
        } else {
            /* There is no previous match to compare with, wait for
             * the next step to decide.
             */
            s->match_available = 1;
            s->strstart++;
            s->lookahead--;
        }
    }
    Assert (flush != Z_NO_FLUSH, "no flush?");
    if (s->match_available) {
        /* Emit the last deferred literal before closing the block. */
        Tracevv((stderr,"%c", s->window[s->strstart-1]));
        _tr_tally (s, 0, s->window[s->strstart-1]);
        s->match_available = 0;
    }
    FLUSH_BLOCK(s, flush == Z_FINISH);
    return flush == Z_FINISH ? finish_done : block_done;
}
/* --- deflate.c */

/* +++ trees.c */
/* trees.c -- output deflated data using Huffman coding
 * Copyright (C) 1995-1996 Jean-loup Gailly
 * For conditions of distribution and use, see copyright notice in zlib.h
 */

/*
 * ALGORITHM
 *
 *      The "deflation" process uses several Huffman trees.
The more\n * common source values are represented by shorter bit sequences.\n *\n * Each code tree is stored in a compressed form which is itself\n * a Huffman encoding of the lengths of all the code strings (in\n * ascending order by source values). The actual code strings are\n * reconstructed from the lengths in the inflate process, as described\n * in the deflate specification.\n *\n * REFERENCES\n *\n * Deutsch, L.P.,\"'Deflate' Compressed Data Format Specification\".\n * Available in ftp.uu.net:/pub/archiving/zip/doc/deflate-1.1.doc\n *\n * Storer, James A.\n * Data Compression: Methods and Theory, pp. 49-50.\n * Computer Science Press, 1988. ISBN 0-7167-8156-5.\n *\n * Sedgewick, R.\n * Algorithms, p290.\n * Addison-Wesley, 1983. ISBN 0-201-06672-6.\n */\n\n/* From: trees.c,v 1.11 1996/07/24 13:41:06 me Exp $ */\n\n/* #include \"deflate.h\" */\n\n#ifdef DEBUG_ZLIB\n# include \n#endif\n\n/* ===========================================================================\n * Constants\n */\n\n#define MAX_BL_BITS 7\n/* Bit length codes must not exceed MAX_BL_BITS bits */\n\n#define END_BLOCK 256\n/* end of block literal code */\n\n#define REP_3_6 16\n/* repeat previous bit length 3-6 times (2 bits of repeat count) */\n\n#define REPZ_3_10 17\n/* repeat a zero length 3-10 times (3 bits of repeat count) */\n\n#define REPZ_11_138 18\n/* repeat a zero length 11-138 times (7 bits of repeat count) */\n\nlocal int extra_lbits[LENGTH_CODES] /* extra bits for each length code */\n = {0,0,0,0,0,0,0,0,1,1,1,1,2,2,2,2,3,3,3,3,4,4,4,4,5,5,5,5,0};\n\nlocal int extra_dbits[D_CODES] /* extra bits for each distance code */\n = {0,0,0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,8,8,9,9,10,10,11,11,12,12,13,13};\n\nlocal int extra_blbits[BL_CODES]/* extra bits for each bit length code */\n = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,7};\n\nlocal uch bl_order[BL_CODES]\n = {16,17,18,0,8,7,9,6,10,5,11,4,12,3,13,2,14,1,15};\n/* The lengths of the bit length codes are sent in order of decreasing\n * 
probability, to avoid transmitting the lengths for unused bit length codes.\n */\n\n#define Buf_size (8 * 2*sizeof(char))\n/* Number of bits used within bi_buf. (bi_buf might be implemented on\n * more than 16 bits on some systems.)\n */\n\n/* ===========================================================================\n * Local data. These are initialized only once.\n */\n\nlocal ct_data static_ltree[L_CODES+2];\n/* The static literal tree. Since the bit lengths are imposed, there is no\n * need for the L_CODES extra codes used during heap construction. However\n * The codes 286 and 287 are needed to build a canonical tree (see _tr_init\n * below).\n */\n\nlocal ct_data static_dtree[D_CODES];\n/* The static distance tree. (Actually a trivial tree since all codes use\n * 5 bits.)\n */\n\nlocal uch dist_code[512];\n/* distance codes. The first 256 values correspond to the distances\n * 3 .. 258, the last 256 values correspond to the top 8 bits of\n * the 15 bit distances.\n */\n\nlocal uch length_code[MAX_MATCH-MIN_MATCH+1];\n/* length code for each normalized match length (0 == MIN_MATCH) */\n\nlocal int base_length[LENGTH_CODES];\n/* First normalized length for each code (0 = MIN_MATCH) */\n\nlocal int base_dist[D_CODES];\n/* First normalized distance for each code (0 = distance of 1) */\n\nstruct static_tree_desc_s {\n ct_data *static_tree; /* static tree or NULL */\n intf *extra_bits; /* extra bits for each code or NULL */\n int extra_base; /* base index for extra_bits */\n int elems; /* max number of elements in the tree */\n int max_length; /* max bit length for the codes */\n};\n\nlocal static_tree_desc static_l_desc =\n{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};\n\nlocal static_tree_desc static_d_desc =\n{static_dtree, extra_dbits, 0, D_CODES, MAX_BITS};\n\nlocal static_tree_desc static_bl_desc =\n{(ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS};\n\n/* ===========================================================================\n * Local 
(static) routines in this file.\n */\n\nlocal void tr_static_init OF((void));\nlocal void init_block OF((deflate_state *s));\nlocal void pqdownheap OF((deflate_state *s, ct_data *tree, int k));\nlocal void gen_bitlen OF((deflate_state *s, tree_desc *desc));\nlocal void gen_codes OF((ct_data *tree, int max_code, ushf *bl_count));\nlocal void build_tree OF((deflate_state *s, tree_desc *desc));\nlocal void scan_tree OF((deflate_state *s, ct_data *tree, int max_code));\nlocal void send_tree OF((deflate_state *s, ct_data *tree, int max_code));\nlocal int build_bl_tree OF((deflate_state *s));\nlocal void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,\n int blcodes));\nlocal void compress_block OF((deflate_state *s, ct_data *ltree,\n ct_data *dtree));\nlocal void set_data_type OF((deflate_state *s));\nlocal unsigned bi_reverse OF((unsigned value, int length));\nlocal void bi_windup OF((deflate_state *s));\nlocal void bi_flush OF((deflate_state *s));\nlocal void copy_block OF((deflate_state *s, charf *buf, unsigned len,\n int header));\n\n#ifndef DEBUG_ZLIB\n# define send_code(s, c, tree) send_bits(s, tree[(c)].Code, tree[(c)].Len)\n /* Send a code of the given tree. c and tree must not have side effects */\n\n#else /* DEBUG_ZLIB */\n# define send_code(s, c, tree) \\\n { if (verbose>2) fprintf(stderr,\"\\ncd %3d \",(c)); \\\n send_bits(s, tree[c].Code, tree[c].Len); }\n#endif\n\n#define d_code(dist) \\\n ((dist) < 256 ? dist_code[dist] : dist_code[256+((dist)>>7)])\n/* Mapping from a distance to a distance code. dist is the distance - 1 and\n * must not have side effects. 
dist_code[256] and dist_code[257] are never\n * used.\n */\n\n/* ===========================================================================\n * Output a short LSB first on the stream.\n * IN assertion: there is enough room in pendingBuf.\n */\n#define put_short(s, w) { \\\n put_byte(s, (uch)((w) & 0xff)); \\\n put_byte(s, (uch)((ush)(w) >> 8)); \\\n}\n\n/* ===========================================================================\n * Send a value on a given number of bits.\n * IN assertion: length <= 16 and value fits in length bits.\n */\n#ifdef DEBUG_ZLIB\nlocal void send_bits OF((deflate_state *s, int value, int length));\n\nlocal void send_bits(s, value, length)\n deflate_state *s;\n int value; /* value to send */\n int length; /* number of bits */\n{\n Tracevv((stderr,\" l %2d v %4x \", length, value));\n Assert(length > 0 && length <= 15, \"invalid length\");\n s->bits_sent += (ulg)length;\n\n /* If not enough room in bi_buf, use (valid) bits from bi_buf and\n * (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))\n * unused bits in value.\n */\n if (s->bi_valid > (int)Buf_size - length) {\n s->bi_buf |= (value << s->bi_valid);\n put_short(s, s->bi_buf);\n s->bi_buf = (ush)value >> (Buf_size - s->bi_valid);\n s->bi_valid += length - Buf_size;\n } else {\n s->bi_buf |= value << s->bi_valid;\n s->bi_valid += length;\n }\n}\n#else /* !DEBUG_ZLIB */\n\n#define send_bits(s, value, length) \\\n{ int len = (length);\\\n if ((s)->bi_valid > (int)Buf_size - len) {\\\n int val = (value);\\\n (s)->bi_buf |= (val << (s)->bi_valid);\\\n put_short((s), (s)->bi_buf);\\\n (s)->bi_buf = (ush)val >> (Buf_size - (s)->bi_valid);\\\n (s)->bi_valid += len - Buf_size;\\\n } else {\\\n (s)->bi_buf |= (value) << (s)->bi_valid;\\\n (s)->bi_valid += len;\\\n }\\\n}\n#endif /* DEBUG_ZLIB */\n\n/* the arguments must not have side effects */\n\n/* ===========================================================================\n * Initialize the various 'constant' tables. 
In a multi-threaded environment,\n * this function may be called by two threads concurrently, but this is\n * harmless since both invocations do exactly the same thing.\n */\nlocal void tr_static_init()\n{\n static int static_init_done = 0;\n int n; /* iterates over tree elements */\n int bits; /* bit counter */\n int length; /* length value */\n int code; /* code value */\n int dist; /* distance index */\n ush bl_count[MAX_BITS+1];\n /* number of codes at each bit length for an optimal tree */\n\n if (static_init_done) return;\n\n /* Initialize the mapping length (0..255) -> length code (0..28) */\n length = 0;\n for (code = 0; code < LENGTH_CODES-1; code++) {\n base_length[code] = length;\n for (n = 0; n < (1< dist code (0..29) */\n dist = 0;\n for (code = 0 ; code < 16; code++) {\n base_dist[code] = dist;\n for (n = 0; n < (1<>= 7; /* from now on, all distances are divided by 128 */\n for ( ; code < D_CODES; code++) {\n base_dist[code] = dist << 7;\n for (n = 0; n < (1<<(extra_dbits[code]-7)); n++) {\n dist_code[256 + dist++] = (uch)code;\n }\n }\n Assert (dist == 256, \"tr_static_init: 256+dist != 512\");\n\n /* Construct the codes of the static literal tree */\n for (bits = 0; bits <= MAX_BITS; bits++) bl_count[bits] = 0;\n n = 0;\n while (n <= 143) static_ltree[n++].Len = 8, bl_count[8]++;\n while (n <= 255) static_ltree[n++].Len = 9, bl_count[9]++;\n while (n <= 279) static_ltree[n++].Len = 7, bl_count[7]++;\n while (n <= 287) static_ltree[n++].Len = 8, bl_count[8]++;\n /* Codes 286 and 287 do not exist, but we must include them in the\n * tree construction to get a canonical Huffman tree (longest code\n * all ones)\n */\n gen_codes((ct_data *)static_ltree, L_CODES+1, bl_count);\n\n /* The static distance tree is trivial: */\n for (n = 0; n < D_CODES; n++) {\n static_dtree[n].Len = 5;\n static_dtree[n].Code = bi_reverse((unsigned)n, 5);\n }\n static_init_done = 1;\n}\n\n/* ===========================================================================\n * 
Initialize the tree data structures for a new zlib stream.\n */\nvoid _tr_init(s)\n deflate_state *s;\n{\n tr_static_init();\n\n s->compressed_len = 0L;\n\n s->l_desc.dyn_tree = s->dyn_ltree;\n s->l_desc.stat_desc = &static_l_desc;\n\n s->d_desc.dyn_tree = s->dyn_dtree;\n s->d_desc.stat_desc = &static_d_desc;\n\n s->bl_desc.dyn_tree = s->bl_tree;\n s->bl_desc.stat_desc = &static_bl_desc;\n\n s->bi_buf = 0;\n s->bi_valid = 0;\n s->last_eob_len = 8; /* enough lookahead for inflate */\n#ifdef DEBUG_ZLIB\n s->bits_sent = 0L;\n#endif\n\n /* Initialize the first block of the first file: */\n init_block(s);\n}\n\n/* ===========================================================================\n * Initialize a new block.\n */\nlocal void init_block(s)\n deflate_state *s;\n{\n int n; /* iterates over tree elements */\n\n /* Initialize the trees. */\n for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;\n for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;\n for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;\n\n s->dyn_ltree[END_BLOCK].Freq = 1;\n s->opt_len = s->static_len = 0L;\n s->last_lit = s->matches = 0;\n}\n\n#define SMALLEST 1\n/* Index within the heap array of least frequent node in the Huffman tree */\n\n\n/* ===========================================================================\n * Remove the smallest element from the heap and recreate the heap with\n * one less element. Updates heap and heap_len.\n */\n#define pqremove(s, tree, top) \\\n{\\\n top = s->heap[SMALLEST]; \\\n s->heap[SMALLEST] = s->heap[s->heap_len--]; \\\n pqdownheap(s, tree, SMALLEST); \\\n}\n\n/* ===========================================================================\n * Compares to subtrees, using the tree depth as tie breaker when\n * the subtrees have equal frequency. 
This minimizes the worst case length.\n */\n#define smaller(tree, n, m, depth) \\\n (tree[n].Freq < tree[m].Freq || \\\n (tree[n].Freq == tree[m].Freq && depth[n] <= depth[m]))\n\n/* ===========================================================================\n * Restore the heap property by moving down the tree starting at node k,\n * exchanging a node with the smallest of its two sons if necessary, stopping\n * when the heap property is re-established (each father smaller than its\n * two sons).\n */\nlocal void pqdownheap(s, tree, k)\n deflate_state *s;\n ct_data *tree; /* the tree to restore */\n int k; /* node to move down */\n{\n int v = s->heap[k];\n int j = k << 1; /* left son of k */\n while (j <= s->heap_len) {\n /* Set j to the smallest of the two sons: */\n if (j < s->heap_len &&\n smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {\n j++;\n }\n /* Exit if v is smaller than both sons */\n if (smaller(tree, v, s->heap[j], s->depth)) break;\n\n /* Exchange v with the smallest son */\n s->heap[k] = s->heap[j]; k = j;\n\n /* And continue down the tree, setting j to the left son of k */\n j <<= 1;\n }\n s->heap[k] = v;\n}\n\n/* ===========================================================================\n * Compute the optimal bit lengths for a tree and update the total bit length\n * for the current block.\n * IN assertion: the fields freq and dad are set, heap[heap_max] and\n * above are the tree nodes sorted by increasing frequency.\n * OUT assertions: the field len is set to the optimal bit length, the\n * array bl_count contains the frequencies for each bit length.\n * The length opt_len is updated; static_len is also updated if stree is\n * not null.\n */\nlocal void gen_bitlen(s, desc)\n deflate_state *s;\n tree_desc *desc; /* the tree descriptor */\n{\n ct_data *tree = desc->dyn_tree;\n int max_code = desc->max_code;\n ct_data *stree = desc->stat_desc->static_tree;\n intf *extra = desc->stat_desc->extra_bits;\n int base = 
desc->stat_desc->extra_base;\n int max_length = desc->stat_desc->max_length;\n int h; /* heap index */\n int n, m; /* iterate over the tree elements */\n int bits; /* bit length */\n int xbits; /* extra bits */\n ush f; /* frequency */\n int overflow = 0; /* number of elements with bit length too large */\n\n for (bits = 0; bits <= MAX_BITS; bits++) s->bl_count[bits] = 0;\n\n /* In a first pass, compute the optimal bit lengths (which may\n * overflow in the case of the bit length tree).\n */\n tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */\n\n for (h = s->heap_max+1; h < HEAP_SIZE; h++) {\n n = s->heap[h];\n bits = tree[tree[n].Dad].Len + 1;\n if (bits > max_length) bits = max_length, overflow++;\n tree[n].Len = (ush)bits;\n /* We overwrite tree[n].Dad which is no longer needed */\n\n if (n > max_code) continue; /* not a leaf node */\n\n s->bl_count[bits]++;\n xbits = 0;\n if (n >= base) xbits = extra[n-base];\n f = tree[n].Freq;\n s->opt_len += (ulg)f * (bits + xbits);\n if (stree) s->static_len += (ulg)f * (stree[n].Len + xbits);\n }\n if (overflow == 0) return;\n\n Trace((stderr,\"\\nbit length overflow\\n\"));\n /* This happens for example on obj2 and pic of the Calgary corpus */\n\n /* Find the first bit length which could increase: */\n do {\n bits = max_length-1;\n while (s->bl_count[bits] == 0) bits--;\n s->bl_count[bits]--; /* move one leaf down the tree */\n s->bl_count[bits+1] += 2; /* move one overflow item as its brother */\n s->bl_count[max_length]--;\n /* The brother of the overflow item also moves one step up,\n * but this does not affect bl_count[max_length]\n */\n overflow -= 2;\n } while (overflow > 0);\n\n /* Now recompute all bit lengths, scanning in increasing frequency.\n * h is still equal to HEAP_SIZE. (It is simpler to reconstruct all\n * lengths instead of fixing only the wrong ones. 
This idea is taken\n * from 'ar' written by Haruhiko Okumura.)\n */\n for (bits = max_length; bits != 0; bits--) {\n n = s->bl_count[bits];\n while (n != 0) {\n m = s->heap[--h];\n if (m > max_code) continue;\n if (tree[m].Len != (unsigned) bits) {\n Trace((stderr,\"code %d bits %d->%d\\n\", m, tree[m].Len, bits));\n s->opt_len += ((long)bits - (long)tree[m].Len)\n *(long)tree[m].Freq;\n tree[m].Len = (ush)bits;\n }\n n--;\n }\n }\n}\n\n/* ===========================================================================\n * Generate the codes for a given tree and bit counts (which need not be\n * optimal).\n * IN assertion: the array bl_count contains the bit length statistics for\n * the given tree and the field len is set for all tree elements.\n * OUT assertion: the field code is set for all tree elements of non\n * zero code length.\n */\nlocal void gen_codes (tree, max_code, bl_count)\n ct_data *tree; /* the tree to decorate */\n int max_code; /* largest code with non zero frequency */\n ushf *bl_count; /* number of codes at each bit length */\n{\n ush next_code[MAX_BITS+1]; /* next code value for each bit length */\n ush code = 0; /* running code value */\n int bits; /* bit index */\n int n; /* code index */\n\n /* The distribution counts are first used to generate the code values\n * without bit reversal.\n */\n for (bits = 1; bits <= MAX_BITS; bits++) {\n next_code[bits] = code = (code + bl_count[bits-1]) << 1;\n }\n /* Check that the bit counts in bl_count are consistent. The last code\n * must be all ones.\n */\n Assert (code + bl_count[MAX_BITS]-1 == (1<dyn_tree;\n ct_data *stree = desc->stat_desc->static_tree;\n int elems = desc->stat_desc->elems;\n int n, m; /* iterate over heap elements */\n int max_code = -1; /* largest code with non zero frequency */\n int node; /* new node being created */\n\n /* Construct the initial heap, with least frequent element in\n * heap[SMALLEST]. 
The sons of heap[n] are heap[2*n] and heap[2*n+1].\n * heap[0] is not used.\n */\n s->heap_len = 0, s->heap_max = HEAP_SIZE;\n\n for (n = 0; n < elems; n++) {\n if (tree[n].Freq != 0) {\n s->heap[++(s->heap_len)] = max_code = n;\n s->depth[n] = 0;\n } else {\n tree[n].Len = 0;\n }\n }\n\n /* The pkzip format requires that at least one distance code exists,\n * and that at least one bit should be sent even if there is only one\n * possible code. So to avoid special checks later on we force at least\n * two codes of non zero frequency.\n */\n while (s->heap_len < 2) {\n node = s->heap[++(s->heap_len)] = (max_code < 2 ? ++max_code : 0);\n tree[node].Freq = 1;\n s->depth[node] = 0;\n s->opt_len--; if (stree) s->static_len -= stree[node].Len;\n /* node is 0 or 1 so it does not have extra bits */\n }\n desc->max_code = max_code;\n\n /* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,\n * establish sub-heaps of increasing lengths:\n */\n for (n = s->heap_len/2; n >= 1; n--) pqdownheap(s, tree, n);\n\n /* Construct the Huffman tree by repeatedly combining the least two\n * frequent nodes.\n */\n node = elems; /* next internal node of the tree */\n do {\n pqremove(s, tree, n); /* n = node of least frequency */\n m = s->heap[SMALLEST]; /* m = node of next least frequency */\n\n s->heap[--(s->heap_max)] = n; /* keep the nodes sorted by frequency */\n s->heap[--(s->heap_max)] = m;\n\n /* Create a new node father of n and m */\n tree[node].Freq = tree[n].Freq + tree[m].Freq;\n s->depth[node] = (uch) (MAX(s->depth[n], s->depth[m]) + 1);\n tree[n].Dad = tree[m].Dad = (ush)node;\n#ifdef DUMP_BL_TREE\n if (tree == s->bl_tree) {\n fprintf(stderr,\"\\nnode %d(%d), sons %d(%d) %d(%d)\",\n node, tree[node].Freq, n, tree[n].Freq, m, tree[m].Freq);\n }\n#endif\n /* and insert the new node in the heap */\n s->heap[SMALLEST] = node++;\n pqdownheap(s, tree, SMALLEST);\n\n } while (s->heap_len >= 2);\n\n s->heap[--(s->heap_max)] = s->heap[SMALLEST];\n\n /* At this point, 
the fields freq and dad are set. We can now\n * generate the bit lengths.\n */\n gen_bitlen(s, (tree_desc *)desc);\n\n /* The field len is now set, we can generate the bit codes */\n gen_codes ((ct_data *)tree, max_code, s->bl_count);\n}\n\n/* ===========================================================================\n * Scan a literal or distance tree to determine the frequencies of the codes\n * in the bit length tree.\n */\nlocal void scan_tree (s, tree, max_code)\n deflate_state *s;\n ct_data *tree; /* the tree to be scanned */\n int max_code; /* and its largest code of non zero frequency */\n{\n int n; /* iterates over all tree elements */\n int prevlen = -1; /* last emitted length */\n int curlen; /* length of current code */\n int nextlen = tree[0].Len; /* length of next code */\n int count = 0; /* repeat count of the current code */\n int max_count = 7; /* max repeat count */\n int min_count = 4; /* min repeat count */\n\n if (nextlen == 0) max_count = 138, min_count = 3;\n tree[max_code+1].Len = (ush)0xffff; /* guard */\n\n for (n = 0; n <= max_code; n++) {\n curlen = nextlen; nextlen = tree[n+1].Len;\n if (++count < max_count && curlen == nextlen) {\n continue;\n } else if (count < min_count) {\n s->bl_tree[curlen].Freq += count;\n } else if (curlen != 0) {\n if (curlen != prevlen) s->bl_tree[curlen].Freq++;\n s->bl_tree[REP_3_6].Freq++;\n } else if (count <= 10) {\n s->bl_tree[REPZ_3_10].Freq++;\n } else {\n s->bl_tree[REPZ_11_138].Freq++;\n }\n count = 0; prevlen = curlen;\n if (nextlen == 0) {\n max_count = 138, min_count = 3;\n } else if (curlen == nextlen) {\n max_count = 6, min_count = 3;\n } else {\n max_count = 7, min_count = 4;\n }\n }\n}\n\n/* ===========================================================================\n * Send a literal or distance tree in compressed form, using the codes in\n * bl_tree.\n */\nlocal void send_tree (s, tree, max_code)\n deflate_state *s;\n ct_data *tree; /* the tree to be scanned */\n int max_code; /* and its 
largest code of non zero frequency */\n{\n int n; /* iterates over all tree elements */\n int prevlen = -1; /* last emitted length */\n int curlen; /* length of current code */\n int nextlen = tree[0].Len; /* length of next code */\n int count = 0; /* repeat count of the current code */\n int max_count = 7; /* max repeat count */\n int min_count = 4; /* min repeat count */\n\n /* tree[max_code+1].Len = -1; */ /* guard already set */\n if (nextlen == 0) max_count = 138, min_count = 3;\n\n for (n = 0; n <= max_code; n++) {\n curlen = nextlen; nextlen = tree[n+1].Len;\n if (++count < max_count && curlen == nextlen) {\n continue;\n } else if (count < min_count) {\n do { send_code(s, curlen, s->bl_tree); } while (--count != 0);\n\n } else if (curlen != 0) {\n if (curlen != prevlen) {\n send_code(s, curlen, s->bl_tree); count--;\n }\n Assert(count >= 3 && count <= 6, \" 3_6?\");\n send_code(s, REP_3_6, s->bl_tree); send_bits(s, count-3, 2);\n\n } else if (count <= 10) {\n send_code(s, REPZ_3_10, s->bl_tree); send_bits(s, count-3, 3);\n\n } else {\n send_code(s, REPZ_11_138, s->bl_tree); send_bits(s, count-11, 7);\n }\n count = 0; prevlen = curlen;\n if (nextlen == 0) {\n max_count = 138, min_count = 3;\n } else if (curlen == nextlen) {\n max_count = 6, min_count = 3;\n } else {\n max_count = 7, min_count = 4;\n }\n }\n}\n\n/* ===========================================================================\n * Construct the Huffman tree for the bit lengths and return the index in\n * bl_order of the last bit length code to send.\n */\nlocal int build_bl_tree(s)\n deflate_state *s;\n{\n int max_blindex; /* index of last bit length code of non zero freq */\n\n /* Determine the bit length frequencies for literal and distance trees */\n scan_tree(s, (ct_data *)s->dyn_ltree, s->l_desc.max_code);\n scan_tree(s, (ct_data *)s->dyn_dtree, s->d_desc.max_code);\n\n /* Build the bit length tree: */\n build_tree(s, (tree_desc *)(&(s->bl_desc)));\n /* opt_len now includes the length of the 
tree representations, except\n * the lengths of the bit lengths codes and the 5+5+4 bits for the counts.\n */\n\n /* Determine the number of bit length codes to send. The pkzip format\n * requires that at least 4 bit length codes be sent. (appnote.txt says\n * 3 but the actual value used is 4.)\n */\n for (max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {\n if (s->bl_tree[bl_order[max_blindex]].Len != 0) break;\n }\n /* Update opt_len to include the bit length tree and counts */\n s->opt_len += 3*(max_blindex+1) + 5+5+4;\n Tracev((stderr, \"\\ndyn trees: dyn %ld, stat %ld\",\n s->opt_len, s->static_len));\n\n return max_blindex;\n}\n\n/* ===========================================================================\n * Send the header for a block using dynamic Huffman trees: the counts, the\n * lengths of the bit length codes, the literal tree and the distance tree.\n * IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.\n */\nlocal void send_all_trees(s, lcodes, dcodes, blcodes)\n deflate_state *s;\n int lcodes, dcodes, blcodes; /* number of codes for each tree */\n{\n int rank; /* index in bl_order */\n\n Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, \"not enough codes\");\n Assert (lcodes <= L_CODES && dcodes <= D_CODES && blcodes <= BL_CODES,\n \"too many codes\");\n Tracev((stderr, \"\\nbl counts: \"));\n send_bits(s, lcodes-257, 5); /* not +255 as stated in appnote.txt */\n send_bits(s, dcodes-1, 5);\n send_bits(s, blcodes-4, 4); /* not -3 as stated in appnote.txt */\n for (rank = 0; rank < blcodes; rank++) {\n Tracev((stderr, \"\\nbl code %2d \", bl_order[rank]));\n send_bits(s, s->bl_tree[bl_order[rank]].Len, 3);\n }\n Tracev((stderr, \"\\nbl tree: sent %ld\", s->bits_sent));\n\n send_tree(s, (ct_data *)s->dyn_ltree, lcodes-1); /* literal tree */\n Tracev((stderr, \"\\nlit tree: sent %ld\", s->bits_sent));\n\n send_tree(s, (ct_data *)s->dyn_dtree, dcodes-1); /* distance tree */\n Tracev((stderr, \"\\ndist tree: sent %ld\", 
s->bits_sent));\n}\n\n/* ===========================================================================\n * Send a stored block\n */\nvoid _tr_stored_block(s, buf, stored_len, eof)\n deflate_state *s;\n charf *buf; /* input block */\n ulg stored_len; /* length of input block */\n int eof; /* true if this is the last block for a file */\n{\n send_bits(s, (STORED_BLOCK<<1)+eof, 3); /* send block type */\n s->compressed_len = (s->compressed_len + 3 + 7) & (ulg)~7L;\n s->compressed_len += (stored_len + 4) << 3;\n\n copy_block(s, buf, (unsigned)stored_len, 1); /* with header */\n}\n\n/* Send just the `stored block' type code without any length bytes or data.\n */\nvoid _tr_stored_type_only(s)\n deflate_state *s;\n{\n send_bits(s, (STORED_BLOCK << 1), 3);\n bi_windup(s);\n s->compressed_len = (s->compressed_len + 3) & ~7L;\n}\n\n\n/* ===========================================================================\n * Send one empty static block to give enough lookahead for inflate.\n * This takes 10 bits, of which 7 may remain in the bit buffer.\n * The current inflate code requires 9 bits of lookahead. If the\n * last two codes for the previous block (real code plus EOB) were coded\n * on 5 bits or less, inflate may have only 5+3 bits of lookahead to decode\n * the last real code. In this case we send two empty static blocks instead\n * of one. (There are no problems if the previous block is stored or fixed.)\n * To simplify the code, we assume the worst case of last real code encoded\n * on one bit only.\n */\nvoid _tr_align(s)\n deflate_state *s;\n{\n send_bits(s, STATIC_TREES<<1, 3);\n send_code(s, END_BLOCK, static_ltree);\n s->compressed_len += 10L; /* 3 for block type, 7 for EOB */\n bi_flush(s);\n /* Of the 10 bits for the empty block, we have already sent\n * (10 - bi_valid) bits. 
The lookahead for the last real code (before\n * the EOB of the previous block) was thus at least one plus the length\n * of the EOB plus what we have just sent of the empty static block.\n */\n if (1 + s->last_eob_len + 10 - s->bi_valid < 9) {\n send_bits(s, STATIC_TREES<<1, 3);\n send_code(s, END_BLOCK, static_ltree);\n s->compressed_len += 10L;\n bi_flush(s);\n }\n s->last_eob_len = 7;\n}\n\n/* ===========================================================================\n * Determine the best encoding for the current block: dynamic trees, static\n * trees or store, and output the encoded block to the zip file. This function\n * returns the total compressed length for the file so far.\n */\nulg _tr_flush_block(s, buf, stored_len, eof)\n deflate_state *s;\n charf *buf; /* input block, or NULL if too old */\n ulg stored_len; /* length of input block */\n int eof; /* true if this is the last block for a file */\n{\n ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */\n int max_blindex = 0; /* index of last bit length code of non zero freq */\n\n /* Build the Huffman trees unless a stored block is forced */\n if (s->level > 0) {\n\n\t /* Check if the file is ascii or binary */\n\tif (s->data_type == Z_UNKNOWN) set_data_type(s);\n\n\t/* Construct the literal and distance trees */\n\tbuild_tree(s, (tree_desc *)(&(s->l_desc)));\n\tTracev((stderr, \"\\nlit data: dyn %ld, stat %ld\", s->opt_len,\n\t\ts->static_len));\n\n\tbuild_tree(s, (tree_desc *)(&(s->d_desc)));\n\tTracev((stderr, \"\\ndist data: dyn %ld, stat %ld\", s->opt_len,\n\t\ts->static_len));\n\t/* At this point, opt_len and static_len are the total bit lengths of\n\t * the compressed block data, excluding the tree representations.\n\t */\n\n\t/* Build the bit length tree for the above two trees, and get the index\n\t * in bl_order of the last bit length code to send.\n\t */\n\tmax_blindex = build_bl_tree(s);\n\n\t/* Determine the best encoding. 
Compute first the block length in bytes*/\n\topt_lenb = (s->opt_len+3+7)>>3;\n\tstatic_lenb = (s->static_len+3+7)>>3;\n\n\tTracev((stderr, \"\\nopt %lu(%lu) stat %lu(%lu) stored %lu lit %u \",\n\t\topt_lenb, s->opt_len, static_lenb, s->static_len, stored_len,\n\t\ts->last_lit));\n\n\tif (static_lenb <= opt_lenb) opt_lenb = static_lenb;\n\n } else {\n Assert(buf != (char*)0, \"lost buf\");\n\topt_lenb = static_lenb = stored_len + 5; /* force a stored block */\n }\n\n /* If compression failed and this is the first and last block,\n * and if the .zip file can be seeked (to rewrite the local header),\n * the whole file is transformed into a stored file:\n */\n#ifdef STORED_FILE_OK\n# ifdef FORCE_STORED_FILE\n if (eof && s->compressed_len == 0L) { /* force stored file */\n# else\n if (stored_len <= opt_lenb && eof && s->compressed_len==0L && seekable()) {\n# endif\n /* Since LIT_BUFSIZE <= 2*WSIZE, the input data must be there: */\n if (buf == (charf*)0) error (\"block vanished\");\n\n copy_block(s, buf, (unsigned)stored_len, 0); /* without header */\n s->compressed_len = stored_len << 3;\n s->method = STORED;\n } else\n#endif /* STORED_FILE_OK */\n\n#ifdef FORCE_STORED\n if (buf != (char*)0) { /* force stored block */\n#else\n if (stored_len+4 <= opt_lenb && buf != (char*)0) {\n /* 4: two words for the lengths */\n#endif\n /* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.\n * Otherwise we can't have processed more than WSIZE input bytes since\n * the last block flush, because compression would have been\n * successful. 
If LIT_BUFSIZE <= WSIZE, it is never too late to\n * transform a block into a stored block.\n */\n _tr_stored_block(s, buf, stored_len, eof);\n\n#ifdef FORCE_STATIC\n } else if (static_lenb >= 0) { /* force static trees */\n#else\n } else if (static_lenb == opt_lenb) {\n#endif\n send_bits(s, (STATIC_TREES<<1)+eof, 3);\n compress_block(s, (ct_data *)static_ltree, (ct_data *)static_dtree);\n s->compressed_len += 3 + s->static_len;\n } else {\n send_bits(s, (DYN_TREES<<1)+eof, 3);\n send_all_trees(s, s->l_desc.max_code+1, s->d_desc.max_code+1,\n max_blindex+1);\n compress_block(s, (ct_data *)s->dyn_ltree, (ct_data *)s->dyn_dtree);\n s->compressed_len += 3 + s->opt_len;\n }\n Assert (s->compressed_len == s->bits_sent, \"bad compressed size\");\n init_block(s);\n\n if (eof) {\n bi_windup(s);\n s->compressed_len += 7; /* align on byte boundary */\n }\n Tracev((stderr,\"\\ncomprlen %lu(%lu) \", s->compressed_len>>3,\n s->compressed_len-7*eof));\n\n return s->compressed_len >> 3;\n}\n\n/* ===========================================================================\n * Save the match info and tally the frequency counts. 
Return true if\n * the current block must be flushed.\n */\nint _tr_tally (s, dist, lc)\n deflate_state *s;\n unsigned dist; /* distance of matched string */\n unsigned lc; /* match length-MIN_MATCH or unmatched char (if dist==0) */\n{\n s->d_buf[s->last_lit] = (ush)dist;\n s->l_buf[s->last_lit++] = (uch)lc;\n if (dist == 0) {\n /* lc is the unmatched char */\n s->dyn_ltree[lc].Freq++;\n } else {\n s->matches++;\n /* Here, lc is the match length - MIN_MATCH */\n dist--; /* dist = match distance - 1 */\n Assert((ush)dist < (ush)MAX_DIST(s) &&\n (ush)lc <= (ush)(MAX_MATCH-MIN_MATCH) &&\n (ush)d_code(dist) < (ush)D_CODES, \"_tr_tally: bad match\");\n\n s->dyn_ltree[length_code[lc]+LITERALS+1].Freq++;\n s->dyn_dtree[d_code(dist)].Freq++;\n }\n\n /* Try to guess if it is profitable to stop the current block here */\n if (s->level > 2 && (s->last_lit & 0xfff) == 0) {\n /* Compute an upper bound for the compressed length */\n ulg out_length = (ulg)s->last_lit*8L;\n ulg in_length = (ulg)((long)s->strstart - s->block_start);\n int dcode;\n for (dcode = 0; dcode < D_CODES; dcode++) {\n out_length += (ulg)s->dyn_dtree[dcode].Freq *\n (5L+extra_dbits[dcode]);\n }\n out_length >>= 3;\n Tracev((stderr,\"\\nlast_lit %u, in %ld, out ~%ld(%ld%%) \",\n s->last_lit, in_length, out_length,\n 100L - out_length*100L/in_length));\n if (s->matches < s->last_lit/2 && out_length < in_length/2) return 1;\n }\n return (s->last_lit == s->lit_bufsize-1);\n /* We avoid equality with lit_bufsize because of wraparound at 64K\n * on 16 bit machines and because stored blocks are restricted to\n * 64K-1 bytes.\n */\n}\n\n/* ===========================================================================\n * Send the block data compressed using the given Huffman trees\n */\nlocal void compress_block(s, ltree, dtree)\n deflate_state *s;\n ct_data *ltree; /* literal tree */\n ct_data *dtree; /* distance tree */\n{\n unsigned dist; /* distance of matched string */\n int lc; /* match length or unmatched char 
(if dist == 0) */\n unsigned lx = 0; /* running index in l_buf */\n unsigned code; /* the code to send */\n int extra; /* number of extra bits to send */\n\n if (s->last_lit != 0) do {\n dist = s->d_buf[lx];\n lc = s->l_buf[lx++];\n if (dist == 0) {\n send_code(s, lc, ltree); /* send a literal byte */\n Tracecv(isgraph(lc), (stderr,\" '%c' \", lc));\n } else {\n /* Here, lc is the match length - MIN_MATCH */\n code = length_code[lc];\n send_code(s, code+LITERALS+1, ltree); /* send the length code */\n extra = extra_lbits[code];\n if (extra != 0) {\n lc -= base_length[code];\n send_bits(s, lc, extra); /* send the extra length bits */\n }\n dist--; /* dist is now the match distance - 1 */\n code = d_code(dist);\n Assert (code < D_CODES, \"bad d_code\");\n\n send_code(s, code, dtree); /* send the distance code */\n extra = extra_dbits[code];\n if (extra != 0) {\n dist -= base_dist[code];\n send_bits(s, dist, extra); /* send the extra distance bits */\n }\n } /* literal or match pair ? */\n\n /* Check that the overlay between pending_buf and d_buf+l_buf is ok: */\n Assert(s->pending < s->lit_bufsize + 2*lx, \"pendingBuf overflow\");\n\n } while (lx < s->last_lit);\n\n send_code(s, END_BLOCK, ltree);\n s->last_eob_len = ltree[END_BLOCK].Len;\n}\n\n/* ===========================================================================\n * Set the data type to ASCII or BINARY, using a crude approximation:\n * binary if more than 20% of the bytes are <= 6 or >= 128, ascii otherwise.\n * IN assertion: the fields freq of dyn_ltree are set and the total of all\n * frequencies does not exceed 64K (to fit in an int on 16 bit machines).\n */\nlocal void set_data_type(s)\n deflate_state *s;\n{\n int n = 0;\n unsigned ascii_freq = 0;\n unsigned bin_freq = 0;\n while (n < 7) bin_freq += s->dyn_ltree[n++].Freq;\n while (n < 128) ascii_freq += s->dyn_ltree[n++].Freq;\n while (n < LITERALS) bin_freq += s->dyn_ltree[n++].Freq;\n s->data_type = (Byte)(bin_freq > (ascii_freq >> 2) ? 
Z_BINARY : Z_ASCII);\n}\n\n/* ===========================================================================\n * Reverse the first len bits of a code, using straightforward code (a faster\n * method would use a table)\n * IN assertion: 1 <= len <= 15\n */\nlocal unsigned bi_reverse(code, len)\n unsigned code; /* the value to invert */\n int len; /* its bit length */\n{\n register unsigned res = 0;\n do {\n res |= code & 1;\n code >>= 1, res <<= 1;\n } while (--len > 0);\n return res >> 1;\n}\n\n/* ===========================================================================\n * Flush the bit buffer, keeping at most 7 bits in it.\n */\nlocal void bi_flush(s)\n deflate_state *s;\n{\n if (s->bi_valid == 16) {\n put_short(s, s->bi_buf);\n s->bi_buf = 0;\n s->bi_valid = 0;\n } else if (s->bi_valid >= 8) {\n put_byte(s, (Byte)s->bi_buf);\n s->bi_buf >>= 8;\n s->bi_valid -= 8;\n }\n}\n\n/* ===========================================================================\n * Flush the bit buffer and align the output on a byte boundary\n */\nlocal void bi_windup(s)\n deflate_state *s;\n{\n if (s->bi_valid > 8) {\n put_short(s, s->bi_buf);\n } else if (s->bi_valid > 0) {\n put_byte(s, (Byte)s->bi_buf);\n }\n s->bi_buf = 0;\n s->bi_valid = 0;\n#ifdef DEBUG_ZLIB\n s->bits_sent = (s->bits_sent+7) & ~7;\n#endif\n}\n\n/* ===========================================================================\n * Copy a stored block, storing first the length and its\n * one's complement if requested.\n */\nlocal void copy_block(s, buf, len, header)\n deflate_state *s;\n charf *buf; /* the input data */\n unsigned len; /* its length */\n int header; /* true if block header must be written */\n{\n bi_windup(s); /* align on byte boundary */\n s->last_eob_len = 8; /* enough lookahead for inflate */\n\n if (header) {\n put_short(s, (ush)len); \n put_short(s, (ush)~len);\n#ifdef DEBUG_ZLIB\n s->bits_sent += 2*16;\n#endif\n }\n#ifdef DEBUG_ZLIB\n s->bits_sent += (ulg)len<<3;\n#endif\n /* bundle up the 
put_byte(s, *buf++) calls */\n zmemcpy(&s->pending_buf[s->pending], buf, len);\n s->pending += len;\n}\n/* --- trees.c */\n\n/* +++ inflate.c */\n/* inflate.c -- zlib interface to inflate modules\n * Copyright (C) 1995-1996 Mark Adler\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/* #include \"zutil.h\" */\n\n/* +++ infblock.h */\n/* infblock.h -- header to use infblock.c\n * Copyright (C) 1995-1996 Mark Adler\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/* WARNING: this file should *not* be used by applications. It is\n part of the implementation of the compression library and is\n subject to change. Applications should only use zlib.h.\n */\n\nstruct inflate_blocks_state;\ntypedef struct inflate_blocks_state FAR inflate_blocks_statef;\n\nextern inflate_blocks_statef * inflate_blocks_new OF((\n z_streamp z,\n check_func c, /* check function */\n uInt w)); /* window size */\n\nextern int inflate_blocks OF((\n inflate_blocks_statef *,\n z_streamp ,\n int)); /* initial return code */\n\nextern void inflate_blocks_reset OF((\n inflate_blocks_statef *,\n z_streamp ,\n uLongf *)); /* check value on output */\n\nextern int inflate_blocks_free OF((\n inflate_blocks_statef *,\n z_streamp ,\n uLongf *)); /* check value on output */\n\nextern void inflate_set_dictionary OF((\n inflate_blocks_statef *s,\n const Bytef *d, /* dictionary */\n uInt n)); /* dictionary length */\n\nextern int inflate_addhistory OF((\n inflate_blocks_statef *,\n z_streamp));\n\nextern int inflate_packet_flush OF((\n inflate_blocks_statef *));\n/* --- infblock.h */\n\n#ifndef NO_DUMMY_DECL\nstruct inflate_blocks_state {int dummy;}; /* for buggy compilers */\n#endif\n\n/* inflate private state */\nstruct internal_state {\n\n /* mode */\n enum {\n METHOD, /* waiting for method byte */\n FLAG, /* waiting for flag byte */\n DICT4, /* four dictionary check bytes to go */\n DICT3, /* three dictionary check bytes to go */\n DICT2, 
/* two dictionary check bytes to go */\n DICT1, /* one dictionary check byte to go */\n DICT0, /* waiting for inflateSetDictionary */\n BLOCKS, /* decompressing blocks */\n CHECK4, /* four check bytes to go */\n CHECK3, /* three check bytes to go */\n CHECK2, /* two check bytes to go */\n CHECK1, /* one check byte to go */\n DONE, /* finished check, done */\n BAD} /* got an error--stay here */\n mode; /* current inflate mode */\n\n /* mode dependent information */\n union {\n uInt method; /* if FLAGS, method byte */\n struct {\n uLong was; /* computed check value */\n uLong need; /* stream check value */\n } check; /* if CHECK, check values to compare */\n uInt marker; /* if BAD, inflateSync's marker bytes count */\n } sub; /* submode */\n\n /* mode independent information */\n int nowrap; /* flag for no wrapper */\n uInt wbits; /* log2(window size) (8..15, defaults to 15) */\n inflate_blocks_statef \n *blocks; /* current inflate_blocks state */\n\n};\n\n\nint inflateReset(z)\nz_streamp z;\n{\n uLong c;\n\n if (z == Z_NULL || z->state == Z_NULL)\n return Z_STREAM_ERROR;\n z->total_in = z->total_out = 0;\n z->msg = Z_NULL;\n z->state->mode = z->state->nowrap ? 
BLOCKS : METHOD;\n inflate_blocks_reset(z->state->blocks, z, &c);\n Trace((stderr, \"inflate: reset\\n\"));\n return Z_OK;\n}\n\n\nint inflateEnd(z)\nz_streamp z;\n{\n uLong c;\n\n if (z == Z_NULL || z->state == Z_NULL || z->zfree == Z_NULL)\n return Z_STREAM_ERROR;\n if (z->state->blocks != Z_NULL)\n inflate_blocks_free(z->state->blocks, z, &c);\n ZFREE(z, z->state);\n z->state = Z_NULL;\n Trace((stderr, \"inflate: end\\n\"));\n return Z_OK;\n}\n\n\nint inflateInit2_(z, w, version, stream_size)\nz_streamp z;\nint w;\nconst char *version;\nint stream_size;\n{\n if (version == Z_NULL || version[0] != ZLIB_VERSION[0] ||\n stream_size != sizeof(z_stream))\n return Z_VERSION_ERROR;\n\n /* initialize state */\n if (z == Z_NULL)\n return Z_STREAM_ERROR;\n z->msg = Z_NULL;\n#ifndef NO_ZCFUNCS\n if (z->zalloc == Z_NULL)\n {\n z->zalloc = zcalloc;\n z->opaque = (voidpf)0;\n }\n if (z->zfree == Z_NULL) z->zfree = zcfree;\n#endif\n if ((z->state = (struct internal_state FAR *)\n ZALLOC(z,1,sizeof(struct internal_state))) == Z_NULL)\n return Z_MEM_ERROR;\n z->state->blocks = Z_NULL;\n\n /* handle undocumented nowrap option (no zlib header or check) */\n z->state->nowrap = 0;\n if (w < 0)\n {\n w = - w;\n z->state->nowrap = 1;\n }\n\n /* set window size */\n if (w < 8 || w > 15)\n {\n inflateEnd(z);\n return Z_STREAM_ERROR;\n }\n z->state->wbits = (uInt)w;\n\n /* create inflate_blocks state */\n if ((z->state->blocks =\n inflate_blocks_new(z, z->state->nowrap ? 
Z_NULL : adler32, (uInt)1 << w))\n == Z_NULL)\n {\n inflateEnd(z);\n return Z_MEM_ERROR;\n }\n Trace((stderr, \"inflate: allocated\\n\"));\n\n /* reset state */\n inflateReset(z);\n return Z_OK;\n}\n\n\nint inflateInit_(z, version, stream_size)\nz_streamp z;\nconst char *version;\nint stream_size;\n{\n return inflateInit2_(z, DEF_WBITS, version, stream_size);\n}\n\n\n#define NEEDBYTE {if(z->avail_in==0)goto empty;r=Z_OK;}\n#define NEXTBYTE (z->avail_in--,z->total_in++,*z->next_in++)\n\nint inflate(z, f)\nz_streamp z;\nint f;\n{\n int r;\n uInt b;\n\n if (z == Z_NULL || z->state == Z_NULL || z->next_in == Z_NULL || f < 0)\n return Z_STREAM_ERROR;\n r = Z_BUF_ERROR;\n while (1) switch (z->state->mode)\n {\n case METHOD:\n NEEDBYTE\n if (((z->state->sub.method = NEXTBYTE) & 0xf) != Z_DEFLATED)\n {\n z->state->mode = BAD;\n z->msg = (char*)\"unknown compression method\";\n z->state->sub.marker = 5; /* can't try inflateSync */\n break;\n }\n if ((z->state->sub.method >> 4) + 8 > z->state->wbits)\n {\n z->state->mode = BAD;\n z->msg = (char*)\"invalid window size\";\n z->state->sub.marker = 5; /* can't try inflateSync */\n break;\n }\n z->state->mode = FLAG;\n case FLAG:\n NEEDBYTE\n b = NEXTBYTE;\n if (((z->state->sub.method << 8) + b) % 31)\n {\n z->state->mode = BAD;\n z->msg = (char*)\"incorrect header check\";\n z->state->sub.marker = 5; /* can't try inflateSync */\n break;\n }\n Trace((stderr, \"inflate: zlib header ok\\n\"));\n if (!(b & PRESET_DICT))\n {\n z->state->mode = BLOCKS;\n\tbreak;\n }\n z->state->mode = DICT4;\n case DICT4:\n NEEDBYTE\n z->state->sub.check.need = (uLong)NEXTBYTE << 24;\n z->state->mode = DICT3;\n case DICT3:\n NEEDBYTE\n z->state->sub.check.need += (uLong)NEXTBYTE << 16;\n z->state->mode = DICT2;\n case DICT2:\n NEEDBYTE\n z->state->sub.check.need += (uLong)NEXTBYTE << 8;\n z->state->mode = DICT1;\n case DICT1:\n NEEDBYTE\n z->state->sub.check.need += (uLong)NEXTBYTE;\n z->adler = z->state->sub.check.need;\n z->state->mode = DICT0;\n 
return Z_NEED_DICT;\n case DICT0:\n z->state->mode = BAD;\n z->msg = (char*)\"need dictionary\";\n z->state->sub.marker = 0; /* can try inflateSync */\n return Z_STREAM_ERROR;\n case BLOCKS:\n r = inflate_blocks(z->state->blocks, z, r);\n if (f == Z_PACKET_FLUSH && z->avail_in == 0 && z->avail_out != 0)\n\t r = inflate_packet_flush(z->state->blocks);\n if (r == Z_DATA_ERROR)\n {\n z->state->mode = BAD;\n z->state->sub.marker = 0; /* can try inflateSync */\n break;\n }\n if (r != Z_STREAM_END)\n return r;\n r = Z_OK;\n inflate_blocks_reset(z->state->blocks, z, &z->state->sub.check.was);\n if (z->state->nowrap)\n {\n z->state->mode = DONE;\n break;\n }\n z->state->mode = CHECK4;\n case CHECK4:\n NEEDBYTE\n z->state->sub.check.need = (uLong)NEXTBYTE << 24;\n z->state->mode = CHECK3;\n case CHECK3:\n NEEDBYTE\n z->state->sub.check.need += (uLong)NEXTBYTE << 16;\n z->state->mode = CHECK2;\n case CHECK2:\n NEEDBYTE\n z->state->sub.check.need += (uLong)NEXTBYTE << 8;\n z->state->mode = CHECK1;\n case CHECK1:\n NEEDBYTE\n z->state->sub.check.need += (uLong)NEXTBYTE;\n\n if (z->state->sub.check.was != z->state->sub.check.need)\n {\n z->state->mode = BAD;\n z->msg = (char*)\"incorrect data check\";\n z->state->sub.marker = 5; /* can't try inflateSync */\n break;\n }\n Trace((stderr, \"inflate: zlib check ok\\n\"));\n z->state->mode = DONE;\n case DONE:\n return Z_STREAM_END;\n case BAD:\n return Z_DATA_ERROR;\n default:\n return Z_STREAM_ERROR;\n }\n\n empty:\n if (f != Z_PACKET_FLUSH)\n return r;\n z->state->mode = BAD;\n z->msg = (char *)\"need more for packet flush\";\n z->state->sub.marker = 0; /* can try inflateSync */\n return Z_DATA_ERROR;\n}\n\n\nint inflateSetDictionary(z, dictionary, dictLength)\nz_streamp z;\nconst Bytef *dictionary;\nuInt dictLength;\n{\n uInt length = dictLength;\n\n if (z == Z_NULL || z->state == Z_NULL || z->state->mode != DICT0)\n return Z_STREAM_ERROR;\n\n if (adler32(1L, dictionary, dictLength) != z->adler) return Z_DATA_ERROR;\n z->adler = 
1L;\n\n if (length >= ((uInt)1<state->wbits))\n {\n length = (1<state->wbits)-1;\n dictionary += dictLength - length;\n }\n inflate_set_dictionary(z->state->blocks, dictionary, length);\n z->state->mode = BLOCKS;\n return Z_OK;\n}\n\n/*\n * This subroutine adds the data at next_in/avail_in to the output history\n * without performing any output. The output buffer must be \"caught up\";\n * i.e. no pending output (hence s->read equals s->write), and the state must\n * be BLOCKS (i.e. we should be willing to see the start of a series of\n * BLOCKS). On exit, the output will also be caught up, and the checksum\n * will have been updated if need be.\n */\n\nint inflateIncomp(z)\nz_stream *z;\n{\n if (z->state->mode != BLOCKS)\n\treturn Z_DATA_ERROR;\n return inflate_addhistory(z->state->blocks, z);\n}\n\n\nint inflateSync(z)\nz_streamp z;\n{\n uInt n; /* number of bytes to look at */\n Bytef *p; /* pointer to bytes */\n uInt m; /* number of marker bytes found in a row */\n uLong r, w; /* temporaries to save total_in and total_out */\n\n /* set up */\n if (z == Z_NULL || z->state == Z_NULL)\n return Z_STREAM_ERROR;\n if (z->state->mode != BAD)\n {\n z->state->mode = BAD;\n z->state->sub.marker = 0;\n }\n if ((n = z->avail_in) == 0)\n return Z_BUF_ERROR;\n p = z->next_in;\n m = z->state->sub.marker;\n\n /* search */\n while (n && m < 4)\n {\n if (*p == (Byte)(m < 2 ? 
0 : 0xff))\n m++;\n else if (*p)\n m = 0;\n else\n m = 4 - m;\n p++, n--;\n }\n\n /* restore */\n z->total_in += p - z->next_in;\n z->next_in = p;\n z->avail_in = n;\n z->state->sub.marker = m;\n\n /* return no joy or set up to restart on a new block */\n if (m != 4)\n return Z_DATA_ERROR;\n r = z->total_in; w = z->total_out;\n inflateReset(z);\n z->total_in = r; z->total_out = w;\n z->state->mode = BLOCKS;\n return Z_OK;\n}\n\n#undef NEEDBYTE\n#undef NEXTBYTE\n/* --- inflate.c */\n\n/* +++ infblock.c */\n/* infblock.c -- interpret and process block types to last block\n * Copyright (C) 1995-1996 Mark Adler\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/* #include \"zutil.h\" */\n/* #include \"infblock.h\" */\n\n/* +++ inftrees.h */\n/* inftrees.h -- header to use inftrees.c\n * Copyright (C) 1995-1996 Mark Adler\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/* WARNING: this file should *not* be used by applications. It is\n part of the implementation of the compression library and is\n subject to change. Applications should only use zlib.h.\n */\n\n/* Huffman code lookup table entry--this entry is four bytes for machines\n that have 16-bit pointers (e.g. PC's in the small or medium model). 
*/\n\ntypedef struct inflate_huft_s FAR inflate_huft;\n\nstruct inflate_huft_s {\n union {\n struct {\n Byte Exop; /* number of extra bits or operation */\n Byte Bits; /* number of bits in this code or subcode */\n } what;\n Bytef *pad; /* pad structure to a power of 2 (4 bytes for */\n } word; /* 16-bit, 8 bytes for 32-bit machines) */\n union {\n uInt Base; /* literal, length base, or distance base */\n inflate_huft *Next; /* pointer to next level of table */\n } more;\n};\n\n#ifdef DEBUG_ZLIB\n extern uInt inflate_hufts;\n#endif\n\nextern int inflate_trees_bits OF((\n uIntf *, /* 19 code lengths */\n uIntf *, /* bits tree desired/actual depth */\n inflate_huft * FAR *, /* bits tree result */\n z_streamp )); /* for zalloc, zfree functions */\n\nextern int inflate_trees_dynamic OF((\n uInt, /* number of literal/length codes */\n uInt, /* number of distance codes */\n uIntf *, /* that many (total) code lengths */\n uIntf *, /* literal desired/actual bit depth */\n uIntf *, /* distance desired/actual bit depth */\n inflate_huft * FAR *, /* literal/length tree result */\n inflate_huft * FAR *, /* distance tree result */\n z_streamp )); /* for zalloc, zfree functions */\n\nextern int inflate_trees_fixed OF((\n uIntf *, /* literal desired/actual bit depth */\n uIntf *, /* distance desired/actual bit depth */\n inflate_huft * FAR *, /* literal/length tree result */\n inflate_huft * FAR *)); /* distance tree result */\n\nextern int inflate_trees_free OF((\n inflate_huft *, /* tables to free */\n z_streamp )); /* for zfree function */\n\n/* --- inftrees.h */\n\n/* +++ infcodes.h */\n/* infcodes.h -- header to use infcodes.c\n * Copyright (C) 1995-1996 Mark Adler\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/* WARNING: this file should *not* be used by applications. It is\n part of the implementation of the compression library and is\n subject to change. 
Applications should only use zlib.h.\n */\n\nstruct inflate_codes_state;\ntypedef struct inflate_codes_state FAR inflate_codes_statef;\n\nextern inflate_codes_statef *inflate_codes_new OF((\n uInt, uInt,\n inflate_huft *, inflate_huft *,\n z_streamp ));\n\nextern int inflate_codes OF((\n inflate_blocks_statef *,\n z_streamp ,\n int));\n\nextern void inflate_codes_free OF((\n inflate_codes_statef *,\n z_streamp ));\n\n/* --- infcodes.h */\n\n/* +++ infutil.h */\n/* infutil.h -- types and macros common to blocks and codes\n * Copyright (C) 1995-1996 Mark Adler\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/* WARNING: this file should *not* be used by applications. It is\n part of the implementation of the compression library and is\n subject to change. Applications should only use zlib.h.\n */\n\n#ifndef _INFUTIL_H\n#define _INFUTIL_H\n\ntypedef enum {\n TYPE, /* get type bits (3, including end bit) */\n LENS, /* get lengths for stored */\n STORED, /* processing stored block */\n TABLE, /* get table lengths */\n BTREE, /* get bit lengths tree for a dynamic block */\n DTREE, /* get length, distance trees for a dynamic block */\n CODES, /* processing fixed or dynamic block */\n DRY, /* output remaining window bytes */\n DONEB, /* finished last block, done */\n BADB} /* got a data error--stuck here */\ninflate_block_mode;\n\n/* inflate blocks semi-private state */\nstruct inflate_blocks_state {\n\n /* mode */\n inflate_block_mode mode; /* current inflate_block mode */\n\n /* mode dependent information */\n union {\n uInt left; /* if STORED, bytes left to copy */\n struct {\n uInt table; /* table lengths (14 bits) */\n uInt index; /* index into blens (or border) */\n uIntf *blens; /* bit lengths of codes */\n uInt bb; /* bit length tree depth */\n inflate_huft *tb; /* bit length decoding tree */\n } trees; /* if DTREE, decoding info for trees */\n struct {\n inflate_huft *tl;\n inflate_huft *td; /* trees to free */\n 
inflate_codes_statef \n *codes;\n } decode; /* if CODES, current state */\n } sub; /* submode */\n uInt last; /* true if this block is the last block */\n\n /* mode independent information */\n uInt bitk; /* bits in bit buffer */\n uLong bitb; /* bit buffer */\n Bytef *window; /* sliding window */\n Bytef *end; /* one byte after sliding window */\n Bytef *read; /* window read pointer */\n Bytef *write; /* window write pointer */\n check_func checkfn; /* check function */\n uLong check; /* check on output */\n\n};\n\n\n/* defines for inflate input/output */\n/* update pointers and return */\n#define UPDBITS {s->bitb=b;s->bitk=k;}\n#define UPDIN {z->avail_in=n;z->total_in+=p-z->next_in;z->next_in=p;}\n#define UPDOUT {s->write=q;}\n#define UPDATE {UPDBITS UPDIN UPDOUT}\n#define LEAVE {UPDATE return inflate_flush(s,z,r);}\n/* get bytes and bits */\n#define LOADIN {p=z->next_in;n=z->avail_in;b=s->bitb;k=s->bitk;}\n#define NEEDBYTE {if(n)r=Z_OK;else LEAVE}\n#define NEXTBYTE (n--,*p++)\n#define NEEDBITS(j) {while(k<(j)){NEEDBYTE;b|=((uLong)NEXTBYTE)<>=(j);k-=(j);}\n/* output bytes */\n#define WAVAIL (uInt)(qread?s->read-q-1:s->end-q)\n#define LOADOUT {q=s->write;m=(uInt)WAVAIL;}\n#define WWRAP {if(q==s->end&&s->read!=s->window){q=s->window;m=(uInt)WAVAIL;}}\n#define FLUSH {UPDOUT r=inflate_flush(s,z,r); LOADOUT}\n#define NEEDOUT {if(m==0){WWRAP if(m==0){FLUSH WWRAP if(m==0) LEAVE}}r=Z_OK;}\n#define OUTBYTE(a) {*q++=(Byte)(a);m--;}\n/* load local pointers */\n#define LOAD {LOADIN LOADOUT}\n\n/* masks for lower bits (size given to avoid silly warnings with Visual C++) */\nextern uInt inflate_mask[17];\n\n/* copy as much as possible from the sliding window to the output area */\nextern int inflate_flush OF((\n inflate_blocks_statef *,\n z_streamp ,\n int));\n\n#ifndef NO_DUMMY_DECL\nstruct internal_state {int dummy;}; /* for buggy compilers */\n#endif\n\n#endif\n/* --- infutil.h */\n\n#ifndef NO_DUMMY_DECL\nstruct inflate_codes_state {int dummy;}; /* for buggy compilers 
*/\n#endif\n\n/* Table for deflate from PKZIP's appnote.txt. */\nlocal const uInt border[] = { /* Order of the bit length code lengths */\n 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15};\n\n/*\n Notes beyond the 1.93a appnote.txt:\n\n 1. Distance pointers never point before the beginning of the output\n stream.\n 2. Distance pointers can point back across blocks, up to 32k away.\n 3. There is an implied maximum of 7 bits for the bit length table and\n 15 bits for the actual data.\n 4. If only one code exists, then it is encoded using one bit. (Zero\n would be more efficient, but perhaps a little confusing.) If two\n codes exist, they are coded using one bit each (0 and 1).\n 5. There is no way of sending zero distance codes--a dummy must be\n sent if there are none. (History: a pre 2.0 version of PKZIP would\n store blocks with no distance codes, but this was discovered to be\n too harsh a criterion.) Valid only for 1.93a. 2.04c does allow\n zero distance codes, which is sent as one code of zero bits in\n length.\n 6. There are up to 286 literal/length codes. Code 256 represents the\n end-of-block. Note however that the static length tree defines\n 288 codes just to fill out the Huffman codes. Codes 286 and 287\n cannot be used though, since there is no length base or extra bits\n defined for them. Similarily, there are up to 30 distance codes.\n However, static trees define 32 codes (all 5 bits) to fill out the\n Huffman codes, but the last two had better not show up in the data.\n 7. Unzip can check dynamic Huffman blocks for complete code sets.\n The exception is that a single code would not be complete (see #4).\n 8. The five bits following the block type is really the number of\n literal codes sent minus 257.\n 9. Length codes 8,16,16 are interpreted as 13 length codes of 8 bits\n (1+6+6). 
Therefore, to output three times the length, you output\n three codes (1+1+1), whereas to output four times the same length,\n you only need two codes (1+3). Hmm.\n 10. In the tree reconstruction algorithm, Code = Code + Increment\n only if BitLength(i) is not zero. (Pretty obvious.)\n 11. Correction: 4 Bits: # of Bit Length codes - 4 (4 - 19)\n 12. Note: length code 284 can represent 227-258, but length code 285\n really is 258. The last length deserves its own, short code\n since it gets used a lot in very redundant files. The length\n 258 is special since 258 - 3 (the min match length) is 255.\n 13. The literal/length and distance code bit lengths are read as a\n single stream of lengths. It is possible (and advantageous) for\n a repeat code (16, 17, or 18) to go across the boundary between\n the two sets of lengths.\n */\n\n\nvoid inflate_blocks_reset(s, z, c)\ninflate_blocks_statef *s;\nz_streamp z;\nuLongf *c;\n{\n if (s->checkfn != Z_NULL)\n *c = s->check;\n if (s->mode == BTREE || s->mode == DTREE)\n ZFREE(z, s->sub.trees.blens);\n if (s->mode == CODES)\n {\n inflate_codes_free(s->sub.decode.codes, z);\n inflate_trees_free(s->sub.decode.td, z);\n inflate_trees_free(s->sub.decode.tl, z);\n }\n s->mode = TYPE;\n s->bitk = 0;\n s->bitb = 0;\n s->read = s->write = s->window;\n if (s->checkfn != Z_NULL)\n z->adler = s->check = (*s->checkfn)(0L, Z_NULL, 0);\n Trace((stderr, \"inflate: blocks reset\\n\"));\n}\n\n\ninflate_blocks_statef *inflate_blocks_new(z, c, w)\nz_streamp z;\ncheck_func c;\nuInt w;\n{\n inflate_blocks_statef *s;\n\n if ((s = (inflate_blocks_statef *)ZALLOC\n (z,1,sizeof(struct inflate_blocks_state))) == Z_NULL)\n return s;\n if ((s->window = (Bytef *)ZALLOC(z, 1, w)) == Z_NULL)\n {\n ZFREE(z, s);\n return Z_NULL;\n }\n s->end = s->window + w;\n s->checkfn = c;\n s->mode = TYPE;\n Trace((stderr, \"inflate: blocks allocated\\n\"));\n inflate_blocks_reset(s, z, &s->check);\n return s;\n}\n\n\n#ifdef DEBUG_ZLIB\n extern uInt 
inflate_hufts;\n#endif\nint inflate_blocks(s, z, r)\ninflate_blocks_statef *s;\nz_streamp z;\nint r;\n{\n uInt t; /* temporary storage */\n uLong b; /* bit buffer */\n uInt k; /* bits in bit buffer */\n Bytef *p; /* input data pointer */\n uInt n; /* bytes available there */\n Bytef *q; /* output window write pointer */\n uInt m; /* bytes to end of window or read pointer */\n\n /* copy input/output information to locals (UPDATE macro restores) */\n LOAD\n\n /* process input based on current state */\n while (1) switch (s->mode)\n {\n case TYPE:\n NEEDBITS(3)\n t = (uInt)b & 7;\n s->last = t & 1;\n switch (t >> 1)\n {\n case 0: /* stored */\n Trace((stderr, \"inflate: stored block%s\\n\",\n s->last ? \" (last)\" : \"\"));\n DUMPBITS(3)\n t = k & 7; /* go to byte boundary */\n DUMPBITS(t)\n s->mode = LENS; /* get length of stored block */\n break;\n case 1: /* fixed */\n Trace((stderr, \"inflate: fixed codes block%s\\n\",\n s->last ? \" (last)\" : \"\"));\n {\n uInt bl, bd;\n inflate_huft *tl, *td;\n\n inflate_trees_fixed(&bl, &bd, &tl, &td);\n s->sub.decode.codes = inflate_codes_new(bl, bd, tl, td, z);\n if (s->sub.decode.codes == Z_NULL)\n {\n r = Z_MEM_ERROR;\n LEAVE\n }\n s->sub.decode.tl = Z_NULL; /* don't try to free these */\n s->sub.decode.td = Z_NULL;\n }\n DUMPBITS(3)\n s->mode = CODES;\n break;\n case 2: /* dynamic */\n Trace((stderr, \"inflate: dynamic codes block%s\\n\",\n s->last ? \" (last)\" : \"\"));\n DUMPBITS(3)\n s->mode = TABLE;\n break;\n case 3: /* illegal */\n DUMPBITS(3)\n s->mode = BADB;\n z->msg = (char*)\"invalid block type\";\n r = Z_DATA_ERROR;\n LEAVE\n }\n break;\n case LENS:\n NEEDBITS(32)\n if ((((~b) >> 16) & 0xffff) != (b & 0xffff))\n {\n s->mode = BADB;\n z->msg = (char*)\"invalid stored block lengths\";\n r = Z_DATA_ERROR;\n LEAVE\n }\n s->sub.left = (uInt)b & 0xffff;\n b = k = 0; /* dump bits */\n Tracev((stderr, \"inflate: stored length %u\\n\", s->sub.left));\n s->mode = s->sub.left ? STORED : (s->last ? 
DRY : TYPE);\n break;\n case STORED:\n if (n == 0)\n LEAVE\n NEEDOUT\n t = s->sub.left;\n if (t > n) t = n;\n if (t > m) t = m;\n zmemcpy(q, p, t);\n p += t; n -= t;\n q += t; m -= t;\n if ((s->sub.left -= t) != 0)\n break;\n Tracev((stderr, \"inflate: stored end, %lu total out\\n\",\n z->total_out + (q >= s->read ? q - s->read :\n (s->end - s->read) + (q - s->window))));\n s->mode = s->last ? DRY : TYPE;\n break;\n case TABLE:\n NEEDBITS(14)\n s->sub.trees.table = t = (uInt)b & 0x3fff;\n#ifndef PKZIP_BUG_WORKAROUND\n if ((t & 0x1f) > 29 || ((t >> 5) & 0x1f) > 29)\n {\n s->mode = BADB;\n z->msg = (char*)\"too many length or distance symbols\";\n r = Z_DATA_ERROR;\n LEAVE\n }\n#endif\n t = 258 + (t & 0x1f) + ((t >> 5) & 0x1f);\n if (t < 19)\n t = 19;\n if ((s->sub.trees.blens = (uIntf*)ZALLOC(z, t, sizeof(uInt))) == Z_NULL)\n {\n r = Z_MEM_ERROR;\n LEAVE\n }\n DUMPBITS(14)\n s->sub.trees.index = 0;\n Tracev((stderr, \"inflate: table sizes ok\\n\"));\n s->mode = BTREE;\n case BTREE:\n while (s->sub.trees.index < 4 + (s->sub.trees.table >> 10))\n {\n NEEDBITS(3)\n s->sub.trees.blens[border[s->sub.trees.index++]] = (uInt)b & 7;\n DUMPBITS(3)\n }\n while (s->sub.trees.index < 19)\n s->sub.trees.blens[border[s->sub.trees.index++]] = 0;\n s->sub.trees.bb = 7;\n t = inflate_trees_bits(s->sub.trees.blens, &s->sub.trees.bb,\n &s->sub.trees.tb, z);\n if (t != Z_OK)\n {\n r = t;\n if (r == Z_DATA_ERROR) {\n ZFREE(z, s->sub.trees.blens);\n s->mode = BADB;\n }\n LEAVE\n }\n s->sub.trees.index = 0;\n Tracev((stderr, \"inflate: bits tree ok\\n\"));\n s->mode = DTREE;\n case DTREE:\n while (t = s->sub.trees.table,\n s->sub.trees.index < 258 + (t & 0x1f) + ((t >> 5) & 0x1f))\n {\n inflate_huft *h;\n uInt i, j, c;\n\n t = s->sub.trees.bb;\n NEEDBITS(t)\n h = s->sub.trees.tb + ((uInt)b & inflate_mask[t]);\n t = h->word.what.Bits;\n c = h->more.Base;\n if (c < 16)\n {\n DUMPBITS(t)\n s->sub.trees.blens[s->sub.trees.index++] = c;\n }\n else /* c == 16..18 */\n {\n i = c == 18 ? 
7 : c - 14;\n j = c == 18 ? 11 : 3;\n NEEDBITS(t + i)\n DUMPBITS(t)\n j += (uInt)b & inflate_mask[i];\n DUMPBITS(i)\n i = s->sub.trees.index;\n t = s->sub.trees.table;\n if (i + j > 258 + (t & 0x1f) + ((t >> 5) & 0x1f) ||\n (c == 16 && i < 1))\n {\n inflate_trees_free(s->sub.trees.tb, z);\n ZFREE(z, s->sub.trees.blens);\n s->mode = BADB;\n z->msg = (char*)\"invalid bit length repeat\";\n r = Z_DATA_ERROR;\n LEAVE\n }\n c = c == 16 ? s->sub.trees.blens[i - 1] : 0;\n do {\n s->sub.trees.blens[i++] = c;\n } while (--j);\n s->sub.trees.index = i;\n }\n }\n inflate_trees_free(s->sub.trees.tb, z);\n s->sub.trees.tb = Z_NULL;\n {\n uInt bl, bd;\n inflate_huft *tl, *td;\n inflate_codes_statef *c;\n\n bl = 9; /* must be <= 9 for lookahead assumptions */\n bd = 6; /* must be <= 9 for lookahead assumptions */\n t = s->sub.trees.table;\n#ifdef DEBUG_ZLIB\n inflate_hufts = 0;\n#endif\n t = inflate_trees_dynamic(257 + (t & 0x1f), 1 + ((t >> 5) & 0x1f),\n s->sub.trees.blens, &bl, &bd, &tl, &td, z);\n if (t != Z_OK)\n {\n if (t == (uInt)Z_DATA_ERROR) {\n ZFREE(z, s->sub.trees.blens);\n s->mode = BADB;\n }\n r = t;\n LEAVE\n }\n Tracev((stderr, \"inflate: trees ok, %d * %d bytes used\\n\",\n inflate_hufts, sizeof(inflate_huft)));\n if ((c = inflate_codes_new(bl, bd, tl, td, z)) == Z_NULL)\n {\n inflate_trees_free(td, z);\n inflate_trees_free(tl, z);\n r = Z_MEM_ERROR;\n LEAVE\n }\n\t/*\n\t * this ZFREE must occur *BEFORE* we mess with sub.decode, because\n\t * sub.trees is union'd with sub.decode.\n\t */\n ZFREE(z, s->sub.trees.blens);\n s->sub.decode.codes = c;\n s->sub.decode.tl = tl;\n s->sub.decode.td = td;\n }\n s->mode = CODES;\n case CODES:\n UPDATE\n if ((r = inflate_codes(s, z, r)) != Z_STREAM_END)\n return inflate_flush(s, z, r);\n r = Z_OK;\n inflate_codes_free(s->sub.decode.codes, z);\n inflate_trees_free(s->sub.decode.td, z);\n inflate_trees_free(s->sub.decode.tl, z);\n LOAD\n Tracev((stderr, \"inflate: codes end, %lu total out\\n\",\n z->total_out + (q >= s->read ? 
q - s->read :\n (s->end - s->read) + (q - s->window))));\n if (!s->last)\n {\n s->mode = TYPE;\n break;\n }\n if (k > 7) /* return unused byte, if any */\n {\n Assert(k < 16, \"inflate_codes grabbed too many bytes\")\n k -= 8;\n n++;\n p--; /* can always return one */\n }\n s->mode = DRY;\n case DRY:\n FLUSH\n if (s->read != s->write)\n LEAVE\n s->mode = DONEB;\n case DONEB:\n r = Z_STREAM_END;\n LEAVE\n case BADB:\n r = Z_DATA_ERROR;\n LEAVE\n default:\n r = Z_STREAM_ERROR;\n LEAVE\n }\n}\n\n\nint inflate_blocks_free(s, z, c)\ninflate_blocks_statef *s;\nz_streamp z;\nuLongf *c;\n{\n inflate_blocks_reset(s, z, c);\n ZFREE(z, s->window);\n ZFREE(z, s);\n Trace((stderr, \"inflate: blocks freed\\n\"));\n return Z_OK;\n}\n\n\nvoid inflate_set_dictionary(s, d, n)\ninflate_blocks_statef *s;\nconst Bytef *d;\nuInt n;\n{\n zmemcpy((charf *)s->window, d, n);\n s->read = s->write = s->window + n;\n}\n\n/*\n * This subroutine adds the data at next_in/avail_in to the output history\n * without performing any output. The output buffer must be \"caught up\";\n * i.e. no pending output (hence s->read equals s->write), and the state must\n * be BLOCKS (i.e. we should be willing to see the start of a series of\n * BLOCKS). 
On exit, the output will also be caught up, and the checksum\n * will have been updated if need be.\n */\nint inflate_addhistory(s, z)\ninflate_blocks_statef *s;\nz_stream *z;\n{\n uLong b; /* bit buffer */ /* NOT USED HERE */\n uInt k; /* bits in bit buffer */ /* NOT USED HERE */\n uInt t; /* temporary storage */\n Bytef *p; /* input data pointer */\n uInt n; /* bytes available there */\n Bytef *q; /* output window write pointer */\n uInt m; /* bytes to end of window or read pointer */\n\n if (s->read != s->write)\n\treturn Z_STREAM_ERROR;\n if (s->mode != TYPE)\n\treturn Z_DATA_ERROR;\n\n /* we're ready to rock */\n LOAD\n /* while there is input ready, copy to output buffer, moving\n * pointers as needed.\n */\n while (n) {\n\tt = n; /* how many to do */\n\t/* is there room until end of buffer? */\n\tif (t > m) t = m;\n\t/* update check information */\n\tif (s->checkfn != Z_NULL)\n\t s->check = (*s->checkfn)(s->check, q, t);\n\tzmemcpy(q, p, t);\n\tq += t;\n\tp += t;\n\tn -= t;\n\tz->total_out += t;\n\ts->read = q; /* drag read pointer forward */\n/* WWRAP */ \t/* expand WWRAP macro by hand to handle s->read */\n\tif (q == s->end) {\n\t s->read = q = s->window;\n\t m = WAVAIL;\n\t}\n }\n UPDATE\n return Z_OK;\n}\n\n\n/*\n * At the end of a Deflate-compressed PPP packet, we expect to have seen\n * a `stored' block type value but not the (zero) length bytes.\n */\nint inflate_packet_flush(s)\n inflate_blocks_statef *s;\n{\n if (s->mode != LENS)\n\treturn Z_DATA_ERROR;\n s->mode = TYPE;\n return Z_OK;\n}\n/* --- infblock.c */\n\n/* +++ inftrees.c */\n/* inftrees.c -- generate Huffman trees for efficient decoding\n * Copyright (C) 1995-1996 Mark Adler\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/* #include \"zutil.h\" */\n/* #include \"inftrees.h\" */\n\nchar inflate_copyright[] = \" inflate 1.0.4 Copyright 1995-1996 Mark Adler \";\n/*\n If you use the zlib library in a product, an acknowledgment is welcome\n in the 
documentation of your product. If for some reason you cannot\n include such an acknowledgment, I would appreciate that you keep this\n copyright string in the executable of your product.\n */\n\n#ifndef NO_DUMMY_DECL\nstruct internal_state {int dummy;}; /* for buggy compilers */\n#endif\n\n/* simplify the use of the inflate_huft type with some defines */\n#define base more.Base\n#define next more.Next\n#define exop word.what.Exop\n#define bits word.what.Bits\n\n\nlocal int huft_build OF((\n uIntf *, /* code lengths in bits */\n uInt, /* number of codes */\n uInt, /* number of \"simple\" codes */\n const uIntf *, /* list of base values for non-simple codes */\n const uIntf *, /* list of extra bits for non-simple codes */\n inflate_huft * FAR*,/* result: starting table */\n uIntf *, /* maximum lookup bits (returns actual) */\n z_streamp )); /* for zalloc function */\n\nlocal voidpf falloc OF((\n voidpf, /* opaque pointer (not used) */\n uInt, /* number of items */\n uInt)); /* size of item */\n\n/* Tables for deflate from PKZIP's appnote.txt. 
*/\nlocal const uInt cplens[31] = { /* Copy lengths for literal codes 257..285 */\n 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31,\n 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258, 0, 0};\n /* see note #13 above about 258 */\nlocal const uInt cplext[31] = { /* Extra bits for literal codes 257..285 */\n 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2,\n 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0, 112, 112}; /* 112==invalid */\nlocal const uInt cpdist[30] = { /* Copy offsets for distance codes 0..29 */\n 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193,\n 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145,\n 8193, 12289, 16385, 24577};\nlocal const uInt cpdext[30] = { /* Extra bits for distance codes */\n 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6,\n 7, 7, 8, 8, 9, 9, 10, 10, 11, 11,\n 12, 12, 13, 13};\n\n/*\n Huffman code decoding is performed using a multi-level table lookup.\n The fastest way to decode is to simply build a lookup table whose\n size is determined by the longest code. However, the time it takes\n to build this table can also be a factor if the data being decoded\n is not very long. The most common codes are necessarily the\n shortest codes, so those codes dominate the decoding time, and hence\n the speed. The idea is you can have a shorter table that decodes the\n shorter, more probable codes, and then point to subsidiary tables for\n the longer codes. The time it costs to decode the longer codes is\n then traded against the time it takes to make longer tables.\n\n This results of this trade are in the variables lbits and dbits\n below. lbits is the number of bits the first level table for literal/\n length codes can decode in one step, and dbits is the same thing for\n the distance codes. Subsequent tables are also less than or equal to\n those sizes. 
These values may be adjusted either when all of the\n codes are shorter than that, in which case the longest code length in\n bits is used, or when the shortest code is *longer* than the requested\n table size, in which case the length of the shortest code in bits is\n used.\n\n There are two different values for the two tables, since they code a\n different number of possibilities each. The literal/length table\n codes 286 possible values, or in a flat code, a little over eight\n bits. The distance table codes 30 possible values, or a little less\n than five bits, flat. The optimum values for speed end up being\n about one bit more than those, so lbits is 8+1 and dbits is 5+1.\n The optimum values may differ though from machine to machine, and\n possibly even between compilers. Your mileage may vary.\n */\n\n\n/* If BMAX needs to be larger than 16, then h and x[] should be uLong. */\n#define BMAX 15 /* maximum bit length of any code */\n#define N_MAX 288 /* maximum number of codes in any set */\n\n#ifdef DEBUG_ZLIB\n uInt inflate_hufts;\n#endif\n\nlocal int huft_build(b, n, s, d, e, t, m, zs)\nuIntf *b; /* code lengths in bits (all assumed <= BMAX) */\nuInt n; /* number of codes (assumed <= N_MAX) */\nuInt s; /* number of simple-valued codes (0..s-1) */\nconst uIntf *d; /* list of base values for non-simple codes */\nconst uIntf *e; /* list of extra bits for non-simple codes */\ninflate_huft * FAR *t; /* result: starting table */\nuIntf *m; /* maximum lookup bits, returns actual */\nz_streamp zs; /* for zalloc function */\n/* Given a list of code lengths and a maximum table size, make a set of\n tables to decode that set of codes. Return Z_OK on success, Z_BUF_ERROR\n if the given code set is incomplete (the tables are still built in this\n case), Z_DATA_ERROR if the input is invalid (an over-subscribed set of\n lengths), or Z_MEM_ERROR if not enough memory. 
*/\n{\n\n uInt a; /* counter for codes of length k */\n uInt c[BMAX+1]; /* bit length count table */\n uInt f; /* i repeats in table every f entries */\n int g; /* maximum code length */\n int h; /* table level */\n register uInt i; /* counter, current code */\n register uInt j; /* counter */\n register int k; /* number of bits in current code */\n int l; /* bits per table (returned in m) */\n register uIntf *p; /* pointer into c[], b[], or v[] */\n inflate_huft *q; /* points to current table */\n struct inflate_huft_s r; /* table entry for structure assignment */\n inflate_huft *u[BMAX]; /* table stack */\n uInt v[N_MAX]; /* values in order of bit length */\n register int w; /* bits before this table == (l * h) */\n uInt x[BMAX+1]; /* bit offsets, then code stack */\n uIntf *xp; /* pointer into x */\n int y; /* number of dummy codes added */\n uInt z; /* number of entries in current table */\n\n\n /* Generate counts for each bit length */\n p = c;\n#define C0 *p++ = 0;\n#define C2 C0 C0 C0 C0\n#define C4 C2 C2 C2 C2\n C4 /* clear c[]--assume BMAX+1 is 16 */\n p = b; i = n;\n do {\n c[*p++]++; /* assume all entries <= BMAX */\n } while (--i);\n if (c[0] == n) /* null input--all zero length codes */\n {\n *t = (inflate_huft *)Z_NULL;\n *m = 0;\n return Z_OK;\n }\n\n\n /* Find minimum and maximum length, bound *m by those */\n l = *m;\n for (j = 1; j <= BMAX; j++)\n if (c[j])\n break;\n k = j; /* minimum code length */\n if ((uInt)l < j)\n l = j;\n for (i = BMAX; i; i--)\n if (c[i])\n break;\n g = i; /* maximum code length */\n if ((uInt)l > i)\n l = i;\n *m = l;\n\n\n /* Adjust last length count to fill out codes, if needed */\n for (y = 1 << j; j < i; j++, y <<= 1)\n if ((y -= c[j]) < 0)\n return Z_DATA_ERROR;\n if ((y -= c[i]) < 0)\n return Z_DATA_ERROR;\n c[i] += y;\n\n\n /* Generate starting offsets into the value table for each length */\n x[1] = j = 0;\n p = c + 1; xp = x + 2;\n while (--i) { /* note that i == g from above */\n *xp++ = (j += *p++);\n }\n\n\n 
/* Make a table of values in order of bit lengths */\n p = b; i = 0;\n do {\n if ((j = *p++) != 0)\n v[x[j]++] = i;\n } while (++i < n);\n n = x[g]; /* set n to length of v */\n\n\n /* Generate the Huffman codes and for each, make the table entries */\n x[0] = i = 0; /* first Huffman code is zero */\n p = v; /* grab values in bit order */\n h = -1; /* no tables yet--level -1 */\n w = -l; /* bits decoded == (l * h) */\n u[0] = (inflate_huft *)Z_NULL; /* just to keep compilers happy */\n q = (inflate_huft *)Z_NULL; /* ditto */\n z = 0; /* ditto */\n\n /* go through the bit lengths (k already is bits in shortest code) */\n for (; k <= g; k++)\n {\n a = c[k];\n while (a--)\n {\n /* here i is the Huffman code of length k bits for value *p */\n /* make tables up to required level */\n while (k > w + l)\n {\n h++;\n w += l; /* previous table always l bits */\n\n /* compute minimum size table less than or equal to l bits */\n z = g - w;\n z = z > (uInt)l ? l : z; /* table size upper limit */\n if ((f = 1 << (j = k - w)) > a + 1) /* try a k-w bit table */\n { /* too few codes for k-w bit table */\n f -= a + 1; /* deduct codes from patterns left */\n xp = c + k;\n if (j < z)\n while (++j < z) /* try smaller tables up to z bits */\n {\n if ((f <<= 1) <= *++xp)\n break; /* enough codes to use up j bits */\n f -= *xp; /* else deduct codes from patterns */\n }\n }\n z = 1 << j; /* table entries for j-bit table */\n\n /* allocate and link in new table */\n if ((q = (inflate_huft *)ZALLOC\n (zs,z + 1,sizeof(inflate_huft))) == Z_NULL)\n {\n if (h)\n inflate_trees_free(u[0], zs);\n return Z_MEM_ERROR; /* not enough memory */\n }\n#ifdef DEBUG_ZLIB\n inflate_hufts += z + 1;\n#endif\n *t = q + 1; /* link to list for huft_free() */\n *(t = &(q->next)) = Z_NULL;\n u[h] = ++q; /* table starts after link */\n\n /* connect to last table, if there is one */\n if (h)\n {\n x[h] = i; /* save pattern for backing up */\n r.bits = (Byte)l; /* bits to dump before this table */\n r.exop = (Byte)j; 
/* bits in this table */\n r.next = q; /* pointer to this table */\n j = i >> (w - l); /* (get around Turbo C bug) */\n u[h-1][j] = r; /* connect to last table */\n }\n }\n\n /* set up table entry in r */\n r.bits = (Byte)(k - w);\n if (p >= v + n)\n r.exop = 128 + 64; /* out of values--invalid code */\n else if (*p < s)\n {\n r.exop = (Byte)(*p < 256 ? 0 : 32 + 64); /* 256 is end-of-block */\n r.base = *p++; /* simple code is just the value */\n }\n else\n {\n r.exop = (Byte)(e[*p - s] + 16 + 64);/* non-simple--look up in lists */\n r.base = d[*p++ - s];\n }\n\n /* fill code-like entries with r */\n f = 1 << (k - w);\n for (j = i >> w; j < z; j += f)\n q[j] = r;\n\n /* backwards increment the k-bit code i */\n for (j = 1 << (k - 1); i & j; j >>= 1)\n i ^= j;\n i ^= j;\n\n /* backup over finished tables */\n while ((i & ((1 << w) - 1)) != x[h])\n {\n h--; /* don't need to update q */\n w -= l;\n }\n }\n }\n\n\n /* Return Z_BUF_ERROR if we were given an incomplete table */\n return y != 0 && g != 1 ? 
Z_BUF_ERROR : Z_OK;\n}\n\n\nint inflate_trees_bits(c, bb, tb, z)\nuIntf *c; /* 19 code lengths */\nuIntf *bb; /* bits tree desired/actual depth */\ninflate_huft * FAR *tb; /* bits tree result */\nz_streamp z; /* for zfree function */\n{\n int r;\n\n r = huft_build(c, 19, 19, (uIntf*)Z_NULL, (uIntf*)Z_NULL, tb, bb, z);\n if (r == Z_DATA_ERROR)\n z->msg = (char*)\"oversubscribed dynamic bit lengths tree\";\n else if (r == Z_BUF_ERROR || *bb == 0)\n {\n inflate_trees_free(*tb, z);\n z->msg = (char*)\"incomplete dynamic bit lengths tree\";\n r = Z_DATA_ERROR;\n }\n return r;\n}\n\n\nint inflate_trees_dynamic(nl, nd, c, bl, bd, tl, td, z)\nuInt nl; /* number of literal/length codes */\nuInt nd; /* number of distance codes */\nuIntf *c; /* that many (total) code lengths */\nuIntf *bl; /* literal desired/actual bit depth */\nuIntf *bd; /* distance desired/actual bit depth */\ninflate_huft * FAR *tl; /* literal/length tree result */\ninflate_huft * FAR *td; /* distance tree result */\nz_streamp z; /* for zfree function */\n{\n int r;\n\n /* build literal/length tree */\n r = huft_build(c, nl, 257, cplens, cplext, tl, bl, z);\n if (r != Z_OK || *bl == 0)\n {\n if (r == Z_DATA_ERROR)\n z->msg = (char*)\"oversubscribed literal/length tree\";\n else if (r != Z_MEM_ERROR)\n {\n inflate_trees_free(*tl, z);\n z->msg = (char*)\"incomplete literal/length tree\";\n r = Z_DATA_ERROR;\n }\n return r;\n }\n\n /* build distance tree */\n r = huft_build(c + nl, nd, 0, cpdist, cpdext, td, bd, z);\n if (r != Z_OK || (*bd == 0 && nl > 257))\n {\n if (r == Z_DATA_ERROR)\n z->msg = (char*)\"oversubscribed distance tree\";\n else if (r == Z_BUF_ERROR) {\n#ifdef PKZIP_BUG_WORKAROUND\n r = Z_OK;\n }\n#else\n inflate_trees_free(*td, z);\n z->msg = (char*)\"incomplete distance tree\";\n r = Z_DATA_ERROR;\n }\n else if (r != Z_MEM_ERROR)\n {\n z->msg = (char*)\"empty distance tree with lengths\";\n r = Z_DATA_ERROR;\n }\n inflate_trees_free(*tl, z);\n return r;\n#endif\n }\n\n /* done */\n return 
Z_OK;\n}\n\n\n/* build fixed tables only once--keep them here */\nlocal int fixed_built = 0;\n#define FIXEDH 530 /* number of hufts used by fixed tables */\nlocal inflate_huft fixed_mem[FIXEDH];\nlocal uInt fixed_bl;\nlocal uInt fixed_bd;\nlocal inflate_huft *fixed_tl;\nlocal inflate_huft *fixed_td;\n\n\nlocal voidpf falloc(q, n, s)\nvoidpf q; /* opaque pointer */\nuInt n; /* number of items */\nuInt s; /* size of item */\n{\n Assert(s == sizeof(inflate_huft) && n <= *(intf *)q,\n \"inflate_trees falloc overflow\");\n *(intf *)q -= n+s-s; /* s-s to avoid warning */\n return (voidpf)(fixed_mem + *(intf *)q);\n}\n\n\nint inflate_trees_fixed(bl, bd, tl, td)\nuIntf *bl; /* literal desired/actual bit depth */\nuIntf *bd; /* distance desired/actual bit depth */\ninflate_huft * FAR *tl; /* literal/length tree result */\ninflate_huft * FAR *td; /* distance tree result */\n{\n /* build fixed tables if not already (multiple overlapped executions ok) */\n if (!fixed_built)\n {\n int k; /* temporary variable */\n unsigned c[288]; /* length list for huft_build */\n z_stream z; /* for falloc function */\n int f = FIXEDH; /* number of hufts left in fixed_mem */\n\n /* set up fake z_stream for memory routines */\n z.zalloc = falloc;\n z.zfree = Z_NULL;\n z.opaque = (voidpf)&f;\n\n /* literal table */\n for (k = 0; k < 144; k++)\n c[k] = 8;\n for (; k < 256; k++)\n c[k] = 9;\n for (; k < 280; k++)\n c[k] = 7;\n for (; k < 288; k++)\n c[k] = 8;\n fixed_bl = 7;\n huft_build(c, 288, 257, cplens, cplext, &fixed_tl, &fixed_bl, &z);\n\n /* distance table */\n for (k = 0; k < 30; k++)\n c[k] = 5;\n fixed_bd = 5;\n huft_build(c, 30, 0, cpdist, cpdext, &fixed_td, &fixed_bd, &z);\n\n /* done */\n Assert(f == 0, \"invalid build of fixed tables\");\n fixed_built = 1;\n }\n *bl = fixed_bl;\n *bd = fixed_bd;\n *tl = fixed_tl;\n *td = fixed_td;\n return Z_OK;\n}\n\n\nint inflate_trees_free(t, z)\ninflate_huft *t; /* table to free */\nz_streamp z; /* for zfree function */\n/* Free the malloc'ed 
tables built by huft_build(), which makes a linked\n list of the tables it made, with the links in a dummy first entry of\n each table. */\n{\n register inflate_huft *p, *q, *r;\n\n /* Reverse linked list */\n p = Z_NULL;\n q = t;\n while (q != Z_NULL)\n {\n r = (q - 1)->next;\n (q - 1)->next = p;\n p = q;\n q = r;\n }\n /* Go through linked list, freeing from the malloced (t[-1]) address. */\n while (p != Z_NULL)\n {\n q = (--p)->next;\n ZFREE(z,p);\n p = q;\n } \n return Z_OK;\n}\n/* --- inftrees.c */\n\n/* +++ infcodes.c */\n/* infcodes.c -- process literals and length/distance pairs\n * Copyright (C) 1995-1996 Mark Adler\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/* #include \"zutil.h\" */\n/* #include \"inftrees.h\" */\n/* #include \"infblock.h\" */\n/* #include \"infcodes.h\" */\n/* #include \"infutil.h\" */\n\n/* +++ inffast.h */\n/* inffast.h -- header to use inffast.c\n * Copyright (C) 1995-1996 Mark Adler\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/* WARNING: this file should *not* be used by applications. It is\n part of the implementation of the compression library and is\n subject to change. 
Applications should only use zlib.h.\n */\n\nextern int inflate_fast OF((\n uInt,\n uInt,\n inflate_huft *,\n inflate_huft *,\n inflate_blocks_statef *,\n z_streamp ));\n/* --- inffast.h */\n\n/* simplify the use of the inflate_huft type with some defines */\n#define base more.Base\n#define next more.Next\n#define exop word.what.Exop\n#define bits word.what.Bits\n\n/* inflate codes private state */\nstruct inflate_codes_state {\n\n /* mode */\n enum { /* waiting for \"i:\"=input, \"o:\"=output, \"x:\"=nothing */\n START, /* x: set up for LEN */\n LEN, /* i: get length/literal/eob next */\n LENEXT, /* i: getting length extra (have base) */\n DIST, /* i: get distance next */\n DISTEXT, /* i: getting distance extra */\n COPY, /* o: copying bytes in window, waiting for space */\n LIT, /* o: got literal, waiting for output space */\n WASH, /* o: got eob, possibly still output waiting */\n END, /* x: got eob and all data flushed */\n BADCODE} /* x: got error */\n mode; /* current inflate_codes mode */\n\n /* mode dependent information */\n uInt len;\n union {\n struct {\n inflate_huft *tree; /* pointer into tree */\n uInt need; /* bits needed */\n } code; /* if LEN or DIST, where in tree */\n uInt lit; /* if LIT, literal */\n struct {\n uInt get; /* bits to get for extra */\n uInt dist; /* distance back to copy from */\n } copy; /* if EXT or COPY, where and how much */\n } sub; /* submode */\n\n /* mode independent information */\n Byte lbits; /* ltree bits decoded per branch */\n Byte dbits; /* dtree bits decoder per branch */\n inflate_huft *ltree; /* literal/length/eob tree */\n inflate_huft *dtree; /* distance tree */\n\n};\n\n\ninflate_codes_statef *inflate_codes_new(bl, bd, tl, td, z)\nuInt bl, bd;\ninflate_huft *tl;\ninflate_huft *td; /* need separate declaration for Borland C++ */\nz_streamp z;\n{\n inflate_codes_statef *c;\n\n if ((c = (inflate_codes_statef *)\n ZALLOC(z,1,sizeof(struct inflate_codes_state))) != Z_NULL)\n {\n c->mode = START;\n c->lbits = 
(Byte)bl;\n c->dbits = (Byte)bd;\n c->ltree = tl;\n c->dtree = td;\n Tracev((stderr, \"inflate: codes new\\n\"));\n }\n return c;\n}\n\n\nint inflate_codes(s, z, r)\ninflate_blocks_statef *s;\nz_streamp z;\nint r;\n{\n uInt j; /* temporary storage */\n inflate_huft *t; /* temporary pointer */\n uInt e; /* extra bits or operation */\n uLong b; /* bit buffer */\n uInt k; /* bits in bit buffer */\n Bytef *p; /* input data pointer */\n uInt n; /* bytes available there */\n Bytef *q; /* output window write pointer */\n uInt m; /* bytes to end of window or read pointer */\n Bytef *f; /* pointer to copy strings from */\n inflate_codes_statef *c = s->sub.decode.codes; /* codes state */\n\n /* copy input/output information to locals (UPDATE macro restores) */\n LOAD\n\n /* process input and output based on current state */\n while (1) switch (c->mode)\n { /* waiting for \"i:\"=input, \"o:\"=output, \"x:\"=nothing */\n case START: /* x: set up for LEN */\n#ifndef SLOW\n if (m >= 258 && n >= 10)\n {\n UPDATE\n r = inflate_fast(c->lbits, c->dbits, c->ltree, c->dtree, s, z);\n LOAD\n if (r != Z_OK)\n {\n c->mode = r == Z_STREAM_END ? 
WASH : BADCODE;\n break;\n }\n }\n#endif /* !SLOW */\n c->sub.code.need = c->lbits;\n c->sub.code.tree = c->ltree;\n c->mode = LEN;\n case LEN: /* i: get length/literal/eob next */\n j = c->sub.code.need;\n NEEDBITS(j)\n t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);\n DUMPBITS(t->bits)\n e = (uInt)(t->exop);\n if (e == 0) /* literal */\n {\n c->sub.lit = t->base;\n Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?\n \"inflate: literal '%c'\\n\" :\n \"inflate: literal 0x%02x\\n\", t->base));\n c->mode = LIT;\n break;\n }\n if (e & 16) /* length */\n {\n c->sub.copy.get = e & 15;\n c->len = t->base;\n c->mode = LENEXT;\n break;\n }\n if ((e & 64) == 0) /* next table */\n {\n c->sub.code.need = e;\n c->sub.code.tree = t->next;\n break;\n }\n if (e & 32) /* end of block */\n {\n Tracevv((stderr, \"inflate: end of block\\n\"));\n c->mode = WASH;\n break;\n }\n c->mode = BADCODE; /* invalid code */\n z->msg = (char*)\"invalid literal/length code\";\n r = Z_DATA_ERROR;\n LEAVE\n case LENEXT: /* i: getting length extra (have base) */\n j = c->sub.copy.get;\n NEEDBITS(j)\n c->len += (uInt)b & inflate_mask[j];\n DUMPBITS(j)\n c->sub.code.need = c->dbits;\n c->sub.code.tree = c->dtree;\n Tracevv((stderr, \"inflate: length %u\\n\", c->len));\n c->mode = DIST;\n case DIST: /* i: get distance next */\n j = c->sub.code.need;\n NEEDBITS(j)\n t = c->sub.code.tree + ((uInt)b & inflate_mask[j]);\n DUMPBITS(t->bits)\n e = (uInt)(t->exop);\n if (e & 16) /* distance */\n {\n c->sub.copy.get = e & 15;\n c->sub.copy.dist = t->base;\n c->mode = DISTEXT;\n break;\n }\n if ((e & 64) == 0) /* next table */\n {\n c->sub.code.need = e;\n c->sub.code.tree = t->next;\n break;\n }\n c->mode = BADCODE; /* invalid code */\n z->msg = (char*)\"invalid distance code\";\n r = Z_DATA_ERROR;\n LEAVE\n case DISTEXT: /* i: getting distance extra */\n j = c->sub.copy.get;\n NEEDBITS(j)\n c->sub.copy.dist += (uInt)b & inflate_mask[j];\n DUMPBITS(j)\n Tracevv((stderr, \"inflate: distance %u\\n\", 
c->sub.copy.dist));\n c->mode = COPY;\n case COPY: /* o: copying bytes in window, waiting for space */\n#ifndef __TURBOC__ /* Turbo C bug for following expression */\n f = (uInt)(q - s->window) < c->sub.copy.dist ?\n s->end - (c->sub.copy.dist - (q - s->window)) :\n q - c->sub.copy.dist;\n#else\n f = q - c->sub.copy.dist;\n if ((uInt)(q - s->window) < c->sub.copy.dist)\n f = s->end - (c->sub.copy.dist - (uInt)(q - s->window));\n#endif\n while (c->len)\n {\n NEEDOUT\n OUTBYTE(*f++)\n if (f == s->end)\n f = s->window;\n c->len--;\n }\n c->mode = START;\n break;\n case LIT: /* o: got literal, waiting for output space */\n NEEDOUT\n OUTBYTE(c->sub.lit)\n c->mode = START;\n break;\n case WASH: /* o: got eob, possibly more output */\n FLUSH\n if (s->read != s->write)\n LEAVE\n c->mode = END;\n case END:\n r = Z_STREAM_END;\n LEAVE\n case BADCODE: /* x: got error */\n r = Z_DATA_ERROR;\n LEAVE\n default:\n r = Z_STREAM_ERROR;\n LEAVE\n }\n}\n\n\nvoid inflate_codes_free(c, z)\ninflate_codes_statef *c;\nz_streamp z;\n{\n ZFREE(z, c);\n Tracev((stderr, \"inflate: codes free\\n\"));\n}\n/* --- infcodes.c */\n\n/* +++ infutil.c */\n/* inflate_util.c -- data and routines common to blocks and codes\n * Copyright (C) 1995-1996 Mark Adler\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/* #include \"zutil.h\" */\n/* #include \"infblock.h\" */\n/* #include \"inftrees.h\" */\n/* #include \"infcodes.h\" */\n/* #include \"infutil.h\" */\n\n#ifndef NO_DUMMY_DECL\nstruct inflate_codes_state {int dummy;}; /* for buggy compilers */\n#endif\n\n/* And'ing with mask[n] masks the lower n bits */\nuInt inflate_mask[17] = {\n 0x0000,\n 0x0001, 0x0003, 0x0007, 0x000f, 0x001f, 0x003f, 0x007f, 0x00ff,\n 0x01ff, 0x03ff, 0x07ff, 0x0fff, 0x1fff, 0x3fff, 0x7fff, 0xffff\n};\n\n\n/* copy as much as possible from the sliding window to the output area */\nint inflate_flush(s, z, r)\ninflate_blocks_statef *s;\nz_streamp z;\nint r;\n{\n uInt n;\n Bytef *p;\n Bytef *q;\n\n 
/* local copies of source and destination pointers */\n p = z->next_out;\n q = s->read;\n\n /* compute number of bytes to copy as far as end of window */\n n = (uInt)((q <= s->write ? s->write : s->end) - q);\n if (n > z->avail_out) n = z->avail_out;\n if (n && r == Z_BUF_ERROR) r = Z_OK;\n\n /* update counters */\n z->avail_out -= n;\n z->total_out += n;\n\n /* update check information */\n if (s->checkfn != Z_NULL)\n z->adler = s->check = (*s->checkfn)(s->check, q, n);\n\n /* copy as far as end of window */\n if (p != Z_NULL) {\n zmemcpy(p, q, n);\n p += n;\n }\n q += n;\n\n /* see if more to copy at beginning of window */\n if (q == s->end)\n {\n /* wrap pointers */\n q = s->window;\n if (s->write == s->end)\n s->write = s->window;\n\n /* compute bytes to copy */\n n = (uInt)(s->write - q);\n if (n > z->avail_out) n = z->avail_out;\n if (n && r == Z_BUF_ERROR) r = Z_OK;\n\n /* update counters */\n z->avail_out -= n;\n z->total_out += n;\n\n /* update check information */\n if (s->checkfn != Z_NULL)\n z->adler = s->check = (*s->checkfn)(s->check, q, n);\n\n /* copy */\n if (p != Z_NULL) {\n zmemcpy(p, q, n);\n p += n;\n }\n q += n;\n }\n\n /* update pointers */\n z->next_out = p;\n s->read = q;\n\n /* done */\n return r;\n}\n/* --- infutil.c */\n\n/* +++ inffast.c */\n/* inffast.c -- process literals and length/distance pairs fast\n * Copyright (C) 1995-1996 Mark Adler\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/* #include \"zutil.h\" */\n/* #include \"inftrees.h\" */\n/* #include \"infblock.h\" */\n/* #include \"infcodes.h\" */\n/* #include \"infutil.h\" */\n/* #include \"inffast.h\" */\n\n#ifndef NO_DUMMY_DECL\nstruct inflate_codes_state {int dummy;}; /* for buggy compilers */\n#endif\n\n/* simplify the use of the inflate_huft type with some defines */\n#define base more.Base\n#define next more.Next\n#define exop word.what.Exop\n#define bits word.what.Bits\n\n/* macros for bit input with no checking and for returning 
unused bytes */\n#define GRABBITS(j) {while(k<(j)){b|=((uLong)NEXTBYTE)<>3);p-=c;k&=7;}\n\n/* Called with number of bytes left to write in window at least 258\n (the maximum string length) and number of input bytes available\n at least ten. The ten bytes are six bytes for the longest length/\n distance pair plus four bytes for overloading the bit buffer. */\n\nint inflate_fast(bl, bd, tl, td, s, z)\nuInt bl, bd;\ninflate_huft *tl;\ninflate_huft *td; /* need separate declaration for Borland C++ */\ninflate_blocks_statef *s;\nz_streamp z;\n{\n inflate_huft *t; /* temporary pointer */\n uInt e; /* extra bits or operation */\n uLong b; /* bit buffer */\n uInt k; /* bits in bit buffer */\n Bytef *p; /* input data pointer */\n uInt n; /* bytes available there */\n Bytef *q; /* output window write pointer */\n uInt m; /* bytes to end of window or read pointer */\n uInt ml; /* mask for literal/length tree */\n uInt md; /* mask for distance tree */\n uInt c; /* bytes to copy */\n uInt d; /* distance back to copy from */\n Bytef *r; /* copy source pointer */\n\n /* load input, output, bit values */\n LOAD\n\n /* initialize masks */\n ml = inflate_mask[bl];\n md = inflate_mask[bd];\n\n /* do until not enough input or output space for fast loop */\n do { /* assume called with m >= 258 && n >= 10 */\n /* get literal/length code */\n GRABBITS(20) /* max bits for literal/length code */\n if ((e = (t = tl + ((uInt)b & ml))->exop) == 0)\n {\n DUMPBITS(t->bits)\n Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?\n \"inflate: * literal '%c'\\n\" :\n \"inflate: * literal 0x%02x\\n\", t->base));\n *q++ = (Byte)t->base;\n m--;\n continue;\n }\n do {\n DUMPBITS(t->bits)\n if (e & 16)\n {\n /* get extra bits for length */\n e &= 15;\n c = t->base + ((uInt)b & inflate_mask[e]);\n DUMPBITS(e)\n Tracevv((stderr, \"inflate: * length %u\\n\", c));\n\n /* decode distance base of block to copy */\n GRABBITS(15); /* max bits for distance code */\n e = (t = td + ((uInt)b & md))->exop;\n do {\n 
DUMPBITS(t->bits)\n if (e & 16)\n {\n /* get extra bits to add to distance base */\n e &= 15;\n GRABBITS(e) /* get extra bits (up to 13) */\n d = t->base + ((uInt)b & inflate_mask[e]);\n DUMPBITS(e)\n Tracevv((stderr, \"inflate: * distance %u\\n\", d));\n\n /* do the copy */\n m -= c;\n if ((uInt)(q - s->window) >= d) /* offset before dest */\n { /* just copy */\n r = q - d;\n *q++ = *r++; c--; /* minimum count is three, */\n *q++ = *r++; c--; /* so unroll loop a little */\n }\n else /* else offset after destination */\n {\n e = d - (uInt)(q - s->window); /* bytes from offset to end */\n r = s->end - e; /* pointer to offset */\n if (c > e) /* if source crosses, */\n {\n c -= e; /* copy to end of window */\n do {\n *q++ = *r++;\n } while (--e);\n r = s->window; /* copy rest from start of window */\n }\n }\n do { /* copy all or what's left */\n *q++ = *r++;\n } while (--c);\n break;\n }\n else if ((e & 64) == 0)\n e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop;\n else\n {\n z->msg = (char*)\"invalid distance code\";\n UNGRAB\n UPDATE\n return Z_DATA_ERROR;\n }\n } while (1);\n break;\n }\n if ((e & 64) == 0)\n {\n if ((e = (t = t->next + ((uInt)b & inflate_mask[e]))->exop) == 0)\n {\n DUMPBITS(t->bits)\n Tracevv((stderr, t->base >= 0x20 && t->base < 0x7f ?\n \"inflate: * literal '%c'\\n\" :\n \"inflate: * literal 0x%02x\\n\", t->base));\n *q++ = (Byte)t->base;\n m--;\n break;\n }\n }\n else if (e & 32)\n {\n Tracevv((stderr, \"inflate: * end of block\\n\"));\n UNGRAB\n UPDATE\n return Z_STREAM_END;\n }\n else\n {\n z->msg = (char*)\"invalid literal/length code\";\n UNGRAB\n UPDATE\n return Z_DATA_ERROR;\n }\n } while (1);\n } while (m >= 258 && n >= 10);\n\n /* not enough input or output--restore pointers and return */\n UNGRAB\n UPDATE\n return Z_OK;\n}\n/* --- inffast.c */\n\n/* +++ zutil.c */\n/* zutil.c -- target dependent utility functions for the compression library\n * Copyright (C) 1995-1996 Jean-loup Gailly.\n * For conditions of distribution and use, 
see copyright notice in zlib.h \n */\n\n/* From: zutil.c,v 1.17 1996/07/24 13:41:12 me Exp $ */\n\n#ifdef DEBUG_ZLIB\n#include \n#endif\n\n/* #include \"zutil.h\" */\n\n#ifndef NO_DUMMY_DECL\nstruct internal_state {int dummy;}; /* for buggy compilers */\n#endif\n\n#ifndef STDC\nextern void exit OF((int));\n#endif\n\nstatic const char *z_errmsg[10] = {\n\"need dictionary\", /* Z_NEED_DICT 2 */\n\"stream end\", /* Z_STREAM_END 1 */\n\"\", /* Z_OK 0 */\n\"file error\", /* Z_ERRNO (-1) */\n\"stream error\", /* Z_STREAM_ERROR (-2) */\n\"data error\", /* Z_DATA_ERROR (-3) */\n\"insufficient memory\", /* Z_MEM_ERROR (-4) */\n\"buffer error\", /* Z_BUF_ERROR (-5) */\n\"incompatible version\",/* Z_VERSION_ERROR (-6) */\n\"\"};\n\n\nconst char *zlibVersion()\n{\n return ZLIB_VERSION;\n}\n\n#ifdef DEBUG_ZLIB\nvoid z_error (m)\n char *m;\n{\n fprintf(stderr, \"%s\\n\", m);\n exit(1);\n}\n#endif\n\n#ifndef HAVE_MEMCPY\n\nvoid zmemcpy(dest, source, len)\n Bytef* dest;\n Bytef* source;\n uInt len;\n{\n if (len == 0) return;\n do {\n *dest++ = *source++; /* ??? to be unrolled */\n } while (--len != 0);\n}\n\nint zmemcmp(s1, s2, len)\n Bytef* s1;\n Bytef* s2;\n uInt len;\n{\n uInt j;\n\n for (j = 0; j < len; j++) {\n if (s1[j] != s2[j]) return 2*(s1[j] > s2[j])-1;\n }\n return 0;\n}\n\nvoid zmemzero(dest, len)\n Bytef* dest;\n uInt len;\n{\n if (len == 0) return;\n do {\n *dest++ = 0; /* ??? to be unrolled */\n } while (--len != 0);\n}\n#endif\n\n#ifdef __TURBOC__\n#if (defined( __BORLANDC__) || !defined(SMALL_MEDIUM)) && !defined(__32BIT__)\n/* Small and medium model in Turbo C are for now limited to near allocation\n * with reduced MAX_WBITS and MAX_MEM_LEVEL\n */\n# define MY_ZCALLOC\n\n/* Turbo C malloc() does not allow dynamic allocation of 64K bytes\n * and farmalloc(64K) returns a pointer with an offset of 8, so we\n * must fix the pointer. 
Warning: the pointer must be put back to its\n * original form in order to free it, use zcfree().\n */\n\n#define MAX_PTR 10\n/* 10*64K = 640K */\n\nlocal int next_ptr = 0;\n\ntypedef struct ptr_table_s {\n voidpf org_ptr;\n voidpf new_ptr;\n} ptr_table;\n\nlocal ptr_table table[MAX_PTR];\n/* This table is used to remember the original form of pointers\n * to large buffers (64K). Such pointers are normalized with a zero offset.\n * Since MSDOS is not a preemptive multitasking OS, this table is not\n * protected from concurrent access. This hack doesn't work anyway on\n * a protected system like OS/2. Use Microsoft C instead.\n */\n\nvoidpf zcalloc (voidpf opaque, unsigned items, unsigned size)\n{\n voidpf buf = opaque; /* just to make some compilers happy */\n ulg bsize = (ulg)items*size;\n\n /* If we allocate less than 65520 bytes, we assume that farmalloc\n * will return a usable pointer which doesn't have to be normalized.\n */\n if (bsize < 65520L) {\n buf = farmalloc(bsize);\n if (*(ush*)&buf != 0) return buf;\n } else {\n buf = farmalloc(bsize + 16L);\n }\n if (buf == NULL || next_ptr >= MAX_PTR) return NULL;\n table[next_ptr].org_ptr = buf;\n\n /* Normalize the pointer to seg:0 */\n *((ush*)&buf+1) += ((ush)((uch*)buf-0) + 15) >> 4;\n *(ush*)&buf = 0;\n table[next_ptr++].new_ptr = buf;\n return buf;\n}\n\nvoid zcfree (voidpf opaque, voidpf ptr)\n{\n int n;\n if (*(ush*)&ptr != 0) { /* object < 64K */\n farfree(ptr);\n return;\n }\n /* Find the original pointer */\n for (n = 0; n < next_ptr; n++) {\n if (ptr != table[n].new_ptr) continue;\n\n farfree(table[n].org_ptr);\n while (++n < next_ptr) {\n table[n-1] = table[n];\n }\n next_ptr--;\n return;\n }\n ptr = opaque; /* just to make some compilers happy */\n Assert(0, \"zcfree: ptr not found\");\n}\n#endif\n#endif /* __TURBOC__ */\n\n\n#if defined(M_I86) && !defined(__32BIT__)\n/* Microsoft C in 16-bit mode */\n\n# define MY_ZCALLOC\n\n#if (!defined(_MSC_VER) || (_MSC_VER < 600))\n# define _halloc halloc\n# 
define _hfree hfree\n#endif\n\nvoidpf zcalloc (voidpf opaque, unsigned items, unsigned size)\n{\n if (opaque) opaque = 0; /* to make compiler happy */\n return _halloc((long)items, size);\n}\n\nvoid zcfree (voidpf opaque, voidpf ptr)\n{\n if (opaque) opaque = 0; /* to make compiler happy */\n _hfree(ptr);\n}\n\n#endif /* MSC */\n\n\n#ifndef MY_ZCALLOC /* Any system without a special alloc function */\n\n#ifndef STDC\nextern voidp calloc OF((uInt items, uInt size));\nextern void free OF((voidpf ptr));\n#endif\n\nvoidpf zcalloc (opaque, items, size)\n voidpf opaque;\n unsigned items;\n unsigned size;\n{\n if (opaque) items += size - size; /* make compiler happy */\n return (voidpf)calloc(items, size);\n}\n\nvoid zcfree (opaque, ptr)\n voidpf opaque;\n voidpf ptr;\n{\n free(ptr);\n if (opaque) return; /* make compiler happy */\n}\n\n#endif /* MY_ZCALLOC */\n/* --- zutil.c */\n\n/* +++ adler32.c */\n/* adler32.c -- compute the Adler-32 checksum of a data stream\n * Copyright (C) 1995-1996 Mark Adler\n * For conditions of distribution and use, see copyright notice in zlib.h \n */\n\n/* From: adler32.c,v 1.10 1996/05/22 11:52:18 me Exp $ */\n\n/* #include \"zlib.h\" */\n\n#define BASE 65521L /* largest prime smaller than 65536 */\n#define NMAX 5552\n/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */\n\n#define DO1(buf,i) {s1 += buf[(i)]; s2 += s1;}\n#define DO2(buf,i) DO1(buf,i); DO1(buf,(i)+1);\n#define DO4(buf,i) DO2(buf,i); DO2(buf,(i)+2);\n#define DO8(buf,i) DO4(buf,i); DO4(buf,(i)+4);\n#define DO16(buf) DO8(buf,0); DO8(buf,8);\n\n/* ========================================================================= */\nuLong adler32(adler, buf, len)\n uLong adler;\n const Bytef *buf;\n uInt len;\n{\n unsigned long s1 = adler & 0xffff;\n unsigned long s2 = (adler >> 16) & 0xffff;\n int k;\n\n if (buf == Z_NULL) return 1L;\n\n while (len > 0) {\n k = len < NMAX ? 
len : NMAX;\n len -= k;\n while (k >= 16) {\n DO16(buf);\n\t buf += 16;\n k -= 16;\n }\n if (k != 0) do {\n s1 += *buf++;\n\t s2 += s1;\n } while (--k);\n s1 %= BASE;\n s2 %= BASE;\n }\n return (s2 << 16) | s1;\n}\n/* --- adler32.c */\n\n#ifdef _KERNEL\nstatic int\nzlib_modevent(module_t mod, int type, void *unused)\n{\n\tswitch (type) {\n\tcase MOD_LOAD:\n\t\treturn 0;\n\tcase MOD_UNLOAD:\n\t\treturn 0;\n\t}\n\treturn EINVAL;\n}\n\nstatic moduledata_t zlib_mod = {\n\t\"zlib\",\n\tzlib_modevent,\n\t0\n};\nDECLARE_MODULE(zlib, zlib_mod, SI_SUB_DRIVERS, SI_ORDER_FIRST);\nMODULE_VERSION(zlib, 1);\n#endif /* _KERNEL */\n", "meta": {"content_hash": "b0f3a46a1a836cf5cb7e6eddf1b1927c", "timestamp": "", "source": "github", "line_count": 5414, "max_line_length": 82, "avg_line_length": 33.080716660509786, "alnum_prop": 0.5529846621142497, "repo_name": "dplbsd/zcaplib", "id": "09511c11644e203f2f3d96f4637d08d335d5507f", "size": "179099", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "head/sys/net/zlib.c", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "AGS Script", "bytes": "62471"}, {"name": "Assembly", "bytes": "4478661"}, {"name": "Awk", "bytes": "278525"}, {"name": "Batchfile", "bytes": "20417"}, {"name": "C", "bytes": "383420305"}, {"name": "C++", "bytes": "72796771"}, {"name": "CSS", "bytes": "109748"}, {"name": "ChucK", "bytes": "39"}, {"name": "D", "bytes": "3784"}, {"name": "DIGITAL Command Language", "bytes": "10640"}, {"name": "DTrace", "bytes": "2311027"}, {"name": "Emacs Lisp", "bytes": "65902"}, {"name": "EmberScript", "bytes": "286"}, {"name": "Forth", "bytes": "184405"}, {"name": "GAP", "bytes": "72156"}, {"name": "Groff", "bytes": "32248806"}, {"name": "HTML", "bytes": "6749816"}, {"name": "IGOR Pro", "bytes": "6301"}, {"name": "Java", "bytes": "112547"}, {"name": "KRL", "bytes": "4950"}, {"name": "Lex", "bytes": "398817"}, {"name": "Limbo", "bytes": "3583"}, {"name": "Logos", "bytes": "187900"}, {"name": "Makefile", 
"bytes": "3551839"}, {"name": "Mathematica", "bytes": "9556"}, {"name": "Max", "bytes": "4178"}, {"name": "Module Management System", "bytes": "817"}, {"name": "NSIS", "bytes": "3383"}, {"name": "Objective-C", "bytes": "836351"}, {"name": "PHP", "bytes": "6649"}, {"name": "Perl", "bytes": "5530761"}, {"name": "Perl6", "bytes": "41802"}, {"name": "PostScript", "bytes": "140088"}, {"name": "Prolog", "bytes": "29514"}, {"name": "Protocol Buffer", "bytes": "61933"}, {"name": "Python", "bytes": "299247"}, {"name": "R", "bytes": "764"}, {"name": "Rebol", "bytes": "738"}, {"name": "Ruby", "bytes": "45958"}, {"name": "Scilab", "bytes": "197"}, {"name": "Shell", "bytes": "10501540"}, {"name": "SourcePawn", "bytes": "463194"}, {"name": "SuperCollider", "bytes": "80208"}, {"name": "Tcl", "bytes": "80913"}, {"name": "TeX", "bytes": "719821"}, {"name": "VimL", "bytes": "22201"}, {"name": "XS", "bytes": "25451"}, {"name": "XSLT", "bytes": "31488"}, {"name": "Yacc", "bytes": "1857830"}]}} {"text": "#pragma once\n\n#include \"Actor/Material/Material.h\"\n\n#include \n\nnamespace ph\n{\n\nclass InputPacket;\nclass CookingContext;\nclass SurfaceMaterial;\nclass VolumeMaterial;\n\n// TODO: volume material\n\nclass FullMaterial final : public Material, public TCommandInterface\n{\npublic:\n\tFullMaterial();\n\tFullMaterial(const std::shared_ptr& surfaceMaterial);\n\n\tvoid genBehaviors(CookingContext& context, PrimitiveMetadata& metadata) const override;\n\nprivate:\n\tstd::shared_ptr m_surfaceMaterial;\n\tstd::shared_ptr m_interiorMaterial;\n\tstd::shared_ptr m_exteriorMaterial;\n\n// command interface\npublic:\n\texplicit FullMaterial(const InputPacket& packet);\n\tstatic SdlTypeInfo ciTypeInfo();\n\tstatic void ciRegister(CommandRegister& cmdRegister);\n};\n\n}// end namespace ph\n\n/*\n\t\n\n\t material \n\t full \n\t material.material \n\n\t Full Material \n\t\n\t\tA material model that combines surface and volume properties.\n\t\n\n\t\n\t\t\n\t\t\tA surface 
material.\n\t\t\n\t\t\n\t\t\tA volume material describing the inside of the surface.\n\t\t\n\t\t\n\t\t\tA volume material describing the outside of the surface.\n\t\t\n\t\n\n\t\n*/", "meta": {"content_hash": "683a356489491864c7ccfc956ac8d98b", "timestamp": "", "source": "github", "line_count": 64, "max_line_length": 88, "avg_line_length": 24.296875, "alnum_prop": 0.7434083601286173, "repo_name": "TzuChieh/Photon-v2", "id": "bb9c31840a3331eef3937829bf25e2d7ec23ecbb", "size": "1555", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Engine/Source/Actor/Material/FullMaterial.h", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "429"}, {"name": "C", "bytes": "7059"}, {"name": "C++", "bytes": "1572544"}, {"name": "CMake", "bytes": "7944"}, {"name": "Java", "bytes": "265901"}, {"name": "Objective-C", "bytes": "336"}, {"name": "Python", "bytes": "171368"}, {"name": "Shell", "bytes": "439"}]}} {"text": "'use strict';\n\n/**\n * Module dependencies.\n */\nvar should = require('should'),\n\tmongoose = require('mongoose'),\n\tUser = mongoose.model('User'),\n\tPlayer = mongoose.model('Player');\n\n/**\n * Globals\n */\nvar user, player;\n\n/**\n * Unit tests\n */\ndescribe('Player Model Unit Tests:', function() {\n\tbeforeEach(function(done) {\n\t\tuser = new User({\n\t\t\tfirstName: 'Full',\n\t\t\tlastName: 'Name',\n\t\t\tdisplayName: 'Full Name',\n\t\t\temail: 'test@test.com',\n\t\t\tusername: 'username',\n\t\t\tpassword: 'password'\n\t\t});\n\n\t\tuser.save(function() { \n\t\t\tplayer = new Player({\n\t\t\t\tname: 'Player Name',\n\t\t\t\tuser: user\n\t\t\t});\n\n\t\t\tdone();\n\t\t});\n\t});\n\n\tdescribe('Method Save', function() {\n\t\tit('should be able to save without problems', function(done) {\n\t\t\treturn player.save(function(err) {\n\t\t\t\tshould.not.exist(err);\n\t\t\t\tdone();\n\t\t\t});\n\t\t});\n\n\t\tit('should be able to show an error when try to save without name', function(done) { \n\t\t\tplayer.name = 
'';\n\n\t\t\treturn player.save(function(err) {\n\t\t\t\tshould.exist(err);\n\t\t\t\tdone();\n\t\t\t});\n\t\t});\n\t});\n\n\tafterEach(function(done) { \n\t\tPlayer.remove().exec();\n\t\tUser.remove().exec();\n\n\t\tdone();\n\t});\n});", "meta": {"content_hash": "355913364d83c5a4285a66bb7279ba03", "timestamp": "", "source": "github", "line_count": 64, "max_line_length": 87, "avg_line_length": 17.359375, "alnum_prop": 0.5931593159315932, "repo_name": "oferlivny/iteammaker", "id": "4e7b56a0c36ea60489ae56d7564e561a216d2dc4", "size": "1111", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "app/tests/player.server.model.test.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "538"}, {"name": "HTML", "bytes": "27971"}, {"name": "JavaScript", "bytes": "133065"}, {"name": "Shell", "bytes": "414"}]}} {"text": "var sys = require('sys');\n\nmodule.exports = function(id, bufferSize, maxBaseNicknameLength) {\n\tvar room = {};\n\tvar clientCount = 0;\n\tvar clients = {};\n\tvar nicknames = {};\n\tvar buffer = [];\n\n\tvar broadcast = function(message, sender) {\n\t\tfor (sessionId in clients) {\n\t\t\tvar client = clients[sessionId];\n\t\t\tif (buffer.length > bufferSize) buffer.shift();\n\n\t\t\tif (!sender || sender.sessionId != client.sessionId) {\n\t\t\t\tclient.send(message);\n\t\t\t}\n\t\t}\n\t};\n\n\tvar splitNickname = function(str) {\n\t\tvar reverse = parseInt(str.split('').reverse().join(''), 10);\n\t\tif (isNaN(reverse)) return {prefix: str, suffix: null};\n\t\tvar suffix = parseInt(String(reverse).split('').reverse().join(''), 10);\n\t\treturn {prefix: str.substring(0, str.length - String(suffix).length), suffix: suffix};\n\t};\n\n\n\tvar assignNickname = function(nickname, client) {\n\t\tnickname = nickname.substring(0, maxBaseNicknameLength);\n\t\tvar nicks = {};\n\t\tfor (sessionId in nicknames) {\n\t\t\tnicks[nicknames[sessionId]] = sessionId;\n\t\t}\n\n\t\tif (!(nickname in nicks)) 
{\n\t\t\tnicknames[client.sessionId] = nickname;\n\t\t\treturn;\n\t\t}\n\n\t\tvar nick = splitNickname(nickname);\n\t\tvar i = nick.suffix ? nick.suffix : 1;\n\t\twhile ((nick.prefix + i) in nicks && nicks[nick.prefix + i] != client.sessionId) {\n\t\t\ti++;\n\t\t}\n\n\t\tnicknames[client.sessionId] = nick.prefix + i;\n\t};\n\n\tvar getNickname = function(client) {\n\t\tif (!client) return null;\n\t\treturn nicknames[client.sessionId];\n\t};\n\n\troom.addClient = function(nickname, client) {\n\t\tclients[client.sessionId] = client;\n\t\tassignNickname(nickname, client);\n\t\tclientCount++;\n\t\tclient.send({\n\t\t\taction: 'init', \n\t\t\tvalue: {\n\t\t\t\tbuffer: buffer,\n\t\t\t\tnicknames: nicknames,\n\t\t\t\troomId: id\n\t\t\t}\n\t\t});\n\t\troom.broadcastAnnouncement(getNickname(client) + ' has joined.', client);\n\t\tbroadcast({action: 'nicknamesUpdated', value: nicknames});\n\t};\n\n\troom.removeClient = function(client) {\n\t\troom.broadcastAnnouncement(getNickname(client) + ' has left.', client);\n\t\tdelete clients[client.sessionId];\n\t\tdelete nicknames[client.sessionId];\n\t\tclientCount--;\n\t\tbroadcast({action: 'nicknamesUpdated', value: nicknames});\n\t};\n\n\troom.getClientCount = function() {\n\t\treturn clientCount;\n\t};\n\n\troom.getID = function() {\n\t\treturn id;\n\t};\n\n\troom.broadcastAnnouncement = function(message, sender) {\n\t\tbuffer.push({message: message, type: 'announcement', nickname: getNickname(sender)});\n\t\tbroadcast({action: 'announcement', value: {message: message, nickname: getNickname(sender)}}, sender);\n\t};\n\n\troom.broadcastStatus = function(status, sender) {\n\t\tstatus = getNickname(sender) + ' ' + status;\n\t\troom.broadcastAnnouncement(status);\n\t};\n\n\troom.broadcastMessage = function(message, sender) {\n\t\tbuffer.push({message: message, type: 'message', nickname: getNickname(sender)});\n\t\tbroadcast({action: 'message', value: {message: message, nickname: getNickname(sender)}}, 
sender);\n\t};\n\n\troom.changeNickname = function(nickname, client) {\n\t\tvar oldNick = getNickname(client);\n\t\tif (oldNick === nickname) {\n\t\t\treturn;\n\t\t}\n\t\tassignNickname(nickname, client);\n\t\tvar newNick = getNickname(client);\n\t\tif (oldNick === newNick) {\n\t\t\treturn;\n\t\t}\n\t\tbroadcast({action: 'nicknamesUpdated', value: nicknames});\n\t\troom.broadcastAnnouncement(oldNick + ' is now known as ' + newNick);\n\t};\n\n\treturn room;\n};\n", "meta": {"content_hash": "cd153d09951fec364460d7cb8c882bdd", "timestamp": "", "source": "github", "line_count": 117, "max_line_length": 104, "avg_line_length": 27.726495726495727, "alnum_prop": 0.6778668310727497, "repo_name": "ipartola/natchat", "id": "680c65dcc50099fb11bd46109372cf92eb45c2b7", "size": "3244", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "chat-room.js", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "78785"}, {"name": "Python", "bytes": "655"}]}} {"text": "'use strict';\n\nangular.module('myApp.register', ['ngRoute'])\n\n// Declared route\n.config(['$routeProvider', function($routeProvider) {\n $routeProvider.when('/register', {\n templateUrl: 'register/register.html',\n controller: 'RegisterCtrl'\n });\n}])\n\n// Register controller\n.controller('RegisterCtrl', [function() {\n\n}]);\n", "meta": {"content_hash": "b448e6505a990f4891411871b85109ea", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 53, "avg_line_length": 21.1875, "alnum_prop": 0.6519174041297935, "repo_name": "toshibakru/FinTech", "id": "8701eb275730a498179f35f7289721264dd44239", "size": "339", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/register/register.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "1187"}, {"name": "HTML", "bytes": "16806"}, {"name": "JavaScript", "bytes": "14021"}]}} {"text": "\n\n\n \n Page hosted, for webview sample\n\n\n

This page is hosted at an external server. Observe how it interacts with the embedder application

\n
\n
\n
\n
\n
Go to google.com
\n

\n\n\n\n\n\n", "meta": {"content_hash": "468be6263b5582b61bb3ab2e9e89278a", "timestamp": "", "source": "github", "line_count": 81, "max_line_length": 106, "avg_line_length": 27.0, "alnum_prop": 0.7037037037037037, "repo_name": "hohanhb285/chrome-app-samples", "id": "8c5a5e1faf80d1ce395e205da79c73fd789ae65a", "size": "2187", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "webview/page_hosted_in_external_server.html", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "{% extends \"base.html\" %}\n{% block title%}{% endblock %}\n{% block header %}\n\n{% endblock %}\n{% block pager %}\n{% if paginator.page_count!=1 %}\n
\n{{paginator.pager(format='~20~')}}\n
\n{% endif %}\n{% endblock %}\n{% block content %}\n\n{{ self.pager() }}\n\n
\n{% if lines|length!=0 %}\n{% if mode=='normal': %}\n{% set last_date = None %}\n{% for line in lines %}\n{% set current_date = line['datetime'].date() %}\n{% if current_date!=last_date %}\n\t

{{current_date.strftime(\"%A %d %B %Y\")}}

\n{% set last_date = current_date %}\n{% endif %}\n\t

{{line['line']}}{{line['datetime'].strftime('%X')}}

\n{% endfor %}\n\t

View as BBCode

\n{% elif mode=='bbcode': %}\n{% for line in lines %}\n\t

[color=#{{line.color}}]{{line.line}}[/color]

\n{% endfor %}\n\t

View as text

\n{% endif %}\n{% endif %}\n

Export this log

\n
\n\n{{ self.pager() }}\n\n\n\n{% endblock %}\n", "meta": {"content_hash": "5fa4c19a9f7eac9a24e75f2475900373", "timestamp": "", "source": "github", "line_count": 115, "max_line_length": 208, "avg_line_length": 31.043478260869566, "alnum_prop": 0.5260504201680672, "repo_name": "MSPARP/MSPARP", "id": "85aca0cceee09ca8733a58efd72d245b736506c7", "size": "3570", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "templates/log.html", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "51494"}, {"name": "HTML", "bytes": "77834"}, {"name": "JavaScript", "bytes": "98022"}, {"name": "Python", "bytes": "115220"}]}} {"text": "@interface MenuViewController () \n{\n \n}\n@property (weak, nonatomic) IBOutlet UITableView *menuTableVIew;\n@property (strong, nonatomic) MenuDataSource *menuDataSource;\n@end\n\n@implementation MenuViewController\n\n\n#pragma mark - Table View Delegate\n\n- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath\n{\n [tableView deselectRowAtIndexPath:indexPath animated:NO];\n \n// NSLog(@\"%@\", [self.menuDataSource menuDataWithIndexPath:indexPath]);\n\n [self.menuDrawerVC.appVC closeDrawerWithAnimation:YES\n completion:^(BOOL finished){\n \n NSLog(@\"%s\", __PRETTY_FUNCTION__);\n \n }];\n}\n\n\n#pragma mark - View Controller\n\n- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil\n{\n self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];\n if (self) {\n self.menuDataSource = [MenuDataSource new];\n }\n return self;\n}\n\n- (void)viewDidLoad\n{\n [super viewDidLoad];\n\n self.menuTableVIew.dataSource = self.menuDataSource;\n self.menuTableVIew.delegate = self;\n}\n\n- (void)didReceiveMemoryWarning\n{\n [super didReceiveMemoryWarning];\n // Dispose of any resources that can be recreated.\n}\n\n- (void)viewDidLayoutSubviews\n{\n [super viewDidLayoutSubviews];\n \n [self.view layoutSubviews];\n}\n\n@end\n", "meta": {"content_hash": 
"e44a1da2dc282a12d3cb6ccdff7262d5", "timestamp": "", "source": "github", "line_count": 61, "max_line_length": 91, "avg_line_length": 25.049180327868854, "alnum_prop": 0.6230366492146597, "repo_name": "ykmt/MovingView", "id": "ec762c70f6032f61ac6ad6410f91607e54abd0ab", "size": "1779", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "MovingView/MenuViewController.m", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Objective-C", "bytes": "31187"}]}} {"text": "require 'chefspec'\nrequire 'chefspec/berkshelf'\n\nRSpec.configure do |config|\n config.log_level = :fatal\n\n # Guard against people using deprecated RSpec syntax\n config.raise_errors_for_deprecations!\n\n # Why aren't these the defaults?\n config.filter_run focus: true\n config.run_all_when_everything_filtered = true\n\n # Set a default platform (this is overriden as needed)\n config.platform = 'freebsd'\n config.version = '12'\n\n # Be random!\n config.order = 'random'\nend\n", "meta": {"content_hash": "c6094ae385dd4921dc7808bd4def2a5c", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 56, "avg_line_length": 23.95, "alnum_prop": 0.7265135699373695, "repo_name": "chef-cookbooks/freebsd", "id": "8a8ff7dedca28b8334763687ff3b27412ec97222", "size": "479", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "spec/spec_helper.rb", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Ruby", "bytes": "11492"}]}} {"text": "\n\nimport com.google.common.collect.Lists;\nimport jetbrains.buildServer.BaseTestCase;\nimport jetbrains.buildServer.vsoRooms.notificator.VSOTeamRoomIdsCache;\nimport jetbrains.buildServer.vsoRooms.rest.TeamRoom;\nimport jetbrains.buildServer.vsoRooms.rest.VSOTeamRoomsAPIConnection;\nimport org.jmock.Expectations;\nimport org.jmock.Mockery;\nimport org.testng.annotations.BeforeMethod;\nimport org.testng.annotations.Test;\n\n/**\n * @author Evgeniy.Koshkin\n */\npublic class VSOTeamRoomIdsCacheTest extends 
BaseTestCase {\n\n private VSOTeamRoomIdsCache myCache;\n private Mockery myMockery;\n\n @Override\n @BeforeMethod\n public void setUp() throws Exception {\n super.setUp();\n myCache = new VSOTeamRoomIdsCache();\n myMockery = new Mockery();\n }\n\n @Test\n public void test_room_names_collision_in_separate_accounts() throws Exception {\n final VSOTeamRoomsAPIConnection apiConnection = myMockery.mock(VSOTeamRoomsAPIConnection.class);\n myMockery.checking(new Expectations() {{\n one(apiConnection).getListOfRooms(\"accountA\");\n will(returnValue(Lists.newArrayList(new TeamRoom(1L, \"room\", \"accountA room\"))));\n one(apiConnection).getListOfRooms(\"accountB\");\n will(returnValue(Lists.newArrayList(new TeamRoom(2L, \"room\", \"accountB room\"))));\n }});\n assertEquals(Long.valueOf(1), myCache.getOrResolveRoomId(\"accountA\", \"room\", apiConnection));\n assertEquals(Long.valueOf(2), myCache.getOrResolveRoomId(\"accountB\", \"room\", apiConnection));\n myMockery.assertIsSatisfied();\n }\n\n @Test\n public void should_be_case_insensitive_for_team_room_name() throws Exception {\n final VSOTeamRoomsAPIConnection apiConnection = myMockery.mock(VSOTeamRoomsAPIConnection.class);\n myMockery.checking(new Expectations() {{\n one(apiConnection).getListOfRooms(\"account\");\n will(returnValue(Lists.newArrayList(new TeamRoom(1L, \"Room\", \"account room\"))));\n }});\n assertEquals(Long.valueOf(1), myCache.getOrResolveRoomId(\"account\", \"ROOM\", apiConnection));\n assertEquals(Long.valueOf(1), myCache.getOrResolveRoomId(\"account\", \"room\", apiConnection));\n assertEquals(Long.valueOf(1), myCache.getOrResolveRoomId(\"account\", \"Room\", apiConnection));\n myMockery.assertIsSatisfied();\n }\n\n @Test\n public void should_be_case_insensitive_for_account() throws Exception {\n final VSOTeamRoomsAPIConnection apiConnection = myMockery.mock(VSOTeamRoomsAPIConnection.class);\n myMockery.checking(new Expectations() {{\n one(apiConnection).getListOfRooms(\"account\");\n 
will(returnValue(Lists.newArrayList(new TeamRoom(1L, \"room\", \"account room\"))));\n }});\n assertEquals(Long.valueOf(1), myCache.getOrResolveRoomId(\"account\", \"room\", apiConnection));\n assertEquals(Long.valueOf(1), myCache.getOrResolveRoomId(\"Account\", \"room\", apiConnection));\n assertEquals(Long.valueOf(1), myCache.getOrResolveRoomId(\"ACCOUNT\", \"room\", apiConnection));\n myMockery.assertIsSatisfied();\n }\n\n @Test\n public void should_resolve_all_account_room_ids_on_first_call() throws Exception {\n final VSOTeamRoomsAPIConnection apiConnection = myMockery.mock(VSOTeamRoomsAPIConnection.class);\n myMockery.checking(new Expectations() {{\n one(apiConnection).getListOfRooms(\"account\");\n will(returnValue(Lists.newArrayList(new TeamRoom(1L, \"room1\", \"account room 1\"), new TeamRoom(2L, \"room2\", \"account room 2\"))));\n }});\n assertEquals(Long.valueOf(1), myCache.getOrResolveRoomId(\"account\", \"room1\", apiConnection));\n assertEquals(Long.valueOf(2), myCache.getOrResolveRoomId(\"account\", \"room2\", apiConnection));\n }\n}\n", "meta": {"content_hash": "09b11d0e3e2514debcb8d378584bf1d1", "timestamp": "", "source": "github", "line_count": 79, "max_line_length": 134, "avg_line_length": 45.164556962025316, "alnum_prop": 0.7480381165919282, "repo_name": "JetBrains/tc-vso-rooms", "id": "05be5b05bf48c5fb274e46dac9aca6365229e087", "size": "4168", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/src/VSOTeamRoomIdsCacheTest.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "791"}, {"name": "FreeMarker", "bytes": "13486"}, {"name": "Java", "bytes": "90553"}, {"name": "JavaScript", "bytes": "3180"}]}} {"text": "namespace device {\n\n// static\nbase::WeakPtr BluetoothLocalGattService::Create(\n BluetoothAdapter* adapter,\n const BluetoothUUID& uuid,\n bool is_primary,\n BluetoothLocalGattService* included_service,\n BluetoothLocalGattService::Delegate* delegate) {\n 
bluez::BluetoothAdapterBlueZ* adapter_bluez =\n static_cast(adapter);\n bluez::BluetoothLocalGattServiceBlueZ* service =\n new bluez::BluetoothLocalGattServiceBlueZ(adapter_bluez, uuid, is_primary,\n delegate);\n return service->weak_ptr_factory_.GetWeakPtr();\n}\n\n} // namespace device\n\nnamespace bluez {\n\nBluetoothLocalGattServiceBlueZ::BluetoothLocalGattServiceBlueZ(\n BluetoothAdapterBlueZ* adapter,\n const device::BluetoothUUID& uuid,\n bool is_primary,\n device::BluetoothLocalGattService::Delegate* delegate)\n : BluetoothGattServiceBlueZ(\n adapter,\n AddGuidToObjectPath(adapter->GetApplicationObjectPath().value() +\n \"/service\")),\n uuid_(uuid),\n is_primary_(is_primary),\n delegate_(delegate),\n weak_ptr_factory_(this) {\n VLOG(1) << \"Creating local GATT service with identifier: \" << GetIdentifier();\n adapter->AddLocalGattService(base::WrapUnique(this));\n}\n\nBluetoothLocalGattServiceBlueZ::~BluetoothLocalGattServiceBlueZ() {}\n\ndevice::BluetoothUUID BluetoothLocalGattServiceBlueZ::GetUUID() const {\n return uuid_;\n}\n\nbool BluetoothLocalGattServiceBlueZ::IsPrimary() const {\n return is_primary_;\n}\n\nvoid BluetoothLocalGattServiceBlueZ::Register(\n const base::Closure& callback,\n const ErrorCallback& error_callback) {\n GetAdapter()->RegisterGattService(this, callback, error_callback);\n}\n\nvoid BluetoothLocalGattServiceBlueZ::Unregister(\n const base::Closure& callback,\n const ErrorCallback& error_callback) {\n DCHECK(GetAdapter());\n GetAdapter()->UnregisterGattService(this, callback, error_callback);\n}\n\nbool BluetoothLocalGattServiceBlueZ::IsRegistered() {\n return GetAdapter()->IsGattServiceRegistered(this);\n}\n\nvoid BluetoothLocalGattServiceBlueZ::Delete() {\n weak_ptr_factory_.InvalidateWeakPtrs();\n GetAdapter()->RemoveLocalGattService(this);\n}\n\ndevice::BluetoothLocalGattCharacteristic*\nBluetoothLocalGattServiceBlueZ::GetCharacteristic(\n const std::string& identifier) {\n const auto& service = 
characteristics_.find(dbus::ObjectPath(identifier));\n return service == characteristics_.end() ? nullptr : service->second.get();\n};\n\nconst std::map>&\nBluetoothLocalGattServiceBlueZ::GetCharacteristics() const {\n return characteristics_;\n}\n\n// static\ndbus::ObjectPath BluetoothLocalGattServiceBlueZ::AddGuidToObjectPath(\n const std::string& path) {\n std::string GuidString = base::GenerateGUID();\n base::RemoveChars(GuidString, \"-\", &GuidString);\n\n return dbus::ObjectPath(path + GuidString);\n}\n\nvoid BluetoothLocalGattServiceBlueZ::AddCharacteristic(\n std::unique_ptr characteristic) {\n characteristics_[characteristic->object_path()] = std::move(characteristic);\n}\n\n} // namespace bluez\n", "meta": {"content_hash": "046c85ff169f4b53c3b1273284400a27", "timestamp": "", "source": "github", "line_count": 98, "max_line_length": 80, "avg_line_length": 32.88775510204081, "alnum_prop": 0.7381321749922433, "repo_name": "wuhengzhi/chromium-crosswalk", "id": "36fe94fe3691b8d344e81d8fafe18c2307e7e07d", "size": "3673", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "device/bluetooth/bluez/bluetooth_local_gatt_service_bluez.cc", "mode": "33188", "license": "bsd-3-clause", "language": []}} {"text": "\"\"\"\nTests For Cells Utility methods\n\"\"\"\nimport inspect\nimport random\n\nfrom nova.cells import utils as cells_utils\nfrom nova import db\nfrom nova import test\n\n\nclass CellsUtilsTestCase(test.TestCase):\n \"\"\"Test case for Cells utility methods.\"\"\"\n def test_get_instances_to_sync(self):\n fake_context = 'fake_context'\n\n call_info = {'get_all': 0, 'shuffle': 0}\n\n def random_shuffle(_list):\n call_info['shuffle'] += 1\n\n def instance_get_all_by_filters(context, filters,\n sort_key, sort_order):\n self.assertEqual(context, fake_context)\n self.assertEqual(sort_key, 'deleted')\n self.assertEqual(sort_order, 'asc')\n call_info['got_filters'] = filters\n call_info['get_all'] += 1\n return ['fake_instance1', 'fake_instance2', 
'fake_instance3']\n\n self.stubs.Set(db, 'instance_get_all_by_filters',\n instance_get_all_by_filters)\n self.stubs.Set(random, 'shuffle', random_shuffle)\n\n instances = cells_utils.get_instances_to_sync(fake_context)\n self.assertTrue(inspect.isgenerator(instances))\n self.assertTrue(len([x for x in instances]), 3)\n self.assertEqual(call_info['get_all'], 1)\n self.assertEqual(call_info['got_filters'], {})\n self.assertEqual(call_info['shuffle'], 0)\n\n instances = cells_utils.get_instances_to_sync(fake_context,\n shuffle=True)\n self.assertTrue(inspect.isgenerator(instances))\n self.assertTrue(len([x for x in instances]), 3)\n self.assertEqual(call_info['get_all'], 2)\n self.assertEqual(call_info['got_filters'], {})\n self.assertEqual(call_info['shuffle'], 1)\n\n instances = cells_utils.get_instances_to_sync(fake_context,\n updated_since='fake-updated-since')\n self.assertTrue(inspect.isgenerator(instances))\n self.assertTrue(len([x for x in instances]), 3)\n self.assertEqual(call_info['get_all'], 3)\n self.assertEqual(call_info['got_filters'],\n {'changes-since': 'fake-updated-since'})\n self.assertEqual(call_info['shuffle'], 1)\n\n instances = cells_utils.get_instances_to_sync(fake_context,\n project_id='fake-project',\n updated_since='fake-updated-since', shuffle=True)\n self.assertTrue(inspect.isgenerator(instances))\n self.assertTrue(len([x for x in instances]), 3)\n self.assertEqual(call_info['get_all'], 4)\n self.assertEqual(call_info['got_filters'],\n {'changes-since': 'fake-updated-since',\n 'project_id': 'fake-project'})\n self.assertEqual(call_info['shuffle'], 2)\n\n def test_split_cell_and_item(self):\n path = 'australia', 'queensland', 'gold_coast'\n cell = cells_utils._PATH_CELL_SEP.join(path)\n item = 'host_5'\n together = cells_utils.cell_with_item(cell, item)\n self.assertEqual(cells_utils._CELL_ITEM_SEP.join([cell, item]),\n together)\n\n # Test normal usage\n result_cell, result_item = cells_utils.split_cell_and_item(together)\n 
self.assertEqual(cell, result_cell)\n self.assertEqual(item, result_item)\n\n # Test with no cell\n cell = None\n together = cells_utils.cell_with_item(cell, item)\n self.assertEqual(item, together)\n result_cell, result_item = cells_utils.split_cell_and_item(together)\n self.assertEqual(cell, result_cell)\n self.assertEqual(item, result_item)\n", "meta": {"content_hash": "04d15b8373e81751e0aadb6587a4cac3", "timestamp": "", "source": "github", "line_count": 89, "max_line_length": 76, "avg_line_length": 40.337078651685395, "alnum_prop": 0.6094707520891365, "repo_name": "sridevikoushik31/nova", "id": "337556282122659f0166cd5cd345b1b240a05fba", "size": "4226", "binary": false, "copies": "3", "ref": "refs/heads/port_id_in_vif_on_devide", "path": "nova/tests/cells/test_cells_utils.py", "mode": "33188", "license": "apache-2.0", "language": [{"name": "JavaScript", "bytes": "7403"}, {"name": "Python", "bytes": "9944606"}, {"name": "Ruby", "bytes": "782"}, {"name": "Shell", "bytes": "17522"}]}} {"text": "namespace omaha {\n\nnamespace internal {\n\nvoid CommandLineParserArgs::Reset() {\n switch_arguments_.clear();\n}\n\n// Assumes switch_name is already lower case.\nHRESULT CommandLineParserArgs::AddSwitch(const CString& switch_name) {\n ASSERT1(CString(switch_name).MakeLower().Compare(switch_name) == 0);\n if (switch_arguments_.find(switch_name) != switch_arguments_.end()) {\n return E_INVALIDARG;\n }\n\n StringVector string_vector;\n switch_arguments_[switch_name] = string_vector;\n return S_OK;\n}\n\n// Assumes switch_name is already lower case.\nHRESULT CommandLineParserArgs::AddSwitchArgument(const CString& switch_name,\n const CString& value) {\n ASSERT1(CString(switch_name).MakeLower().Compare(switch_name) == 0);\n ASSERT1(!switch_name.IsEmpty());\n if (switch_name.IsEmpty()) {\n // We don't have a switch yet, so this is just a base argument.\n // Example command line: \"foo.exe myarg /someswitch\"\n // Here, myarg would be a base argument.\n // TODO(omaha): 
base_args_.push_back(switch_name_str);\n return E_INVALIDARG;\n }\n\n SwitchAndArgumentsMap::iterator iter = switch_arguments_.find(switch_name);\n if (iter == switch_arguments_.end()) {\n return E_UNEXPECTED;\n }\n (*iter).second.push_back(value);\n\n return S_OK;\n}\n\nint CommandLineParserArgs::GetSwitchCount() const {\n return switch_arguments_.size();\n}\n\nbool CommandLineParserArgs::HasSwitch(const CString& switch_name) const {\n CString switch_name_lower = switch_name;\n switch_name_lower.MakeLower();\n return switch_arguments_.find(switch_name_lower) != switch_arguments_.end();\n}\n\n// The value at a particular index may change if switch_names are added\n// since we're using a map underneath. But this keeps us from having to write\n// an interator and expose it externally.\nHRESULT CommandLineParserArgs::GetSwitchNameAtIndex(int index,\n CString* name) const {\n ASSERT1(name);\n\n if (index >= static_cast(switch_arguments_.size())) {\n return E_INVALIDARG;\n }\n\n SwitchAndArgumentsMapIter iter = switch_arguments_.begin();\n for (int i = 0; i < index; ++i) {\n ++iter;\n }\n\n *name = (*iter).first;\n\n return S_OK;\n}\n\nHRESULT CommandLineParserArgs::GetSwitchArgumentCount(\n const CString& switch_name, int* count) const {\n ASSERT1(count);\n\n CString switch_name_lower = switch_name;\n switch_name_lower.MakeLower();\n\n SwitchAndArgumentsMapIter iter = switch_arguments_.find(switch_name_lower);\n if (iter == switch_arguments_.end()) {\n return E_INVALIDARG;\n }\n\n *count = (*iter).second.size();\n return S_OK;\n}\n\nHRESULT CommandLineParserArgs::GetSwitchArgumentValue(\n const CString& switch_name,\n int argument_index,\n CString* argument_value) const {\n ASSERT1(argument_value);\n\n CString switch_name_lower = switch_name;\n switch_name_lower.MakeLower();\n\n int count = 0;\n HRESULT hr = GetSwitchArgumentCount(switch_name_lower, &count);\n if (FAILED(hr)) {\n return hr;\n }\n\n if (argument_index >= count) {\n return E_INVALIDARG;\n }\n\n 
SwitchAndArgumentsMapIter iter = switch_arguments_.find(switch_name_lower);\n if (iter == switch_arguments_.end()) {\n return E_INVALIDARG;\n }\n\n *argument_value = (*iter).second[argument_index];\n return S_OK;\n}\n\n} // namespace internal\n\nCommandLineParser::CommandLineParser() {\n}\n\nCommandLineParser::~CommandLineParser() {\n}\n\nHRESULT CommandLineParser::ParseFromString(const wchar_t* command_line) {\n CString command_line_str(command_line);\n command_line_str.Trim(_T(\" \"));\n\n int argc = 0;\n wchar_t** argv = ::CommandLineToArgvW(command_line_str, &argc);\n if (!argv) {\n return HRESULTFromLastError();\n }\n\n HRESULT hr = ParseFromArgv(argc, argv);\n ::LocalFree(argv);\n return hr;\n}\n\n// TODO(Omaha): Move the rule parser into a separate class.\n// TODO(Omaha): Fail the regular command parser if [/ switch is passed.\n// ParseFromArgv parses either a rule or a command line.\n//\n// Rules have required and optional parameters. An example of a rule is:\n// \"gu.exe /install [/oem [/appargs [/silent\"\n// This creates a rule for a command line that requires \"/install\" for the rule\n// to match. The other parameters are optional, indicated by prefixes of \"[/\".\n//\n// Command lines do not use \"[/\", and use \"/\" for all parameters.\n// A command line that looks like this:\n// \"gu.exe /install /oem /appargs \"\n// will match the rule above.\nHRESULT CommandLineParser::ParseFromArgv(int argc, wchar_t** argv) {\n if (argc == 0 || !argv) {\n return E_INVALIDARG;\n }\n\n CORE_LOG(L5, (_T(\"[CommandLineParser::ParseFromArgv][argc=%d]\"), argc));\n\n Reset();\n\n if (argc == 1) {\n // We only have the program name. 
So, we're done parsing.\n ASSERT1(!IsSwitch(argv[0]));\n return S_OK;\n }\n\n CString current_switch_name;\n bool is_optional_switch = false;\n\n // Start parsing at the first argument after the program name (index 1).\n for (int i = 1; i < argc; ++i) {\n HRESULT hr = S_OK;\n CString token = argv[i];\n token.Trim(_T(\" \"));\n CORE_LOG(L5, (_T(\"[Parsing arg][i=%d][argv[i]=%s]\"), i, token));\n if (IsSwitch(token)) {\n hr = StripSwitchNameFromArgv(token, ¤t_switch_name);\n if (FAILED(hr)) {\n return hr;\n }\n hr = AddSwitch(current_switch_name);\n if (FAILED(hr)) {\n CORE_LOG(LE, (_T(\"[AddSwitch failed][%s][0x%x]\"),\n current_switch_name, hr));\n return hr;\n }\n is_optional_switch = false;\n } else if (IsOptionalSwitch(token)) {\n hr = StripOptionalSwitchNameFromArgv(token, ¤t_switch_name);\n if (FAILED(hr)) {\n return hr;\n }\n hr = AddOptionalSwitch(current_switch_name);\n if (FAILED(hr)) {\n CORE_LOG(LE, (_T(\"[AddOptionalSwitch failed][%s][0x%x]\"),\n current_switch_name, hr));\n return hr;\n }\n is_optional_switch = true;\n } else {\n hr = is_optional_switch ?\n AddOptionalSwitchArgument(current_switch_name, token) :\n AddSwitchArgument(current_switch_name, token);\n\n if (FAILED(hr)) {\n CORE_LOG(LE, (_T(\"[Adding switch argument failed][%d][%s][%s][0x%x]\"),\n is_optional_switch, current_switch_name, token, hr));\n return hr;\n }\n }\n }\n\n return S_OK;\n}\n\nbool CommandLineParser::IsSwitch(const CString& param) const {\n // Switches must have a prefix (/) or (-), and at least one character.\n if (param.GetLength() < 2) {\n return false;\n }\n\n // All switches must start with / or -, and not contain any spaces.\n // Since the argv parser strips out the enclosing quotes around an argument,\n // we need to handle the following cases properly:\n // * foo.exe /switch arg -- /switch is a switch, arg is an arg\n // * foo.exe /switch \"/x y\" -- /switch is a switch, '/x y' is an arg and it\n // will get here _without_ the quotes.\n // If param_str starts with 
/ and contains no spaces, then it's a switch.\n return ((param[0] == _T('/')) || (param[0] == _T('-'))) &&\n (param.Find(_T(\" \")) == -1) &&\n (param.Find(_T(\"%20\")) == -1);\n}\n\nbool CommandLineParser::IsOptionalSwitch(const CString& param) const {\n // Optional switches must have a prefix ([/) or ([-), and at least one\n // character.\n return param[0] == _T('[') && IsSwitch(param.Right(param.GetLength() - 1));\n}\n\nHRESULT CommandLineParser::StripSwitchNameFromArgv(const CString& param,\n CString* switch_name) {\n ASSERT1(switch_name);\n\n if (!IsSwitch(param)) {\n return E_INVALIDARG;\n }\n\n *switch_name = param.Right(param.GetLength() - 1);\n switch_name->Trim(_T(\" \"));\n switch_name->MakeLower();\n return S_OK;\n}\n\nHRESULT CommandLineParser::StripOptionalSwitchNameFromArgv(const CString& param,\n CString* name) {\n ASSERT1(name);\n\n if (!IsOptionalSwitch(param)) {\n return E_INVALIDARG;\n }\n\n return StripSwitchNameFromArgv(param.Right(param.GetLength() - 1), name);\n}\n\nvoid CommandLineParser::Reset() {\n required_args_.Reset();\n optional_args_.Reset();\n}\n\nHRESULT CommandLineParser::AddSwitch(const CString& switch_name) {\n ASSERT1(switch_name == CString(switch_name).MakeLower());\n return required_args_.AddSwitch(switch_name);\n}\n\nHRESULT CommandLineParser::AddSwitchArgument(const CString& switch_name,\n const CString& argument_value) {\n ASSERT1(switch_name == CString(switch_name).MakeLower());\n return required_args_.AddSwitchArgument(switch_name, argument_value);\n}\n\nint CommandLineParser::GetSwitchCount() const {\n return required_args_.GetSwitchCount();\n}\n\nbool CommandLineParser::HasSwitch(const CString& switch_name) const {\n return required_args_.HasSwitch(switch_name);\n}\n\n// The value at a particular index may change if switch_names are added\n// since we're using a map underneath. 
But this keeps us from having to write\n// an interator and expose it externally.\nHRESULT CommandLineParser::GetSwitchNameAtIndex(int index,\n CString* switch_name) const {\n return required_args_.GetSwitchNameAtIndex(index, switch_name);\n}\n\nHRESULT CommandLineParser::GetSwitchArgumentCount(const CString& switch_name,\n int* count) const {\n return required_args_.GetSwitchArgumentCount(switch_name, count);\n}\n\nHRESULT CommandLineParser::GetSwitchArgumentValue(\n const CString& switch_name,\n int argument_index,\n CString* argument_value) const {\n return required_args_.GetSwitchArgumentValue(switch_name,\n argument_index,\n argument_value);\n}\n\nHRESULT CommandLineParser::AddOptionalSwitch(const CString& switch_name) {\n ASSERT1(switch_name == CString(switch_name).MakeLower());\n return optional_args_.AddSwitch(switch_name);\n}\n\nHRESULT CommandLineParser::AddOptionalSwitchArgument(const CString& switch_name,\n const CString& value) {\n ASSERT1(switch_name == CString(switch_name).MakeLower());\n return optional_args_.AddSwitchArgument(switch_name, value);\n}\n\nint CommandLineParser::GetOptionalSwitchCount() const {\n return optional_args_.GetSwitchCount();\n}\n\nbool CommandLineParser::HasOptionalSwitch(const CString& switch_name) const {\n return optional_args_.HasSwitch(switch_name);\n}\n\n// The value at a particular index may change if switch_names are added\n// since we're using a map underneath. 
But this keeps us from having to write\n// an interator and expose it externally.\nHRESULT CommandLineParser::GetOptionalSwitchNameAtIndex(int index,\n CString* name) const {\n return optional_args_.GetSwitchNameAtIndex(index, name);\n}\n\nHRESULT CommandLineParser::GetOptionalSwitchArgumentCount(const CString& name,\n int* count) const {\n return optional_args_.GetSwitchArgumentCount(name, count);\n}\n\nHRESULT CommandLineParser::GetOptionalSwitchArgumentValue(const CString& name,\n int argument_index,\n CString* val) const {\n return optional_args_.GetSwitchArgumentValue(name,\n argument_index,\n val);\n}\n\n} // namespace omaha\n\n", "meta": {"content_hash": "503ca43fc87a4d8178cc54a2b11a163e", "timestamp": "", "source": "github", "line_count": 354, "max_line_length": 80, "avg_line_length": 32.57344632768361, "alnum_prop": 0.6390599254184373, "repo_name": "taxilian/omaha", "id": "66e9d3fa6bed99af015749e8b1330c759c72bfed", "size": "12373", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "goopdate/command_line_parser.cc", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Assembly", "bytes": "1957"}, {"name": "C", "bytes": "2626743"}, {"name": "C#", "bytes": "2385"}, {"name": "C++", "bytes": "4634004"}, {"name": "Python", "bytes": "103638"}]}} {"text": "const path = require('path')\nconst existsSync = require('exists-sync')\nconst httpProxy = require('http-proxy')\nconst proxy = httpProxy.createProxyServer()\n\nfunction createDevelopmentProxy(app) {\n // add error handling to avoid https://github.com/nodejitsu/node-http-proxy/issues/527\n proxy.on('error', require('./handleProxyError.js'))\n\n // eg /assets/*\n app.all(`${STATS.publicPath}*`, (req, res) => {\n // try to send the file from assets/*\n // if it doesn't exist, use the proxy instead\n const filename = path.join(__dirname, 'assets', path.basename(req.url))\n if (existsSync(filename)) {\n return res.sendFile(filename)\n }\n return proxy.web(req, res, { target: 
'http://localhost:8080' })\n })\n}\n\nmodule.exports = createDevelopmentProxy\n", "meta": {"content_hash": "36456f5820120397d0b62f03f0fa11bb", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 88, "avg_line_length": 34.77272727272727, "alnum_prop": 0.6862745098039216, "repo_name": "dferber90/webapp-starter-pack", "id": "b30175a0e2c5a965b4ef2f08c353746a1a27b7b0", "size": "765", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/utils/createDevelopmentProxy.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "2591"}, {"name": "JavaScript", "bytes": "92712"}]}} {"text": "\n */\ninterface RequestSerializerInterface\n{\n /**\n * @param Request $data\n *\n * @return string\n */\n public function serialize(Request $data);\n\n /**\n * @param string $data\n *\n * @return Request\n */\n public function deserialize($data);\n}\n", "meta": {"content_hash": "e1ac5697344436a5e1556ef219259148", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 51, "avg_line_length": 15.8, "alnum_prop": 0.6050632911392405, "repo_name": "integratedfordevelopers/integrated", "id": "d05be5b615b237cf0de38ecb2f6fb2362c8b5ffe", "size": "628", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Common/Channel/Exporter/Queue/RequestSerializerInterface.php", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "4803"}, {"name": "HTML", "bytes": "1438"}, {"name": "JavaScript", "bytes": "64697"}, {"name": "PHP", "bytes": "3152918"}, {"name": "SCSS", "bytes": "48270"}, {"name": "Twig", "bytes": "540896"}]}} {"text": "ACCEPTED\n\n#### According to\nInternational Plant Names Index\n\n#### Published in\nnull\n\n#### Original name\nnull\n\n### Remarks\nnull", "meta": {"content_hash": "46d60a72238c32d606238c822de8be90", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, 
"repo_name": "mdoering/backbone", "id": "7f94d476203038d68eb8eae90ddd687e0e9b8753", "size": "175", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Gentianales/Apocynaceae/Plumeria/Plumeria stenophylla/README.md", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "var SecretHandshake = require('./secret_handshake');\n\ndescribe(\"Secret Handshake\", function() {\n it(\"1 is a wink\", function() {\n var handshake = new SecretHandshake(1);\n expect(handshake.commands()).toEqual([\"wink\"]);\n });\n\n xit(\"10 is a double blink\", function() {\n var handshake = new SecretHandshake(2);\n expect(handshake.commands()).toEqual([\"double blink\"]);\n });\n\n xit(\"100 is close your eyes\", function() {\n var handshake = new SecretHandshake(4);\n expect(handshake.commands()).toEqual([\"close your eyes\"]);\n });\n\n xit(\"1000 is jump\", function() {\n var handshake = new SecretHandshake(8);\n expect(handshake.commands()).toEqual([\"jump\"]);\n });\n\n xit(\"11 is wink and double blink\", function() {\n var handshake = new SecretHandshake(3);\n expect(handshake.commands()).toEqual([\"wink\",\"double blink\"]);\n });\n\n xit(\"10011 is double blink and wink\", function() {\n var handshake = new SecretHandshake(19);\n expect(handshake.commands()).toEqual([\"double blink\",\"wink\"]);\n });\n\n xit(\"11111 is jump, close your eyes, double blink, and wink\", function() {\n var handshake = new SecretHandshake(31);\n expect(handshake.commands()).toEqual([\"jump\",\"close your eyes\",\"double blink\",\"wink\"]);\n });\n\n xit(\"text is an invalid secret handshake\", function() {\n expect( function () {\n var handshake = new SecretHandshake(\"piggies\");\n }).toThrow(new Error(\"Handshake must be a number\"));\n });\n});\n", "meta": {"content_hash": "74dbebef1497961e89cb134722e3ef90", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 91, "avg_line_length": 32.93181818181818, 
"alnum_prop": 0.6487232574189096, "repo_name": "nicgallardo/xjavascript", "id": "37b4a27a56ba3a7c8dd004126a4a3f5fbce3e4d9", "size": "1449", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "secret-handshake/secret_handshake_test.spec.js", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "172436"}, {"name": "Makefile", "bytes": "1019"}, {"name": "Shell", "bytes": "640"}]}} {"text": "\ufeff\n## Current Release\n\n## Version 4.3.1\n\n## Version 4.3.0\n* Fix for issue: https://github.com/Azure/azure-powershell/issues/4323\n\n## Version 4.2.1\n\n## Version 4.2.0\n* Added support for user managed KeyVault key rotations in the Set-AzureRMDataLakeStoreAccount cmdlet\n* Added a quality of life update to automatically trigger an `enableKeyVault` call when a user managed KeyVault is added or a key is rotated.\n* Updated the token audience for job and catalog APIs to use the correct Data Lake specific audience instead of the Azure Resource audience.\n* Fixed a bug limiting the size of files created/appended using the following cmdlets:\n - New-AzureRmDataLakeStoreItem\n - Add-AzureRmDataLakeStoreItemContent\n\n## Version 4.1.0\n* Enable-AzureRmDataLakeStoreKeyVault (Enable-AdlStoreKeyVault)\n * Enable KeyVault managed encryption for a DataLake Store\n\n## Version 4.0.1\n\n## Version 4.0.0\n* For `Import-AzureRMDataLakeStoreItem` and `Export-AzureRMDataLakeStoreItem` trace logging has been disabled by default to improve performance. If trace logging is desired please use the `-DiagnosticLogLevel` and `-DiagnosticLogPath` parameters\n* Fixed a bug that would sometimes cause PowerShell to crash when uploading lots of small file to ADLS.\n\n## Version 3.6.0\n* Add support for head and tail to the `Get-AzureRMDataLakeStoreItemContent` cmdlet. 
This enables returning the top N or last N new line delimited rows to be displayed.\n\n## Version 3.5.0\n\n## Version 3.4.0\n* Update Upload and Download commands to use the new and improved Upload/Download helpers in the new DataLake.Store clients. This also gives better diagnostic logging, if enabled.\n* Default thread counts for Upload and download are now computed on a best effort basis based on the data being uploaded or downloaded. This should allow for good performance without specifying a thread count.\n* Update to Set-AzureRMDataLakeStoreAccount to allow for enabling and disabling Azure originating IPs through the firewall\n* Add warnings to Add and Set-AzureRMDataLakeStoreFirewallRule and AzureRMDataLakeStoreTrustedIdProvider if they are disabled\n* Remove explicit restrictions on resource locations. If Data Lake Store is not supported in a region, we will surface an error from the service.\n\n## Version 3.3.0\n* Updated help for all cmdlets to include output as well as more descriptions of parameters and the inclusion of aliases.\n* Update New-AdlStore and Set-AdlStore to support commitment tier options for the service.\n* Added OutputType mismatch warnings to all cmdlets with incorrect OutputType attributes. These will be fixed in a future breaking change release.\n* Add Diagnostic logging support to Import-AdlStoreItem and Export-AdlStoreItem. This can be enabled through the following parameters:\n * -Debug, enables full diagnostic logging as well as debug logging to the PowerShell console. Most verbose options\n * -DiagnosticLogLevel, allows finer control of the output than debug. If used with debug, this is ignored and debug logging is used.\n * -DiagnosticLogPath, optionally specify the file to write diagnostic logs to. By default it is written to a file under %LOCALAPPDATA%\\AdlDataTransfer\n* Added support to New-AdlStore to explicitly opt-out of account encryption. 
To do so, create the account with the -DisableEncryption flag.\n\n## Version 3.2.0\n* Introduction of deprecation warning for nested properties for all ARM resources. Nested properties will be removed in a future release and all properties will be moved one level up.\n* Removed the ability to set encryption in Set-AzureRMDataLakeStoreAccount (never was supported)\n* Added ability to enable/disable firewall rules and the trusted id providers during Set-AzureRMDataLakeStoreAccount\n* Added a new cmdlet: Set-AzureRMDataLakeStoreItemExpiry, which allows the user to set or remove the expiration for files (not folders) in their ADLS account.\n* Small fix for friendly date properties to pivot off UTC time instead of local time, ensuring standard time reporting.\n\n## Version 3.1.0\n* Improvements to import and export data cmdlets\n - Drastically increased performance for distributed download scenarios, where multiple sessions are running across many clients targeting the same ADLS account.\n - Better error handling and messaging for both upload and download scenarios.\n* Full Firewall rules management CRUD\n - The below cmdlets can be used to manage firewall rules for an ADLS account:\n - Add-AzureRMDataLakeStoreFirewallRule\n - Set-AzureRMDataLakeStoreFirewallRule\n - Get-AzureRMDataLakeStoreFirewallRule\n - Remove-AzureRMDataLakeStoreFirewallRule\n* Full Trusted ID provider management CRUD\n - The below cmdlets can be used to manage trusted identity providers for an ADLS account:\n - Add-AzureRMDataLakeStoreTrustedIdProvider\n - Set-AzureRMDataLakeStoreTrustedIdProvider\n - Get-AzureRMDataLakeStoreTrustedIdProvider\n - Remove-AzureRMDataLakeStoreTrustedIdProvider\n* Account Encryption Support\n - You can now encrypt newly created ADLS accounts as well as enable encryption on existing ADLS accounts using the New-AzureRMDataLakeStoreAccount and Set-AzureRMDataLakeStoreAccount cmdlets, respectively.", "meta": {"content_hash": "0a066b9b9ec7b8b29a51870d4e0209f8", 
"timestamp": "", "source": "github", "line_count": 93, "max_line_length": 245, "avg_line_length": 62.924731182795696, "alnum_prop": 0.7886192754613808, "repo_name": "hungmai-msft/azure-powershell", "id": "96b0ac4b9ca2dda18fc7669a13354a4628dd2fb3", "size": "5854", "binary": false, "copies": "2", "ref": "refs/heads/preview", "path": "src/ResourceManager/DataLakeStore/ChangeLog.md", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "16509"}, {"name": "C#", "bytes": "39328004"}, {"name": "HTML", "bytes": "209"}, {"name": "JavaScript", "bytes": "4979"}, {"name": "PHP", "bytes": "41"}, {"name": "PowerShell", "bytes": "3983165"}, {"name": "Ruby", "bytes": "265"}, {"name": "Shell", "bytes": "50"}, {"name": "XSLT", "bytes": "6114"}]}} {"text": "/**\n * \\file\n * \\brief Implementation of the necessary initialization for the RadauII-A solver\n *\n * \\author Nicholas Curtis\n * \\date 03/09/2015\n *\n */\n\n#ifdef GENERATE_DOCS\nnamespace radau2a {\n#endif\n\n void initialize_solver() {\n }\n\n/*!\n \\fn char* solver_name()\n \\brief Returns a descriptive solver name\n*/\n const char* solver_name() {\n \tconst char* name = \"radau2a-int\";\n \treturn name;\n }\n\n void cleanup_solver() {\n \t//nothing to do\n }\n\n void init_solver_log() {\n\n }\n\n void solver_log() {\n\n }\n\n#ifdef GENERATE_DOCS\n}\n#endif", "meta": {"content_hash": "3a3425e943cf2b7a5e70198f55b8c6b8", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 81, "avg_line_length": 13.225, "alnum_prop": 0.6483931947069943, "repo_name": "SLACKHA/accelerInt", "id": "24ada842cfa2172e3141dbf1e75da8e1ed93cb80", "size": "529", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "radau2a/radau2a_init.c", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "144786"}, {"name": "C++", "bytes": "16646"}, {"name": "Cuda", "bytes": "183418"}, {"name": "Python", "bytes": "80505"}, {"name": "Shell", "bytes": "3317"}]}} {"text": "var 
fork = require(\"child_process\").fork;\nvar path = require(\"path\");\n\nvar bin = \"./node_modules/.bin/istanbul\";\nvar cov = \"cover --report=lcov --dir=test/coverage/js _mocha --\".split(\" \");\n\nif (process.platform === 'win32') {\n bin = \"./node_modules/mocha/bin/mocha\";\n cov = [];\n}\n\nvar args = cov.concat([\n \"test/runner\",\n \"test/tests\",\n \"--expose-gc\",\n \"--timeout\",\n \"15000\"\n]);\n\nif (!process.env.APPVEYOR && !process.env.TRAVIS) {\n var local = path.join.bind(path, __dirname);\n var dummyPath = local(\"home\");\n process.env.HOME = dummyPath;\n process.env.USERPROFILE = dummyPath;\n}\n\nfork(bin, args, { cwd: path.join(__dirname, \"../\") }).on(\"close\", process.exit);\n", "meta": {"content_hash": "b90dc19d2f75e8fc7ca8f44b6b31a228", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 80, "avg_line_length": 25.0, "alnum_prop": 0.6207407407407407, "repo_name": "jdgarcia/nodegit", "id": "52cbff50ef10cbcd44564023d9eec225aab528d8", "size": "675", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "test/index.js", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "1789"}, {"name": "C++", "bytes": "128326"}, {"name": "JavaScript", "bytes": "439349"}, {"name": "Python", "bytes": "2142"}, {"name": "Shell", "bytes": "864"}]}} {"text": "/*\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.addthis.hydra.data.query.engine;\n\nimport 
java.util.concurrent.ScheduledExecutorService;\nimport java.util.concurrent.ScheduledThreadPoolExecutor;\nimport java.util.concurrent.TimeUnit;\n\nimport com.addthis.basis.util.Parameter;\n\nimport com.google.common.base.Objects;\nimport com.google.common.cache.CacheBuilder;\nimport com.google.common.cache.LoadingCache;\nimport com.google.common.util.concurrent.MoreExecutors;\nimport com.google.common.util.concurrent.ThreadFactoryBuilder;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class implements an LRU cache to keep our QueryEngines. It is instantiated only from MeshQuerySource.\n *

\n * It uses guava's cache loader to do most of the work. We periodically check to see if new data is available for\n * a job, and if so, asynchronously prepare the new database before swapping it in. Multiple get or refresh attempts\n * will block and wait on the existing one to finish.\n *

\n * As per guava's specs, it is not guaranteed that we will wait until we are at maximum capacity to evict engines.\n * Also, we are okay with evicting non-idle engines, but we do not force them to close. Rather, we set a flag and\n * trust the query using them to close it when it is finished. This means we may have a number of engines open equal\n * to the cache capacity + number of running queries. It is also possible for a few engines to be transiently open\n * while waiting for the eviction listener to close engines. This is somewhat balanced by guava's more aggressive\n * eviction policy, but in general we should not rely on the capacity as being an absolute hard max. In practice, it\n * should be more than sufficient though.\n *

\n * Basic flow is :\n * Constructed from MQSource\n * MQSource calls getAndLease()\n * See if we have a suitable engine\n * If so, return it, if not, make one and return it\n */\npublic class QueryEngineCache {\n\n private static final Logger log = LoggerFactory.getLogger(QueryEngineCache.class);\n\n /**\n * 'soft cap' on the number of engines to have open. this + concurrent queries +/- a few should closely\n * resemble the real cap on open engines\n */\n private static final long DEFAULT_ENGINE_CACHE_SIZE = Parameter.longValue(\"queryEngineCache.engineCacheSize\", 5);\n\n /**\n * seconds to let an engine be in cache before attempting to refresh it. Refreshing it means checking whether\n * or not the job has finished running and has a new data directory; it does not force the reopening of the same\n * directory. It is important to note that this scheduled refresh is not checked unless a get is called on it,\n * and that even if the refresh returns the old engine, it resets the fail timer.\n */\n private static final long DEFAULT_REFRESH_INTERVAL = Parameter.longValue(\"queryEngineCache.refreshInterval\", 2 * 60);\n\n /**\n * seconds in between cache malongenance runs. This helps query sources and jobs in lower throughput environments.\n * It does the guava api clean up method which handles any pending expiration events, and also attempts to provoke\n * refresh attempts on cached keys by calling get on them. The latter is more important for our purposes. Without it,\n * relatively idle engines would become stale or subject to undesired eviction by the fail longerval. 0 disables it.\n */\n private static final long DEFAULT_MAINTENANCE_INTERVAL = Parameter.longValue(\"queryEngineCache.maintenanceInterval\", 20 * 60);\n\n /**\n * seconds to let an engine be in cache after the most recent write. This is longended only for situations\n * where re-opening that engine is failing, and thus while the refresh is not occuring. 
it might appear that\n * an engine is alive and up to date and this attempts to limit that disparity if desired. Note that by failing,\n * we mean that the refresh method is throwing exceptions.\n */\n private static final long DEFAULT_FAIL_INTERVAL = Parameter.longValue(\"queryEngineCache.failInterval\", 70 * 60);\n\n /**\n * thread pool for cache maintenance runs. Should only need one thread.\n */\n private final ScheduledExecutorService queryEngineCacheMaintainer = MoreExecutors\n .getExitingScheduledExecutorService(new ScheduledThreadPoolExecutor(1,\n new ThreadFactoryBuilder().setNameFormat(\"queryEngineCacheMaintainer=%d\").build()));\n\n /**\n * The {@link LoadingCache} that provides the backing data structure for this class.\n * Acts like an intelligent semi-persistent Map that has logic for loading and reloading complex objects.\n */\n protected final LoadingCache loadingEngineCache;\n\n private final long engineCacheSize;\n private final long refreshInterval;\n private final long failInterval;\n private final long maintenanceInterval;\n\n /**\n * Initialize a {@link LoadingCache} that is capable of loading and reloading\n * {@link QueryEngine}s. Reloads occur asynchronously to prevent blocking operations\n * during unrelated calls to the cache. When reload is called the current engine will be compared with the\n * newest available data directory. If the current engine is up to date it will be returned, otherwise a new\n * engine will be opened to replace the current engine with the latest available.\n *

\n * On removal, we have a listener that will call closeWhenIdle on engines. It has a guard against removal events\n * generated by refreshes where we decide to keep the existing engine (no new job data is available). There is a\n * race condition where that test can be passed more than once so any clean up done there must be okay with that.\n * The race condition is such that the test will always be passed at least once, and never when the engine is still\n * available to new get calls. This meets our requirements.\n */\n public QueryEngineCache() {\n this(DEFAULT_ENGINE_CACHE_SIZE, DEFAULT_REFRESH_INTERVAL, DEFAULT_FAIL_INTERVAL, DEFAULT_MAINTENANCE_INTERVAL);\n }\n\n public QueryEngineCache(long engineCacheSize, long refreshInterval, long failInterval, long maintenanceInterval) {\n this(engineCacheSize, refreshInterval, failInterval, maintenanceInterval, new EngineLoader());\n }\n\n public QueryEngineCache(long engineCacheSize, long refreshInterval, long failInterval, long maintenanceInterval,\n EngineLoader engineLoader) {\n this.engineCacheSize = engineCacheSize;\n this.refreshInterval = refreshInterval;\n this.failInterval = failInterval;\n this.maintenanceInterval = maintenanceInterval;\n\n log.info(\"Initializing QueryEngineCache: {}\", this); //using 'this' is just more efficient\n\n // no easy way around escaping 'this' here, but at least it is more obvious what is going on now\n loadingEngineCache = CacheBuilder.newBuilder()\n .maximumWeight(engineCacheSize * 100)\n .weigher(\n (dir, engine) -> (int) (100 * engine.getTree().getAdvancedSettings().cacheWeight()))\n .refreshAfterWrite(refreshInterval, TimeUnit.SECONDS)\n .expireAfterWrite(failInterval, TimeUnit.SECONDS)\n .removalListener(new EngineRemovalListener(this))\n .build(engineLoader);\n\n //schedule maintenance runs\n maybeInitMaintenance();\n }\n\n\n /**\n * schedules maintenance for the cache using the maintenanceInterval parameter. Values less than 1\n * are treated as 'do not do maintenance'. 
Maintenance includes cache loader cleanUp() and an attempt\n * to trigger refreshes in relatively idle engines. This is done by the thread safe iterator from\n * the loading cache and performing getIfPresent calls on each entry. This will only trigger refreshes\n * if the refresh interval has passed, and avoids a potential race condition where doing refresh() could\n * end up re-loading an engine that was just evicted. This is important because in addition to being\n * incorrect cache behavior, refresh will block instead of being asynchronous while doing so -- possibly\n * leading to even more race conditions.\n *

\n * since the thread safe iterator is weakly consistent, it is a good idea to configure the intervals so\n * that maintenance will be performed more than once before the fail interval occurs (if we do not desire\n * to evict and close 'relatively idle' engines). eg. maintenanceInterval * 2 < failInterval\n *

\n * unfortunately, this somewhat confuses the eviction order heuristic because it considers these all to be\n * valid r/ws. This is one reason to keep this value relatively long. It is possible to optimize against this\n * somewhat, but probably at the cost of greatly increased complexity. It seems unlikely that it will have a\n * large impact if performed infrequently enough though, especially since the evictor is not a simple LRU.\n */\n private void maybeInitMaintenance() {\n if (maintenanceInterval > 0) {\n queryEngineCacheMaintainer.scheduleAtFixedRate(() -> {\n loadingEngineCache.cleanUp();\n loadingEngineCache.asMap().keySet().forEach(loadingEngineCache::getIfPresent);\n }, maintenanceInterval, maintenanceInterval, TimeUnit.SECONDS);\n }\n }\n\n /**\n * Takes an unresolved (usually the gold path) path to a bdb query directory. This is mostly a thin\n * layer between this class and the backing LoadingCache.\n *

\n * Most importantly, it also attempts to lease the engine. This is because there is a rare race condition\n * where after acquiring the engine, but before leasing it ourselves, it is evicted from the cache. Probably\n * caused by refresh, since it is less likely that an engine we just acquired would be the target of size\n * eviction in most cases. It is relatively unlikely to happen even twice in a row, but we try three times\n * here anyway. I have never seen this exception but if we start to see it a lot, we can re-evaluate this approach.\n *\n * @param directoryPath The path of the engine directory\n * @return a QueryEngine from the cache or constructed on demand (constructing blocks this thread)\n * @throws Exception - any problem while getting the engine. Likely either an issue with leasing or with opening an engine\n */\n public QueryEngine getAndLease(String directoryPath) throws Exception {\n for (int i = 0; i < 3; i++) {\n QueryEngine qe = loadingEngineCache.get(directoryPath);\n if (qe.lease()) {\n return qe;\n }\n }\n log.warn(\"Tried three times but unable to get lease for engine with path: {}\", directoryPath);\n throw new RuntimeException(\"Can't lease engine\");\n }\n\n @Override\n public String toString() {\n return Objects.toStringHelper(this)\n .add(\"engineCacheSize\", engineCacheSize)\n .add(\"refreshInterval\", refreshInterval)\n .add(\"maintenanceInterval\", maintenanceInterval)\n .add(\"failInterval\", failInterval)\n .toString();\n }\n\n}\n", "meta": {"content_hash": "adc024dcbfaacae34001cd8e0682b7a5", "timestamp": "", "source": "github", "line_count": 212, "max_line_length": 130, "avg_line_length": 55.60849056603774, "alnum_prop": 0.7211807617270337, "repo_name": "mythguided/hydra", "id": "12e6ca02fa41c099d8300355623fa6ff8cc95be5", "size": "11789", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "hydra-data/src/main/java/com/addthis/hydra/data/query/engine/QueryEngineCache.java", "mode": "33188", "license": 
"apache-2.0", "language": [{"name": "CSS", "bytes": "37939"}, {"name": "HTML", "bytes": "103283"}, {"name": "Java", "bytes": "4732762"}, {"name": "JavaScript", "bytes": "582863"}, {"name": "Shell", "bytes": "13923"}]}} {"text": "{-# LANGUAGE CPP #-}\n{-# LANGUAGE ConstraintKinds #-}\n{-# LANGUAGE DataKinds #-}\n{-# LANGUAGE FlexibleContexts #-}\n{-# LANGUAGE FlexibleInstances #-}\n{-# LANGUAGE ScopedTypeVariables #-}\n{-# LANGUAGE TypeFamilies #-}\n{-# LANGUAGE TypeOperators #-}\n{-# LANGUAGE UndecidableInstances #-}\n\nmodule Servant.Ruby.Internal where\n\nimport Servant.API\nimport Data.Proxy\nimport GHC.Exts (Constraint)\nimport GHC.TypeLits\n\ndata AjaxReq\n\nclass HasRB (layout :: *) where\n type RB layout :: *\n rubyFor :: Proxy layout -> AjaxReq -> RB layout\n", "meta": {"content_hash": "04fe634b44648e221b1026a823d617c6", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 51, "avg_line_length": 26.727272727272727, "alnum_prop": 0.6360544217687075, "repo_name": "parsonsmatt/servant-ruby", "id": "abcc508eb3facdd198c213260c073f31289f2906", "size": "588", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Servant/Ruby/Internal.hs", "mode": "33188", "license": "mit", "language": [{"name": "Haskell", "bytes": "1576"}]}} {"text": "from msrest.serialization import Model\nfrom msrest.exceptions import HttpOperationError\n\n\nclass ErrorResponse(Model):\n \"\"\"The error object.\n\n :param error: Error.\n :type error: ~azure.mgmt.resource.managementgroups.models.ErrorDetails\n \"\"\"\n\n _attribute_map = {\n 'error': {'key': 'error', 'type': 'ErrorDetails'},\n }\n\n def __init__(self, error=None):\n super(ErrorResponse, self).__init__()\n self.error = error\n\n\nclass ErrorResponseException(HttpOperationError):\n \"\"\"Server responsed with exception of type: 'ErrorResponse'.\n\n :param deserialize: A deserializer\n :param response: Server response to be deserialized.\n \"\"\"\n\n def __init__(self, deserialize, response, 
*args):\n\n super(ErrorResponseException, self).__init__(deserialize, response, 'ErrorResponse', *args)\n", "meta": {"content_hash": "1cea87edd169f394f10b678c53ae0cc7", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 99, "avg_line_length": 27.633333333333333, "alnum_prop": 0.6755126658624849, "repo_name": "AutorestCI/azure-sdk-for-python", "id": "d4af1d388d68ee2114db0e15d458065bc421a7b0", "size": "1303", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "azure-mgmt-resource/azure/mgmt/resource/managementgroups/models/error_response.py", "mode": "33188", "license": "mit", "language": [{"name": "Python", "bytes": "34619070"}]}} {"text": "function matrix(n) {\n const results = [];\n\n for (let i = 0; i < n; i++) {\n results.push([]);\n }\n\n let counter = 1;\n let startColumn = 0;\n let endColumn = n - 1;\n let startRow = 0;\n let endRow = n - 1;\n while (startColumn <= endColumn && startRow <= endRow) {\n // Top row\n for (let i = startColumn; i <= endColumn; i++) {\n results[startRow][i] = counter;\n counter++;\n }\n startRow++;\n\n // Right column\n for (let i = startRow; i <= endRow; i++) {\n results[i][endColumn] = counter;\n counter++;\n }\n endColumn--;\n\n // Bottom row\n for (let i = endColumn; i >= startColumn; i--) {\n results[endRow][i] = counter;\n counter++;\n }\n endRow--;\n\n // start column\n for (let i = endRow; i >= startRow; i--) {\n results[i][startColumn] = counter;\n counter++;\n }\n startColumn++;\n }\n\n return results;\n}\n\nmodule.exports = matrix;\n", "meta": {"content_hash": "6845853c2317bf025a109d0dc5d5a925", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 58, "avg_line_length": 19.782608695652176, "alnum_prop": 0.5274725274725275, "repo_name": "spiresd55/code-playground", "id": "f1d0651e482f080e54421cf74e0c08079627ac1c", "size": "1286", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "completed_exercises/matrix/index.js", "mode": "33188", "license": 
"mit", "language": [{"name": "CSS", "bytes": "899"}, {"name": "HTML", "bytes": "25468"}, {"name": "JavaScript", "bytes": "247128"}, {"name": "TypeScript", "bytes": "44268"}]}} {"text": "\npackage com.wegas.app.jsf.controllers;\n\nimport com.wegas.core.ejb.GameFacade;\nimport com.wegas.core.ejb.PlayerFacade;\nimport com.wegas.core.exception.internal.WegasNoResultException;\nimport com.wegas.core.persistence.game.Game;\nimport com.wegas.core.persistence.game.GameModel;\nimport java.io.IOException;\nimport java.io.Serializable;\nimport javax.annotation.PostConstruct;\nimport javax.ejb.EJB;\nimport javax.enterprise.context.RequestScoped;\nimport javax.faces.bean.ManagedBean;\nimport javax.faces.bean.ManagedProperty;\nimport javax.faces.context.ExternalContext;\nimport javax.faces.context.FacesContext;\nimport javax.inject.Inject;\n\n/**\n *\n * @author Francois-Xavier Aeberhard (fx at red-agent.com)\n */\n@ManagedBean(name = \"singleLobbyController\")\n@RequestScoped\npublic class SingleLobbyController implements Serializable {\n\n /**\n *\n */\n @ManagedProperty(\"#{param.token}\")\n private String token;\n /**\n *\n */\n @EJB\n private GameFacade gameFacade;\n /**\n *\n */\n @EJB\n private PlayerFacade playerFacade;\n /**\n *\n */\n @Inject\n ErrorController errorController;\n /**\n *\n */\n private Game currentGame = null;\n\n /**\n *\n * @fixme rights management\n *\n */\n @PostConstruct\n public void init() {\n final ExternalContext externalContext = FacesContext.getCurrentInstance().getExternalContext();\n\n if (token != null) {\n currentGame = gameFacade.findByToken(token);\n if (currentGame != null) { // 1st case: token is associated with a game\n try {\n playerFacade.findCurrentPlayer(currentGame);\n try {\n externalContext.dispatch(\"game-play.xhtml?gameId=\" + currentGame.getId());// display game page\n } catch (IOException ex) {\n }\n } catch (WegasNoResultException e) {\n // Nothing to do. 
stay on current page so player will choose his team\n }\n\n //} else { // 2nd case: token is associated with a team\n // final Team currentTeam = teamFacade.findByToken(token);\n // if (currentTeam != null) {\n // try {\n // playerFacade.findCurrentPlayer(currentTeam.getGame());\n // } catch (NoResultException etp) { // Player has not joined yet\n // if (SecurityHelper.isAnyPermitted(currentTeam.getGame(), Arrays.asList(\"Token\", \"TeamToken\", \"View\"))) {\n // teamFacade.joinTeam(currentTeam, userFacade.getCurrentUser()); // so we join him\n // } else {\n // externalContext.dispatch(\"/wegas-app/view/error/accessdenied.xhtml\"); // not allowed\n // }\n // }\n // externalContext.dispatch(\"game-play.xhtml?gameId=\" + currentTeam.getGame().getId());// display game page\n // } else {\n // externalContext.dispatch(\"/wegas-app/view/error/accessdenied.xhtml\"); // no game\n // }\n //}\n } else {\n errorController.dispatch(\"The game you are looking for could not be found.\");\n }\n } else {\n errorController.dispatch(\"The game you are looking for could not be found.\");\n }\n }\n\n /**\n * @return the token\n */\n public String getToken() {\n return token;\n }\n\n /**\n * @param token the token to set\n */\n public void setToken(String token) {\n this.token = token;\n }\n\n /**\n * @return the currentGame\n */\n public Game getCurrentGame() {\n return currentGame;\n }\n\n /**\n * @param currentGame the currentGame to set\n */\n public void setCurrentGame(Game currentGame) {\n this.currentGame = currentGame;\n }\n\n /**\n * @return the current game model\n */\n public GameModel getCurrentGameModel() {\n return currentGame.getGameModel();\n }\n}\n", "meta": {"content_hash": "92e1ec893a8350c0391127422f011016", "timestamp": "", "source": "github", "line_count": 134, "max_line_length": 134, "avg_line_length": 31.71641791044776, "alnum_prop": 0.5510588235294117, "repo_name": "ghiringh/Wegas", "id": "2bfa7b5f89e99f28fe0b6e976de2f7ef822ed52f", "size": "4410", "binary": 
false, "copies": "1", "ref": "refs/heads/master", "path": "wegas-app/src/main/java/com/wegas/app/jsf/controllers/SingleLobbyController.java", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "903473"}, {"name": "HTML", "bytes": "195241"}, {"name": "Java", "bytes": "1757507"}, {"name": "JavaScript", "bytes": "4366779"}]}} {"text": "from datetime import timedelta\n\nfrom django.core.management import call_command\n\nfrom tests.test_case import AppTestCase\nfrom wagtailstreamforms.models import Form, FormSubmission\n\n\nclass Tests(AppTestCase):\n fixtures = [\"test\"]\n\n def test_command(self):\n form = Form.objects.get(pk=1)\n to_keep = FormSubmission.objects.create(form=form, form_data={})\n to_delete = FormSubmission.objects.create(form=form, form_data={})\n to_delete.submit_time = to_delete.submit_time - timedelta(days=2)\n to_delete.save()\n\n call_command(\"prunesubmissions\", 1)\n\n FormSubmission.objects.get(pk=to_keep.pk)\n\n with self.assertRaises(FormSubmission.DoesNotExist):\n FormSubmission.objects.get(pk=to_delete.pk)\n", "meta": {"content_hash": "4cca3b3599e9153138fb9bf4ae4582db", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 74, "avg_line_length": 31.666666666666668, "alnum_prop": 0.7, "repo_name": "AccentDesign/wagtailstreamforms", "id": "71bcb5296a3739d9e37fc034c60c318a915da6a2", "size": "760", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/management/test_prunesubmissions.py", "mode": "33188", "license": "mit", "language": [{"name": "Dockerfile", "bytes": "690"}, {"name": "HTML", "bytes": "14735"}, {"name": "JavaScript", "bytes": "213"}, {"name": "Makefile", "bytes": "438"}, {"name": "Python", "bytes": "189375"}, {"name": "SCSS", "bytes": "2257"}, {"name": "Shell", "bytes": "559"}]}} {"text": "package proc\n\nimport (\n\t\"errors\"\n\t\"fmt\"\n\t\"math\"\n\t\"strings\"\n\n\t\"github.com/go-delve/delve/pkg/dwarf/op\"\n)\n\n// Registers is an interface for a generic 
register type. The\n// interface encapsulates the generic values / actions\n// we need independent of arch. The concrete register types\n// will be different depending on OS/Arch.\ntype Registers interface {\n\tPC() uint64\n\tSP() uint64\n\tBP() uint64\n\tLR() uint64\n\tTLS() uint64\n\t// GAddr returns the address of the G variable if it is known, 0 and false otherwise\n\tGAddr() (uint64, bool)\n\tSlice(floatingPoint bool) ([]Register, error)\n\t// Copy returns a copy of the registers that is guaranteed not to change\n\t// when the registers of the associated thread change.\n\tCopy() (Registers, error)\n}\n\n// Register represents a CPU register.\ntype Register struct {\n\tName string\n\tReg *op.DwarfRegister\n}\n\n// AppendUint64Register will create a new Register struct with the name and value\n// specified and append it to the `regs` slice.\nfunc AppendUint64Register(regs []Register, name string, value uint64) []Register {\n\treturn append(regs, Register{name, op.DwarfRegisterFromUint64(value)})\n}\n\n// AppendBytesRegister will create a new Register struct with the name and value\n// specified and append it to the `regs` slice.\nfunc AppendBytesRegister(regs []Register, name string, value []byte) []Register {\n\treturn append(regs, Register{name, op.DwarfRegisterFromBytes(value)})\n}\n\n// ErrUnknownRegister is returned when the value of an unknown\n// register is requested.\nvar ErrUnknownRegister = errors.New(\"unknown register\")\n\ntype flagRegisterDescr []flagDescr\ntype flagDescr struct {\n\tname string\n\tmask uint64\n}\n\nvar mxcsrDescription flagRegisterDescr = []flagDescr{\n\t{\"FZ\", 1 << 15},\n\t{\"RZ/RN\", 1<<14 | 1<<13},\n\t{\"PM\", 1 << 12},\n\t{\"UM\", 1 << 11},\n\t{\"OM\", 1 << 10},\n\t{\"ZM\", 1 << 9},\n\t{\"DM\", 1 << 8},\n\t{\"IM\", 1 << 7},\n\t{\"DAZ\", 1 << 6},\n\t{\"PE\", 1 << 5},\n\t{\"UE\", 1 << 4},\n\t{\"OE\", 1 << 3},\n\t{\"ZE\", 1 << 2},\n\t{\"DE\", 1 << 1},\n\t{\"IE\", 1 << 0},\n}\n\nvar eflagsDescription flagRegisterDescr = 
[]flagDescr{\n\t{\"CF\", 1 << 0},\n\t{\"\", 1 << 1},\n\t{\"PF\", 1 << 2},\n\t{\"AF\", 1 << 4},\n\t{\"ZF\", 1 << 6},\n\t{\"SF\", 1 << 7},\n\t{\"TF\", 1 << 8},\n\t{\"IF\", 1 << 9},\n\t{\"DF\", 1 << 10},\n\t{\"OF\", 1 << 11},\n\t{\"IOPL\", 1<<12 | 1<<13},\n\t{\"NT\", 1 << 14},\n\t{\"RF\", 1 << 16},\n\t{\"VM\", 1 << 17},\n\t{\"AC\", 1 << 18},\n\t{\"VIF\", 1 << 19},\n\t{\"VIP\", 1 << 20},\n\t{\"ID\", 1 << 21},\n}\n\nfunc (descr flagRegisterDescr) Mask() uint64 {\n\tvar r uint64\n\tfor _, f := range descr {\n\t\tr = r | f.mask\n\t}\n\treturn r\n}\n\nfunc (descr flagRegisterDescr) Describe(reg uint64, bitsize int) string {\n\tvar r []string\n\tfor _, f := range descr {\n\t\tif f.name == \"\" {\n\t\t\tcontinue\n\t\t}\n\t\t// rbm is f.mask with only the right-most bit set:\n\t\t// 0001 1100 -> 0000 0100\n\t\trbm := f.mask & -f.mask\n\t\tif rbm == f.mask {\n\t\t\tif reg&f.mask != 0 {\n\t\t\t\tr = append(r, f.name)\n\t\t\t}\n\t\t} else {\n\t\t\tx := (reg & f.mask) >> uint64(math.Log2(float64(rbm)))\n\t\t\tr = append(r, fmt.Sprintf(\"%s=%x\", f.name, x))\n\t\t}\n\t}\n\tif reg & ^descr.Mask() != 0 {\n\t\tr = append(r, fmt.Sprintf(\"unknown_flags=%x\", reg&^descr.Mask()))\n\t}\n\treturn fmt.Sprintf(\"%#0*x\\t[%s]\", bitsize/4, reg, strings.Join(r, \" \"))\n}\n", "meta": {"content_hash": "5fac7ad48d7df82fdd182c2e35a6f8f2", "timestamp": "", "source": "github", "line_count": 127, "max_line_length": 85, "avg_line_length": 24.511811023622048, "alnum_prop": 0.6174108576935432, "repo_name": "go-delve/delve", "id": "185d0615c5eb8fd881fef343a12ddc512255e5c1", "size": "3113", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pkg/proc/registers.go", "mode": "33188", "license": "mit", "language": [{"name": "Assembly", "bytes": "3179"}, {"name": "C", "bytes": "2657334"}, {"name": "Go", "bytes": "2792924"}, {"name": "Kotlin", "bytes": "7688"}, {"name": "Makefile", "bytes": "970"}, {"name": "PowerShell", "bytes": "3085"}, {"name": "Python", "bytes": "599"}, {"name": "Shell", 
"bytes": "5109"}, {"name": "Starlark", "bytes": "2024"}]}} {"text": "\n\n \n \n \n riscv: Not compatible \ud83d\udc7c\n \n \n \n \n \n \n \n \n \n \n

\n
\n \n
\n
\n
\n
\n \u00ab Up\n

\n riscv\n \n 0.0.3\n Not compatible \ud83d\udc7c\n \n

\n

\ud83d\udcc5 (2022-11-10 23:24:19 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed # Synopsis\nbase-bigarray       base\nbase-num            base        Num library distributed with the OCaml compiler\nbase-ocamlbuild     base        OCamlbuild binary and libraries distributed with the OCaml compiler\nbase-threads        base\nbase-unix           base\ncamlp5              7.14        Preprocessor-pretty-printer of OCaml\nconf-findutils      1           Virtual package relying on findutils\nconf-perl           2           Virtual package relying on perl\ncoq                 8.7.1+1     Formal proof management system\nnum                 0           The Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.02.3      The OCaml compiler (virtual package)\nocaml-base-compiler 4.02.3      Official 4.02.3 release\nocaml-config        1           OCaml Switch Configuration\nocamlfind           1.9.5       A library manager for OCaml\n# opam file:\nopam-version: "2.0"\nauthors: [\n  "Massachusetts Institute of Technology"\n]\nmaintainer: "Jason Gross <jgross@mit.edu>"\nhomepage: "https://github.com/mit-plv/riscv-coq"\nbug-reports: "https://github.com/mit-plv/riscv-coq/issues"\nlicense: "BSD-3-Clause"\nbuild: [\n  [make "-j%{jobs}%" "EXTERNAL_DEPENDENCIES=1" "all"]\n]\ninstall: [make "EXTERNAL_DEPENDENCIES=1" "install"]\ndepends: [\n  "coq" {>= "8.15~"}\n  "coq-coqutil" {= "0.0.2"}\n  "coq-record-update" {>= "0.3.0"}\n]\ndev-repo: "git+https://github.com/mit-plv/riscv-coq.git"\nsynopsis: "RISC-V Specification in Coq, somewhat experimental"\ntags: ["logpath:riscv"]\nurl {\n  src: "https://github.com/mit-plv/riscv-coq/archive/refs/tags/v0.0.3.tar.gz"\n  checksum: "sha512=55c6a2aa84c89b5b4224729ccad23504d906d174d8bab9b5e1ff62dd7e76efef4935978c3ba517870d25700a1e563e2b352bb3fba94936807561840f26af75e8"\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install \ud83c\udfdc\ufe0f

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-riscv.0.0.3 coq.8.7.1+1
\n
Return code
\n
5120
\n
Output
\n
[NOTE] Package coq is already installed (current version is 8.7.1+1).\nThe following dependencies couldn't be met:\n  - coq-riscv -> coq >= 8.15~ -> ocaml >= 4.05.0\n      base of this switch (use `--unlock-base' to force)\nNo solution found, exiting\n
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
opam remove -y coq; opam install -y --show-action --unlock-base coq-riscv.0.0.3
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Install \ud83d\ude80

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Installation size

\n

No files were installed.

\n

Uninstall \ud83e\uddf9

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub \u00a9 Guillaume Claret \ud83d\udc23\n

\n
\n
\n \n \n \n\n", "meta": {"content_hash": "500a50feebf1f5c4e792362c3876765f", "timestamp": "", "source": "github", "line_count": 166, "max_line_length": 159, "avg_line_length": 41.566265060240966, "alnum_prop": 0.5395652173913044, "repo_name": "coq-bench/coq-bench.github.io", "id": "a2d72cb16b363e6581ce4064c8dfafbe665c61e2", "size": "6925", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.02.3-2.0.6/released/8.7.1+1/riscv/0.0.3.html", "mode": "33188", "license": "mit", "language": []}} {"text": "\n\n npm-stop\n \n \n \n \n\n \n
\n\n

npm-stop

Stop a package

\n

SYNOPSIS

\n
npm stop [-- <args>]\n

DESCRIPTION

\n

This runs a package's "stop" script, if one was provided.

\n

SEE ALSO

\n\n\n
\n\n\n\n\n\n\n\n\n\n\n

npm-stop — npm@2.14.7

\n\n", "meta": {"content_hash": "386a7305980a40b5670a511695fa9160", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 807, "avg_line_length": 82.39473684210526, "alnum_prop": 0.6940274672628554, "repo_name": "Tearund/stories", "id": "2047d8e85417b2232688c109156deec3acb98823", "size": "3131", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "node_modules/cordova/node_modules/cordova-lib/node_modules/npm/html/doc/cli/npm-stop.html", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "4027"}, {"name": "C", "bytes": "1025"}, {"name": "CSS", "bytes": "1028"}, {"name": "HTML", "bytes": "4766"}, {"name": "Java", "bytes": "6311"}, {"name": "JavaScript", "bytes": "24488"}, {"name": "Objective-C", "bytes": "221904"}, {"name": "Shell", "bytes": "2480"}]}} {"text": "\"\"\"Implement operations for branch-based reviews.\"\"\"\n# =============================================================================\n# CONTENTS\n# -----------------------------------------------------------------------------\n# abdt_branch\n#\n# Public Classes:\n# Branch\n# .is_abandoned\n# .is_null\n# .is_new\n# .is_status_bad_pre_review\n# .is_status_bad_land\n# .is_status_bad_abandoned\n# .is_status_bad\n# .has_new_commits\n# .base_branch_name\n# .review_branch_hash\n# .review_branch_name\n# .review_id_or_none\n# .get_author_names_emails\n# .get_any_author_emails\n# .get_repo_name\n# .get_browse_url\n# .describe\n# .describe_new_commits\n# .make_message_digest\n# .make_raw_diff\n# .verify_review_branch_base\n# .get_commit_message_from_tip\n# .abandon\n# .remove\n# .clear_mark\n# .mark_bad_land\n# .mark_bad_abandoned\n# .mark_bad_in_review\n# .mark_new_bad_in_review\n# .mark_bad_pre_review\n# .mark_ok_in_review\n# .mark_ok_new_review\n# .land\n#\n# Public Functions:\n# calc_is_ok\n#\n# -----------------------------------------------------------------------------\n# (this contents block is generated, edits will be lost)\n# 
=============================================================================\n# TODO: write test driver\n\nfrom __future__ import absolute_import\n\nimport phlgit_checkout\nimport phlgit_log\nimport phlgit_push\nimport phlgit_revparse\nimport phlgitu_ref\nimport phlsys_textconvert\n\nimport abdt_differ\nimport abdt_errident\nimport abdt_exception\nimport abdt_lander\nimport abdt_naming\nimport abdt_tryloop\n\n# TODO: allow this to be passed in\n_MAX_DIFF_SIZE = int(1.5 * 1024 * 1024)\n\n\ndef calc_is_ok(branch):\n \"\"\"Return True if the supplied 'branch' is ok, False if bad, else None.\n\n Note that a branch can be 'null' in which case we return None.\n\n :branch: the Branch to examine\n :returns: bool status of the branch\n\n \"\"\"\n assert branch is not None\n if branch.is_null() or branch.is_new() or branch.is_abandoned():\n return None\n\n return not branch.is_status_bad()\n\n\nclass Branch(object):\n\n def __init__(\n self,\n repo,\n review_branch,\n review_hash,\n tracking_branch,\n tracking_hash,\n lander,\n repo_name,\n browse_url=None):\n \"\"\"Create a new relationship tracker for the supplied branch names.\n\n :repo: a callable supporting git commands, e.g. 
repo(\"status\")\n :review_branch: the abdt_gittypes.GitReviewBranch\n :review_hash: the commit hash of the branch or None\n :tracking_branch: the abdt_gittypes.GitWorkingBranch\n :tracking_hash: the commit hash of the branch or None\n :lander: a lander conformant to abdt_lander\n :repo_name: a short string to identify the repo to humans\n :browse_url: a URL to browse the branch or repo (may be None)\n\n \"\"\"\n self._repo = repo\n self._review_branch = review_branch\n self._review_hash = review_hash\n self._tracking_branch = tracking_branch\n self._tracking_hash = tracking_hash\n self._lander = lander\n assert self._review_branch_valid_or_none()\n assert self._tracking_branch_valid_or_none()\n self._repo_name = repo_name\n self._browse_url = browse_url\n assert self._repo_name is not None\n\n def _review_branch_valid_or_none(self):\n if not self._has_review_branch():\n return True\n else:\n return isinstance(\n self._review_branch,\n abdt_naming.ReviewBranch)\n\n def _tracking_branch_valid_or_none(self):\n if not self._has_tracking_branch():\n return True\n else:\n return isinstance(\n self._tracking_branch,\n abdt_naming.TrackerBranch)\n\n def _has_review_branch(self):\n return self._review_branch is not None\n\n def _has_tracking_branch(self):\n return self._tracking_branch is not None\n\n def is_abandoned(self):\n \"\"\"Return True if the author's branch no longer exists.\"\"\"\n return not self._has_review_branch() and self._has_tracking_branch()\n\n def is_null(self):\n \"\"\"Return True if we don't have any data.\"\"\"\n no_review_branch = not self._has_review_branch()\n no_tracking_branch = not self._has_tracking_branch()\n return no_review_branch and no_tracking_branch\n\n def is_new(self):\n \"\"\"Return True if we haven't marked the author's branch.\"\"\"\n return self._has_review_branch() and not self._has_tracking_branch()\n\n def is_status_bad_pre_review(self):\n \"\"\"Return True if the author's branch is marked 'bad pre-review'.\"\"\"\n if 
self._has_tracking_branch():\n return abdt_naming.isStatusBadPreReview(self._tracking_branch)\n else:\n return False\n\n def is_status_bad_land(self):\n \"\"\"Return True if the author's branch is marked 'bad land'.\"\"\"\n if self._has_tracking_branch():\n return abdt_naming.isStatusBadLand(self._tracking_branch)\n else:\n return False\n\n def is_status_bad_abandoned(self):\n \"\"\"Return True if the author's branch is marked 'bad abandoned'.\"\"\"\n if self._has_tracking_branch():\n branch = self._tracking_branch\n return branch.status == abdt_naming.WB_STATUS_BAD_ABANDONED\n else:\n return False\n\n def is_status_bad(self):\n \"\"\"Return True if the author's branch is marked any bad status.\"\"\"\n if self._has_tracking_branch():\n return abdt_naming.isStatusBad(self._tracking_branch)\n else:\n return False\n\n def has_new_commits(self):\n \"\"\"Return True if the author's branch is different since marked.\"\"\"\n if self.is_new():\n return True\n else:\n return self._review_hash != self._tracking_hash\n\n def base_branch_name(self):\n \"\"\"Return the string name of the branch the review will land on.\"\"\"\n if self._review_branch:\n return self._review_branch.base\n return self._tracking_branch.base\n\n def review_branch_hash(self):\n \"\"\"Return the string hash of the review branch or None.\"\"\"\n return self._review_hash\n\n def review_branch_name(self):\n \"\"\"Return the string name of the branch the review is based on.\"\"\"\n if self._review_branch:\n return self._review_branch.branch\n return self._tracking_branch.review_name\n\n def review_id_or_none(self):\n \"\"\"Return the int id of the review or 'None' if there isn't one.\"\"\"\n if not self._tracking_branch:\n return None\n\n review_id = None\n try:\n review_id = int(self._tracking_branch.id)\n except ValueError:\n pass\n\n return review_id\n\n def get_author_names_emails(self):\n \"\"\"Return a list of (name, email) tuples from the branch.\"\"\"\n hashes = self._get_commit_hashes()\n\n # 
names and emails are only mentioned once, in the order that they\n # appear. reverse the order so that the the most recent commit is\n # considered first.\n hashes.reverse()\n names_emails = phlgit_log.get_author_names_emails_from_hashes(\n self._repo, hashes)\n names_emails.reverse()\n\n return names_emails\n\n def get_any_author_emails(self):\n \"\"\"Return a list of emails from the branch.\n\n If the branch has an invalid base or has no history against the base\n then resort to using the whole history.\n\n Useful if 'get_author_names_emails' fails.\n\n \"\"\"\n if phlgit_revparse.get_sha1_or_none(\n self._repo, self._review_branch.remote_base) is None:\n hashes = phlgit_log.get_last_n_commit_hashes_from_ref(\n self._repo, 1, self._review_branch.remote_branch)\n else:\n hashes = self._get_commit_hashes()\n if not hashes:\n hashes = phlgit_log.get_last_n_commit_hashes_from_ref(\n self._repo, 1, self._review_branch.remote_branch)\n committers = phlgit_log.get_author_names_emails_from_hashes(\n self._repo, hashes)\n emails = [committer[1] for committer in committers]\n return emails\n\n def get_repo_name(self):\n \"\"\"Return the human name for the repo the branch came from.\"\"\"\n return self._repo_name\n\n def get_browse_url(self):\n \"\"\"Return the url to browse this branch, may be None.\"\"\"\n return self._browse_url\n\n def _get_commit_hashes(self):\n hashes = self._repo.get_range_hashes(\n self._review_branch.remote_base,\n self._review_branch.remote_branch)\n return hashes\n\n def describe(self):\n \"\"\"Return a string description of this branch for a human to read.\"\"\"\n branch_description = \"(null branch)\"\n if not self.is_null():\n branch_description = self.review_branch_name()\n if self.is_abandoned():\n branch_description += \" (abandoned)\"\n return \"{}, {}\".format(self.get_repo_name(), branch_description)\n\n def describe_new_commits(self):\n \"\"\"Return a string description of the new commits on the branch.\"\"\"\n hashes = None\n previous 
= None\n latest = self._review_branch.remote_branch\n\n if self.is_new():\n previous = self._review_branch.remote_base\n else:\n previous = self._tracking_branch.remote_branch\n\n hashes = self._repo.get_range_hashes(previous, latest)\n hashes.reverse()\n revisions = self._repo.make_revisions_from_hashes(hashes)\n\n message = \"\"\n for r in revisions:\n message += r.abbrev_hash + \" \" + r.subject + \"\\n\"\n return phlsys_textconvert.ensure_ascii(message)\n\n def make_message_digest(self):\n \"\"\"Return a string digest of the commit messages on the branch.\n\n The digest is comprised of the title from the earliest commit\n unique to the branch and all of the message bodies from the\n unique commits on the branch.\n\n \"\"\"\n hashes = self._get_commit_hashes()\n revisions = self._repo.make_revisions_from_hashes(hashes)\n message = revisions[0].subject + \"\\n\\n\"\n for r in revisions:\n message += r.message\n return phlsys_textconvert.ensure_ascii(message)\n\n def make_raw_diff(self):\n \"\"\"Return an abdt_differ.DiffResult of the changes on the branch.\n\n If the diff would exceed the pre-specified max diff size then take\n measures to reduce the diff.\n\n \"\"\"\n # checkout the 'to' branch, otherwise we won't take into account any\n # changes to .gitattributes files\n phlgit_checkout.branch(self._repo, self._review_branch.remote_branch)\n\n try:\n return abdt_differ.make_raw_diff(\n self._repo,\n self._review_branch.remote_base,\n self._review_branch.remote_branch,\n _MAX_DIFF_SIZE)\n except abdt_differ.NoDiffError:\n raise abdt_exception.NoDiffException(\n self.base_branch_name(),\n self.review_branch_name(),\n self.review_branch_hash())\n\n def _is_based_on(self, name, base):\n # TODO: actually do this\n return True\n\n def verify_review_branch_base(self):\n \"\"\"Raise exception if review branch has invalid base.\"\"\"\n if self._review_branch.base not in self._repo.get_remote_branches():\n raise abdt_exception.MissingBaseException(\n 
self._review_branch.branch,\n self._review_branch.description,\n self._review_branch.base)\n if not self._is_based_on(\n self._review_branch.branch, self._review_branch.base):\n raise abdt_exception.AbdUserException(\n \"'\" + self._review_branch.branch +\n \"' is not based on '\" + self._review_branch.base + \"'\")\n\n def get_commit_message_from_tip(self):\n \"\"\"Return string commit message from latest commit on branch.\"\"\"\n hashes = self._get_commit_hashes()\n revision = phlgit_log.make_revision_from_hash(self._repo, hashes[-1])\n message = revision.subject + \"\\n\"\n message += \"\\n\"\n message += revision.message + \"\\n\"\n return phlsys_textconvert.ensure_ascii(message)\n\n def _push_delete_review_branch(self):\n def action():\n self._repo.push_delete(self._review_branch.branch)\n\n self._tryloop(action, abdt_errident.PUSH_DELETE_REVIEW)\n\n def _push_delete_tracking_branch(self):\n def action():\n self._repo.push_delete(self._tracking_branch.branch)\n\n self._tryloop(action, abdt_errident.PUSH_DELETE_TRACKING)\n\n def abandon(self):\n \"\"\"Remove information associated with the abandoned review branch.\"\"\"\n # TODO: raise if the branch is not actually abandoned by the user\n self._push_delete_tracking_branch()\n self._tracking_branch = None\n self._tracking_hash = None\n\n def remove(self):\n \"\"\"Remove review branch and tracking branch.\"\"\"\n self._repo.archive_to_abandoned(\n self._review_hash,\n self.review_branch_name(),\n self._tracking_branch.base)\n\n # push the abandoned archive, don't escalate if it fails to push\n try:\n # XXX: oddly pylint complains if we call push_landed() directly:\n # \"Using method (_tryloop) as an attribute (not invoked)\"\n def push_abandoned():\n self._repo.push_abandoned()\n\n self._tryloop(\n push_abandoned,\n abdt_errident.PUSH_ABANDONED_ARCHIVE)\n except Exception:\n # XXX: don't worry if we can't push the landed, this is most\n # likely a permissioning issue but not a showstopper.\n # we should probably 
nag on the review instead.\n pass\n\n self._push_delete_review_branch()\n self._push_delete_tracking_branch()\n self._review_branch = None\n self._review_hash = None\n self._tracking_branch = None\n self._tracking_hash = None\n\n def clear_mark(self):\n \"\"\"Clear status and last commit associated with the review branch.\"\"\"\n self._push_delete_tracking_branch()\n self._tracking_branch = None\n self._tracking_hash = None\n\n def mark_bad_land(self):\n \"\"\"Mark the current version of the review branch as 'bad land'.\"\"\"\n assert self.review_id_or_none() is not None\n\n self._tryloop(\n lambda: self._push_status(abdt_naming.WB_STATUS_BAD_LAND),\n abdt_errident.MARK_BAD_LAND)\n\n def mark_bad_abandoned(self):\n \"\"\"Mark the current version of the review branch as 'bad abandoned'.\"\"\"\n assert self.review_id_or_none() is not None\n\n self._tryloop(\n lambda: self._push_status(abdt_naming.WB_STATUS_BAD_ABANDONED),\n abdt_errident.MARK_BAD_ABANDONED)\n\n def mark_bad_in_review(self):\n \"\"\"Mark the current version of the review branch as 'bad in review'.\"\"\"\n assert self.review_id_or_none() is not None\n\n self._tryloop(\n lambda: self._push_status(abdt_naming.WB_STATUS_BAD_INREVIEW),\n abdt_errident.MARK_BAD_IN_REVIEW)\n\n def mark_new_bad_in_review(self, revision_id):\n \"\"\"Mark the current version of the review branch as 'bad in review'.\"\"\"\n assert self.review_id_or_none() is None\n\n def action():\n if not self.is_new():\n # 'push_bad_new_in_review' wont clean up our existing tracker\n self._push_delete_tracking_branch()\n self._push_new(\n abdt_naming.WB_STATUS_BAD_INREVIEW,\n revision_id)\n\n self._tryloop(action, abdt_errident.MARK_NEW_BAD_IN_REVIEW)\n\n def mark_bad_pre_review(self):\n \"\"\"Mark this version of the review branch as 'bad pre review'.\"\"\"\n assert self.review_id_or_none() is None\n assert self.is_status_bad_pre_review() or self.is_new()\n\n # early out if this operation is redundant, pushing is expensive\n if 
self.is_status_bad_pre_review() and not self.has_new_commits():\n return\n\n def action():\n self._push_new(\n abdt_naming.WB_STATUS_BAD_PREREVIEW,\n None)\n\n self._tryloop(\n action, abdt_errident.MARK_BAD_PRE_REVIEW)\n\n def mark_ok_in_review(self):\n \"\"\"Mark this version of the review branch as 'ok in review'.\"\"\"\n assert self.review_id_or_none() is not None\n\n self._tryloop(\n lambda: self._push_status(abdt_naming.WB_STATUS_OK),\n abdt_errident.MARK_OK_IN_REVIEW)\n\n def mark_ok_new_review(self, revision_id):\n \"\"\"Mark this version of the review branch as 'ok in review'.\"\"\"\n assert self.review_id_or_none() is None\n\n def action():\n if not self.is_new():\n # 'push_bad_new_in_review' wont clean up our existing tracker\n self._push_delete_tracking_branch()\n self._push_new(\n abdt_naming.WB_STATUS_OK,\n revision_id)\n\n self._tryloop(action, abdt_errident.MARK_OK_NEW_REVIEW)\n\n def land(self, author_name, author_email, message):\n \"\"\"Integrate the branch into the base and remove the review branch.\"\"\"\n\n self._repo.checkout_forced_new_branch(\n self._tracking_branch.base,\n self._tracking_branch.remote_base)\n\n try:\n result = self._lander(\n self._repo,\n self._tracking_branch.remote_branch,\n author_name,\n author_email,\n message)\n except abdt_lander.LanderException as e:\n self._repo(\"reset\", \"--hard\") # fix the working copy\n raise abdt_exception.LandingException(\n str(e),\n self.review_branch_name(),\n self._tracking_branch.base)\n\n landing_hash = phlgit_revparse.get_sha1(\n self._repo, self._tracking_branch.base)\n\n # don't tryloop here as it's more expected that we can't push the base\n # due to permissioning or some other error\n try:\n self._repo.push(self._tracking_branch.base)\n except Exception as e:\n raise abdt_exception.LandingPushBaseException(\n str(e),\n self.review_branch_name(),\n self._tracking_branch.base)\n\n self._tryloop(\n lambda: self._repo.push_delete(\n self._tracking_branch.branch,\n 
self.review_branch_name()),\n abdt_errident.PUSH_DELETE_LANDED)\n\n self._repo.archive_to_landed(\n self._tracking_hash,\n self.review_branch_name(),\n self._tracking_branch.base,\n landing_hash,\n message)\n\n # push the landing archive, don't escalate if it fails to push\n try:\n # XXX: oddly pylint complains if we call push_landed() directly:\n # \"Using method (_tryloop) as an attribute (not invoked)\"\n def push_landed():\n self._repo.push_landed()\n\n self._tryloop(\n push_landed,\n abdt_errident.PUSH_LANDING_ARCHIVE)\n except Exception:\n # XXX: don't worry if we can't push the landed, this is most\n # likely a permissioning issue but not a showstopper.\n # we should probably nag on the review instead.\n pass\n\n self._review_branch = None\n self._review_hash = None\n self._tracking_branch = None\n self._tracking_hash = None\n\n return result\n\n def _push_status(self, status):\n old_branch = self._tracking_branch.branch\n\n self._tracking_branch.update_status(status)\n\n new_branch = self._tracking_branch.branch\n if old_branch == new_branch:\n phlgit_push.push_asymmetrical_force(\n self._repo,\n self._review_branch.remote_branch,\n phlgitu_ref.make_local(new_branch),\n self._tracking_branch.remote)\n else:\n phlgit_push.move_asymmetrical(\n self._repo,\n self._review_branch.remote_branch,\n phlgitu_ref.make_local(old_branch),\n phlgitu_ref.make_local(new_branch),\n self._repo.get_remote())\n\n self._tracking_hash = self._review_hash\n\n def _push_new(self, status, revision_id):\n tracking_branch = self._review_branch.make_tracker(\n status, revision_id)\n\n phlgit_push.push_asymmetrical_force(\n self._repo,\n self._review_branch.remote_branch,\n phlgitu_ref.make_local(tracking_branch.branch),\n tracking_branch.remote)\n\n self._tracking_branch = tracking_branch\n self._tracking_hash = self._review_hash\n\n def _tryloop(self, f, identifier):\n return abdt_tryloop.tryloop(f, identifier, self.describe())\n\n\n# 
-----------------------------------------------------------------------------\n# Copyright (C) 2013-2014 Bloomberg Finance L.P.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ------------------------------ END-OF-FILE ----------------------------------\n", "meta": {"content_hash": "7e19f938becb8eca646b05639a647d32", "timestamp": "", "source": "github", "line_count": 623, "max_line_length": 79, "avg_line_length": 35.258426966292134, "alnum_prop": 0.5866794136392607, "repo_name": "valhallasw/phabricator-tools", "id": "14468f155655f4eca7646f095836170115a5608a", "size": "21966", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "py/abd/abdt_branch.py", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C++", "bytes": "342"}, {"name": "Puppet", "bytes": "4246"}, {"name": "Python", "bytes": "964066"}, {"name": "Ruby", "bytes": "2000"}, {"name": "Shell", "bytes": "128202"}]}} {"text": "package com.eyeem.recyclerviewtools.adapter;\n\nimport android.support.v7.widget.RecyclerView;\nimport android.view.View;\n\nimport com.eyeem.recyclerviewtools.OnItemClickListener;\n\n/**\n * Created by budius on 01.04.15.\n *

\n * Simple implementation of {@link android.widget.AdapterView.OnItemClickListener AdapterView.OnItemClickListener}\n * refactored to {@link android.support.v7.widget.RecyclerView RecyclerView}\n *

\n * Just like original, this only catch clicks on the whole view.\n * For finer control on the target view for the click, you still must create a custom implementation.\n */\n/* package */ class OnItemClickListenerDetector implements View.OnClickListener {\n\n private final RecyclerView recyclerView;\n private final OnItemClickListener onItemClickListener;\n final boolean ignoreExtras;\n\n OnItemClickListenerDetector(\n RecyclerView recyclerView,\n OnItemClickListener onItemClickListener,\n boolean ignoreExtras) {\n this.recyclerView = recyclerView;\n this.onItemClickListener = onItemClickListener;\n this.ignoreExtras = ignoreExtras;\n }\n\n @Override\n public void onClick(View view) {\n\n RecyclerView.ViewHolder holder = recyclerView.getChildViewHolder(view);\n int position = holder.getAdapterPosition();\n long id = holder.getItemId();\n\n RecyclerView.Adapter adapter = recyclerView.getAdapter();\n\n if (ignoreExtras && adapter instanceof WrapAdapter) {\n WrapAdapter a = (WrapAdapter) adapter;\n position = a.recyclerToWrappedPosition.get(position);\n }\n\n // this can happen if data set is changing onItemClick and user clicks fast\n if (position < 0 || position >= adapter.getItemCount()) return;\n\n onItemClickListener.onItemClick(recyclerView, view, position, id, holder);\n }\n}\n", "meta": {"content_hash": "d121f2b06eb0026044323e30ba68d0ae", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 114, "avg_line_length": 35.470588235294116, "alnum_prop": 0.7357656163626313, "repo_name": "eyeem/RecyclerViewTools", "id": "1d76d2043cf44e1f946bdd4254b8ce8b6b43d2e6", "size": "1809", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "library/src/main/java/com/eyeem/recyclerviewtools/adapter/OnItemClickListenerDetector.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "54239"}]}} {"text": "\n\npackage org.kaazing.gateway.service.cluster;\n\nimport java.util.Collection;\nimport 
java.util.List;\nimport java.util.concurrent.locks.Lock;\n\nimport com.hazelcast.core.IdGenerator;\nimport org.kaazing.gateway.service.messaging.buffer.MessageBufferFactory;\nimport org.kaazing.gateway.service.messaging.collections.CollectionsFactory;\n\npublic interface ClusterContext {\n\n // initialization\n void start();\n \n // for gateway shutdown\n void dispose();\n \n // cluster participation\n MemberId getLocalMember();\n String getInstanceKey(MemberId memberId);\n \n String getClusterName();\n\n // Return a list of the memberIds of all current cluster members\n Collection getMemberIds();\n\n List getAccepts();\n List getConnects();\n ClusterConnectOptionsContext getConnectOptions();\n\n // cluster collections\n Lock getLock(Object obj);\n IdGenerator getIdGenerator(String name);\n\n // cluster messaging\n void addReceiveTopic(String name);\n void addReceiveQueue(String name);\n T send(Object msg, MemberId member) throws Exception;\n T send(Object msg, String name) throws Exception;\n void send(Object msg, final SendListener listener, MemberId member);\n void send(Object msg, final SendListener listener, String name);\n void setReceiver(Class type, ReceiveListener receiveListener);\n void removeReceiver(Class type);\n\n // event listener\n void addMembershipEventListener(MembershipEventListener eventListener);\n void removeMembershipEventListener(MembershipEventListener eventListener);\n\n // instanceKey listener\n void addInstanceKeyListener(InstanceKeyListener instanceKeyListener);\n void removeInstanceKeyListener(InstanceKeyListener instanceKeyListener);\n\n // balancermap listener\n void addBalancerMapListener(BalancerMapListener balancerMapListener);\n void removeBalancerMapListener(BalancerMapListener balancerMapListener);\n\n MessageBufferFactory getMessageBufferFactory();\n CollectionsFactory getCollectionsFactory();\n\n void logClusterState();\n}\n", "meta": {"content_hash": "3e0340fc604a552d21e90c6fa23e5dd8", "timestamp": "", "source": "github", 
"line_count": 64, "max_line_length": 78, "avg_line_length": 32.78125, "alnum_prop": 0.7640610104861774, "repo_name": "biddyweb/gateway", "id": "921cdc3f28e19a42ed8fc680491cff33f873b20e", "size": "2981", "binary": false, "copies": "7", "ref": "refs/heads/develop", "path": "service/spi/src/main/java/org/kaazing/gateway/service/cluster/ClusterContext.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "2250"}, {"name": "HTML", "bytes": "38948"}, {"name": "Java", "bytes": "10720288"}, {"name": "JavaScript", "bytes": "3868"}, {"name": "Shell", "bytes": "5534"}, {"name": "XSLT", "bytes": "5493"}]}} {"text": "addJS('js/blueprint.js');\n }\n\n protected function renderRightMenu() {\n $links = LinkUI('manager log',\n 'viewlog.php?sid='.$this->service->getSID())->setExternal(true);\n\n if ($this->service->isRunning()) {\n $console_url = $this->service->getAccessLocation();\n $links .= ' · ' .LinkUI('hub', $console_url)->setExternal(true);\n }\n\n return '

'.$links.'
';\n }\n\n protected function renderInstanceActions() {\n return EditableTag()->setColor('purple')->setID('node')->setValue('0')->setText('BluePrint Nodes');\n }\n\n public function renderContent() {\n return $this->renderInstancesSection();\n }\n}\n?>\n", "meta": {"content_hash": "bebc9cb2a887f09c904012438273c80c", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 107, "avg_line_length": 29.62162162162162, "alnum_prop": 0.6076642335766423, "repo_name": "ConPaaS-team/conpaas", "id": "15ec789be8fbe714aa6f691ae19aa487fff7c305", "size": "2737", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "conpaas-blueprints/conpaas-frontend/www/lib/ui/page/blueprint/__init__.php", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "ApacheConf", "bytes": "79"}, {"name": "Batchfile", "bytes": "2136"}, {"name": "C", "bytes": "12346"}, {"name": "CSS", "bytes": "47680"}, {"name": "HTML", "bytes": "5494"}, {"name": "Java", "bytes": "404303"}, {"name": "JavaScript", "bytes": "164519"}, {"name": "M4", "bytes": "553"}, {"name": "Makefile", "bytes": "78772"}, {"name": "Nginx", "bytes": "1980"}, {"name": "PHP", "bytes": "1900634"}, {"name": "Python", "bytes": "2842443"}, {"name": "Shell", "bytes": "232043"}, {"name": "Smarty", "bytes": "15450"}]}} {"text": "\npackage org.apache.hyracks.control.common.job.profiling.om;\n\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport java.io.Serializable;\n\nimport org.apache.hyracks.api.io.IWritable;\nimport org.apache.hyracks.api.partitions.PartitionId;\nimport org.apache.hyracks.control.common.job.profiling.counters.MultiResolutionEventProfiler;\n\npublic class PartitionProfile implements IWritable, Serializable {\n private static final long serialVersionUID = 1L;\n\n private PartitionId pid;\n\n private long openTime;\n\n private long closeTime;\n\n private MultiResolutionEventProfiler mrep;\n\n public static PartitionProfile create(DataInput dis) 
throws IOException {\n PartitionProfile partitionProfile = new PartitionProfile();\n partitionProfile.readFields(dis);\n return partitionProfile;\n }\n\n private PartitionProfile() {\n\n }\n\n public PartitionProfile(PartitionId pid, long openTime, long closeTime, MultiResolutionEventProfiler mrep) {\n this.pid = pid;\n this.openTime = openTime;\n this.closeTime = closeTime;\n this.mrep = mrep;\n }\n\n public PartitionId getPartitionId() {\n return pid;\n }\n\n public long getOpenTime() {\n return openTime;\n }\n\n public long getCloseTime() {\n return closeTime;\n }\n\n public MultiResolutionEventProfiler getSamples() {\n return mrep;\n }\n\n @Override\n public void writeFields(DataOutput output) throws IOException {\n output.writeLong(closeTime);\n output.writeLong(openTime);\n mrep.writeFields(output);\n pid.writeFields(output);\n }\n\n @Override\n public void readFields(DataInput input) throws IOException {\n closeTime = input.readLong();\n openTime = input.readLong();\n mrep = MultiResolutionEventProfiler.create(input);\n pid = PartitionId.create(input);\n }\n}", "meta": {"content_hash": "6d10aa221472af5036271db11282998d", "timestamp": "", "source": "github", "line_count": 72, "max_line_length": 112, "avg_line_length": 26.430555555555557, "alnum_prop": 0.7004729374671571, "repo_name": "kisskys/incubator-asterixdb-hyracks", "id": "b1cd16d58f67228830e159253ef64ef05f99a154", "size": "2710", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/PartitionProfile.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "2606"}, {"name": "CSS", "bytes": "893"}, {"name": "HTML", "bytes": "8762"}, {"name": "Java", "bytes": "8406768"}, {"name": "JavaScript", "bytes": "24904"}, {"name": "Shell", "bytes": "16545"}]}} {"text": "\ufeff// 
----------------------------------------------------------------------------\n// \n// PhotonNetwork Framework for Unity - Copyright (C) 2011 Exit Games GmbH\n// \n// \n//\n// \n// developer@exitgames.com\n// ----------------------------------------------------------------------------\n\n#pragma warning disable 1587\n/// \\file\n/// Wraps up several of the commonly used enumerations. \n#pragma warning restore 1587\n\n\nusing System;\nusing ExitGames.Client.Photon;\n\n\n/// \n/// This enum defines the set of MonoMessages Photon Unity Networking is using as callbacks. Implemented by PunBehaviour.\n/// \n/// \n/// Much like \"Update()\" in Unity, PUN will call methods in specific situations.\n/// Often, these methods are triggered when network operations complete (example: when joining a room).\n///\n/// All those methods are defined and described in this enum and implemented by PunBehaviour\n/// (which makes it easy to implement them as override).\n///\n/// Each entry is the name of such a method and the description tells you when it gets used by PUN.\n///\n/// Make sure to read the remarks per entry as some methods have optional parameters.\n/// \n/// \\ingroup publicApi\npublic enum PhotonNetworkingMessage\n{\n /// \n /// Called when the initial connection got established but before you can use the server. OnJoinedLobby() or OnConnectedToMaster() are called when PUN is ready.\n /// \n /// \n /// This callback is only useful to detect if the server can be reached at all (technically).\n /// Most often, it's enough to implement OnFailedToConnectToPhoton() and OnDisconnectedFromPhoton().\n ///\n /// OnJoinedLobby() or OnConnectedToMaster() are called when PUN is ready.\n ///\n /// When this is called, the low level connection is established and PUN will send your AppId, the user, etc in the background.\n /// This is not called for transitions from the masterserver to game servers.\n ///\n /// Example: void OnConnectedToPhoton() { ... 
}\n /// \n OnConnectedToPhoton,\n\n /// \n /// Called when the local user/client left a room.\n /// \n /// \n /// When leaving a room, PUN brings you back to the Master Server.\n /// Before you can use lobbies and join or create rooms, OnJoinedLobby() or OnConnectedToMaster() will get called again.\n ///\n /// Example: void OnLeftRoom() { ... }\n /// \n OnLeftRoom,\n\n /// \n /// Called after switching to a new MasterClient when the current one leaves.\n /// \n /// \n /// This is not called when this client enters a room.\n /// The former MasterClient is still in the player list when this method get called.\n ///\n /// Example: void OnMasterClientSwitched(PhotonPlayer newMasterClient) { ... }\n /// \n OnMasterClientSwitched,\n\n /// \n /// Called when a CreateRoom() call failed. Optional parameters provide ErrorCode and message.\n /// \n /// \n /// Most likely because the room name is already in use (some other client was faster than you).\n /// PUN logs some info if the PhotonNetwork.logLevel is >= PhotonLogLevel.Informational.\n ///\n /// Example: void OnPhotonCreateRoomFailed() { ... }\n ///\n /// Example: void OnPhotonCreateRoomFailed(object[] codeAndMsg) { // codeAndMsg[0] is short ErrorCode. codeAndMsg[1] is string debug msg. }\n /// \n OnPhotonCreateRoomFailed,\n\n /// \n /// Called when a JoinRoom() call failed. Optional parameters provide ErrorCode and message.\n /// \n /// \n /// Most likely error is that the room does not exist or the room is full (some other client was faster than you).\n /// PUN logs some info if the PhotonNetwork.logLevel is >= PhotonLogLevel.Informational.\n ///\n /// Example: void OnPhotonJoinRoomFailed() { ... }\n ///\n /// Example: void OnPhotonJoinRoomFailed(object[] codeAndMsg) { // codeAndMsg[0] is short ErrorCode. codeAndMsg[1] is string debug msg. }\n /// \n OnPhotonJoinRoomFailed,\n\n /// \n /// Called when this client created a room and entered it. 
OnJoinedRoom() will be called as well.\n /// \n /// \n /// This callback is only called on the client which created a room (see PhotonNetwork.CreateRoom).\n ///\n /// As any client might close (or drop connection) anytime, there is a chance that the\n /// creator of a room does not execute OnCreatedRoom.\n ///\n /// If you need specific room properties or a \"start signal\", it is safer to implement\n /// OnMasterClientSwitched() and to make the new MasterClient check the room's state.\n ///\n /// Example: void OnCreatedRoom() { ... }\n /// \n OnCreatedRoom,\n\n /// \n /// Called on entering a lobby on the Master Server. The actual room-list updates will call OnReceivedRoomListUpdate().\n /// \n /// \n /// Note: When PhotonNetwork.autoJoinLobby is false, OnConnectedToMaster() will be called and the room list won't become available.\n ///\n /// While in the lobby, the roomlist is automatically updated in fixed intervals (which you can't modify).\n /// The room list gets available when OnReceivedRoomListUpdate() gets called after OnJoinedLobby().\n ///\n /// Example: void OnJoinedLobby() { ... }\n /// \n OnJoinedLobby,\n\n /// \n /// Called after leaving a lobby.\n /// \n /// \n /// When you leave a lobby, [CreateRoom](@ref PhotonNetwork.CreateRoom) and [JoinRandomRoom](@ref PhotonNetwork.JoinRandomRoom)\n /// automatically refer to the default lobby.\n ///\n /// Example: void OnLeftLobby() { ... }\n /// \n OnLeftLobby,\n\n /// \n /// Called after disconnecting from the Photon server.\n /// \n /// \n /// In some cases, other callbacks are called before OnDisconnectedFromPhoton is called.\n /// Examples: OnConnectionFail() and OnFailedToConnectToPhoton().\n ///\n /// Example: void OnDisconnectedFromPhoton() { ... 
}\n /// \n OnDisconnectedFromPhoton,\n\n /// \n /// Called when something causes the connection to fail (after it was established), followed by a call to OnDisconnectedFromPhoton().\n /// \n /// \n /// If the server could not be reached in the first place, OnFailedToConnectToPhoton is called instead.\n /// The reason for the error is provided as StatusCode.\n ///\n /// Example: void OnConnectionFail(DisconnectCause cause) { ... }\n /// \n OnConnectionFail,\n\n /// \n /// Called if a connect call to the Photon server failed before the connection was established, followed by a call to OnDisconnectedFromPhoton().\n /// \n /// \n /// OnConnectionFail only gets called when a connection to a Photon server was established in the first place.\n ///\n /// Example: void OnFailedToConnectToPhoton(DisconnectCause cause) { ... }\n /// \n OnFailedToConnectToPhoton,\n\n /// \n /// Called for any update of the room-listing while in a lobby (PhotonNetwork.insideLobby) on the Master Server.\n /// \n /// \n /// PUN provides the list of rooms by PhotonNetwork.GetRoomList().
\n /// Each item is a RoomInfo which might include custom properties (provided you defined those as lobby-listed when creating a room).\n ///\n /// Not all types of lobbies provide a listing of rooms to the client. Some are silent and specialized for server-side matchmaking.\n ///\n /// Example: void OnReceivedRoomListUpdate() { ... }\n ///
\n OnReceivedRoomListUpdate,\n\n /// \n /// Called when entering a room (by creating or joining it). Called on all clients (including the Master Client).\n /// \n /// \n /// This method is commonly used to instantiate player characters.\n /// If a match has to be started \"actively\", you can instead call an [PunRPC](@ref PhotonView.RPC) triggered by a user's button-press or a timer.\n ///\n /// When this is called, you can usually already access the existing players in the room via PhotonNetwork.playerList.\n /// Also, all custom properties should be already available as Room.customProperties. Check Room.playerCount to find out if\n /// enough players are in the room to start playing.\n ///\n /// Example: void OnJoinedRoom() { ... }\n /// \n OnJoinedRoom,\n\n /// \n /// Called when a remote player entered the room. This PhotonPlayer is already added to the playerlist at this time.\n /// \n /// \n /// If your game starts with a certain number of players, this callback can be useful to check the\n /// Room.playerCount and find out if you can start.\n ///\n /// Example: void OnPhotonPlayerConnected(PhotonPlayer newPlayer) { ... }\n /// \n OnPhotonPlayerConnected,\n\n /// \n /// Called when a remote player left the room. This PhotonPlayer is already removed from the playerlist at this time.\n /// \n /// \n /// When your client calls PhotonNetwork.leaveRoom, PUN will call this method on the remaining clients.\n /// When a remote client drops connection or gets closed, this callback gets executed. after a timeout\n /// of several seconds.\n ///\n /// Example: void OnPhotonPlayerDisconnected(PhotonPlayer otherPlayer) { ... }\n /// \n OnPhotonPlayerDisconnected,\n\n /// \n /// Called after a JoinRandom() call failed. 
Optional parameters provide ErrorCode and message.\n /// \n /// \n /// Most likely all rooms are full or no rooms are available.\n /// When using multiple lobbies (via JoinLobby or TypedLobby), another lobby might have more/fitting rooms.\n /// PUN logs some info if the PhotonNetwork.logLevel is >= PhotonLogLevel.Informational.\n ///\n /// Example: void OnPhotonRandomJoinFailed() { ... }\n ///\n /// Example: void OnPhotonRandomJoinFailed(object[] codeAndMsg) { // codeAndMsg[0] is short ErrorCode. codeAndMsg[1] is string debug msg. }\n /// \n OnPhotonRandomJoinFailed,\n\n /// \n /// Called after the connection to the master is established and authenticated but only when PhotonNetwork.autoJoinLobby is false.\n /// \n /// \n /// If you set PhotonNetwork.autoJoinLobby to true, OnJoinedLobby() will be called instead of this.\n ///\n /// You can join rooms and create them even without being in a lobby. The default lobby is used in that case.\n /// The list of available rooms won't become available unless you join a lobby via PhotonNetwork.joinLobby.\n ///\n /// Example: void OnConnectedToMaster() { ... }\n /// \n OnConnectedToMaster,\n\n /// \n /// Implement to customize the data a PhotonView regularly synchronizes. Called every 'network-update' when observed by PhotonView.\n /// \n /// \n /// This method will be called in scripts that are assigned as Observed component of a PhotonView.\n /// PhotonNetwork.sendRateOnSerialize affects how often this method is called.\n /// PhotonNetwork.sendRate affects how often packages are sent by this client.\n ///\n /// Implementing this method, you can customize which data a PhotonView regularly synchronizes.\n /// Your code defines what is being sent (content) and how your data is used by receiving clients.\n ///\n /// Unlike other callbacks, OnPhotonSerializeView only gets called when it is assigned\n /// to a PhotonView as PhotonView.observed script.\n ///\n /// To make use of this method, the PhotonStream is essential. 
It will be in \"writing\" mode\" on the\n /// client that controls a PhotonView (PhotonStream.isWriting == true) and in \"reading mode\" on the\n /// remote clients that just receive that the controlling client sends.\n ///\n /// If you skip writing any value into the stream, PUN will skip the update. Used carefully, this can\n /// conserve bandwidth and messages (which have a limit per room/second).\n ///\n /// Note that OnPhotonSerializeView is not called on remote clients when the sender does not send\n /// any update. This can't be used as \"x-times per second Update()\".\n ///\n /// Example: void OnPhotonSerializeView(PhotonStream stream, PhotonMessageInfo info) { ... }\n /// \n OnPhotonSerializeView,\n\n /// \n /// Called on all scripts on a GameObject (and children) that have been Instantiated using PhotonNetwork.Instantiate.\n /// \n /// \n /// PhotonMessageInfo parameter provides info about who created the object and when (based off PhotonNetworking.time).\n ///\n /// Example: void OnPhotonInstantiate(PhotonMessageInfo info) { ... }\n /// \n OnPhotonInstantiate,\n\n /// \n /// Because the concurrent user limit was (temporarily) reached, this client is rejected by the server and disconnecting.\n /// \n /// \n /// When this happens, the user might try again later. You can't create or join rooms in OnPhotonMaxCcuReached(), cause the client will be disconnecting.\n /// You can raise the CCU limits with a new license (when you host yourself) or extended subscription (when using the Photon Cloud).\n /// The Photon Cloud will mail you when the CCU limit was reached. This is also visible in the Dashboard (webpage).\n ///\n /// Example: void OnPhotonMaxCccuReached() { ... }\n /// \n OnPhotonMaxCccuReached,\n\n /// \n /// Called when a room's custom properties changed. 
The propertiesThatChanged contains all that was set via Room.SetCustomProperties.\n /// \n /// \n /// Since v1.25 this method has one parameter: Hashtable propertiesThatChanged.\n /// Changing properties must be done by Room.SetCustomProperties, which causes this callback locally, too.\n ///\n /// Example: void OnPhotonCustomRoomPropertiesChanged(Hashtable propertiesThatChanged) { ... }\n /// \n OnPhotonCustomRoomPropertiesChanged,\n\n /// \n /// Called when custom player-properties are changed. Player and the changed properties are passed as object[].\n /// \n /// \n /// Since v1.25 this method has one parameter: object[] playerAndUpdatedProps, which contains two entries.
\n /// [0] is the affected PhotonPlayer.
\n /// [1] is the Hashtable of properties that changed.
\n ///\n /// We are using a object[] due to limitations of Unity's GameObject.SendMessage (which has only one optional parameter).\n ///\n /// Changing properties must be done by PhotonPlayer.SetCustomProperties, which causes this callback locally, too.\n ///\n /// Example:
\n    /// void OnPhotonPlayerPropertiesChanged(object[] playerAndUpdatedProps) {\n    ///     PhotonPlayer player = playerAndUpdatedProps[0] as PhotonPlayer;\n    ///     Hashtable props = playerAndUpdatedProps[1] as Hashtable;\n    ///     //...\n    /// }
\n ///
\n OnPhotonPlayerPropertiesChanged,\n\n /// \n /// Called when the server sent the response to a FindFriends request and updated PhotonNetwork.Friends.\n /// \n /// \n /// The friends list is available as PhotonNetwork.Friends, listing name, online state and\n /// the room a user is in (if any).\n ///\n /// Example: void OnUpdatedFriendList() { ... }\n /// \n OnUpdatedFriendList,\n\n /// \n /// Called when the custom authentication failed. Followed by disconnect!\n /// \n /// \n /// Custom Authentication can fail due to user-input, bad tokens/secrets.\n /// If authentication is successful, this method is not called. Implement OnJoinedLobby() or OnConnectedToMaster() (as usual).\n ///\n /// During development of a game, it might also fail due to wrong configuration on the server side.\n /// In those cases, logging the debugMessage is very important.\n ///\n /// Unless you setup a custom authentication service for your app (in the [Dashboard](https://www.photonengine.com/dashboard)),\n /// this won't be called!\n ///\n /// Example: void OnCustomAuthenticationFailed(string debugMessage) { ... }\n /// \n OnCustomAuthenticationFailed,\n\n /// \n /// Called when your Custom Authentication service responds with additional data.\n /// \n /// \n /// Custom Authentication services can include some custom data in their response.\n /// When present, that data is made available in this callback as Dictionary.\n /// While the keys of your data have to be strings, the values can be either string or a number (in Json).\n /// You need to make extra sure, that the value type is the one you expect. Numbers become (currently) int64.\n ///\n /// Example: void OnCustomAuthenticationResponse(Dictionary<string, object> data) { ... }\n /// \n /// \n OnCustomAuthenticationResponse,\n\n /// \n /// Called by PUN when the response to a WebRPC is available. 
See PhotonNetwork.WebRPC.\n /// \n /// \n /// Important: The response.ReturnCode is 0 if Photon was able to reach your web-service.\n /// The content of the response is what your web-service sent. You can create a WebResponse instance from it.\n /// Example: WebRpcResponse webResponse = new WebRpcResponse(operationResponse);\n ///\n /// Please note: Class OperationResponse is in a namespace which needs to be \"used\":\n /// using ExitGames.Client.Photon; // includes OperationResponse (and other classes)\n ///\n /// The OperationResponse.ReturnCode by Photon is:\n /// 0 for \"OK\"\n /// -3 for \"Web-Service not configured\" (see Dashboard / WebHooks)\n /// -5 for \"Web-Service does now have RPC path/name\" (at least for Azure)\n ///\n /// Example: void OnWebRpcResponse(OperationResponse response) { ... }\n /// \n OnWebRpcResponse,\n\n /// \n /// Called when another player requests ownership of a PhotonView from you (the current owner).\n /// \n /// \n /// The parameter viewAndPlayer contains:\n ///\n /// PhotonView view = viewAndPlayer[0] as PhotonView;\n ///\n /// PhotonPlayer requestingPlayer = viewAndPlayer[1] as PhotonPlayer;\n /// \n /// void OnOwnershipRequest(object[] viewAndPlayer) {} //\n OnOwnershipRequest,\n\n /// \n /// Called when the Master Server sent an update for the Lobby Statistics, updating PhotonNetwork.LobbyStatistics.\n /// \n /// \n /// This callback has two preconditions:\n /// EnableLobbyStatistics must be set to true, before this client connects.\n /// And the client has to be connected to the Master Server, which is providing the info about lobbies.\n /// \n OnLobbyStatisticsUpdate,\n\n\n\t/// \n\t/// Called when a remote Photon Player activity changed. 
This will be called ONLY is PlayerTtl is greater then 0.\n\t///\n\t/// Use PhotonPlayer.IsInactive to check the current activity state\n\t///\n\t/// Example: void OnPhotonPlayerActivityChanged(PhotonPlayer otherPlayer) {...}\n\t/// \n\t/// \n\t/// This callback has precondition:\n\t/// PlayerTtl must be greater then 0\n\t/// \n\tOnPhotonPlayerActivityChanged,\n\n\n\t/// \n\t/// Called when a PhotonView Owner is transfered to a Player.\n\t/// \n\t/// \n\t/// The parameter viewAndPlayers contains:\n\t///\n\t/// PhotonView view = viewAndPlayers[0] as PhotonView;\n\t///\n\t/// PhotonPlayer newOwner = viewAndPlayers[1] as PhotonPlayer;\n\t///\n\t/// PhotonPlayer oldOwner = viewAndPlayers[2] as PhotonPlayer;\n\t/// \n\t/// void OnOwnershipTransfered(object[] viewAndPlayers) {} //\n\tOnOwnershipTransfered,\n}\n\n\n/// Used to define the level of logging output created by the PUN classes. Either log errors, info (some more) or full.\n/// \\ingroup publicApi\npublic enum PhotonLogLevel\n{\n /// Show only errors. Minimal output. Note: Some might be \"runtime errors\" which you have to expect.\n ErrorsOnly,\n /// Logs some of the workflow, calls and results.\n Informational,\n /// Every available log call gets into the console/log. Only use for debugging.\n Full\n}\n\n\n/// Enum of \"target\" options for RPCs. These define which remote clients get your RPC call. \n/// \\ingroup publicApi\npublic enum PhotonTargets\n{\n /// Sends the RPC to everyone else and executes it immediately on this client. Player who join later will not execute this RPC.\n All,\n /// Sends the RPC to everyone else. This client does not execute the RPC. Player who join later will not execute this RPC.\n Others,\n /// Sends the RPC to MasterClient only. Careful: The MasterClient might disconnect before it executes the RPC and that might cause dropped RPCs.\n MasterClient,\n /// Sends the RPC to everyone else and executes it immediately on this client. 
New players get the RPC when they join as it's buffered (until this client leaves).\n AllBuffered,\n /// Sends the RPC to everyone. This client does not execute the RPC. New players get the RPC when they join as it's buffered (until this client leaves).\n OthersBuffered,\n /// Sends the RPC to everyone (including this client) through the server.\n /// \n /// This client executes the RPC like any other when it received it from the server.\n /// Benefit: The server's order of sending the RPCs is the same on all clients.\n /// \n AllViaServer,\n /// Sends the RPC to everyone (including this client) through the server and buffers it for players joining later.\n /// \n /// This client executes the RPC like any other when it received it from the server.\n /// Benefit: The server's order of sending the RPCs is the same on all clients.\n /// \n AllBufferedViaServer\n}\n\n\n/// Currently available Photon Cloud regions as enum.\n/// \n/// This is used in PhotonNetwork.ConnectToRegion.\n/// \npublic enum CloudRegionCode\n{\n /// European servers in Amsterdam.\n eu = 0,\n /// US servers (East Coast).\n us = 1,\n /// Asian servers in Singapore.\n asia = 2,\n /// Japanese servers in Tokyo.\n jp = 3,\n /// Australian servers in Melbourne.\n au = 5,\n ///USA West, San Jos\u00e9, usw\n usw = 6,\n ///South America, Sao Paulo, sa\n sa = 7,\n ///Canada East, Montreal, cae\n cae = 8,\n ///South Korea, Seoul, kr\n kr = 9,\n ///India, Chennai, in\n @in = 10,\n /// Russia, ru\n ru = 11,\n\n /// No region selected.\n none = 4\n};\n\n\n/// \n/// Available regions as enum of flags. 
To be used as \"enabled\" flags for Best Region pinging.\n/// \n/// Note that these enum values skip CloudRegionCode.none and their values are in strict order (power of 2).\n[Flags]\npublic enum CloudRegionFlag\n{\n eu = 1 << 0,\n us = 1 << 1,\n asia = 1 << 2,\n jp = 1 << 3,\n au = 1 << 4,\n usw = 1 << 5,\n sa = 1 << 6,\n cae = 1 << 7,\n kr = 1 << 8,\n @in = 1 << 9,\n ru = 1 << 10\n};\n\n\n/// \n/// High level connection state of the client. Better use the more detailed .\n/// \npublic enum ConnectionState\n{\n Disconnected,\n Connecting,\n Connected,\n Disconnecting,\n InitializingApplication\n}\n\n\n/// \n/// Defines how the communication gets encrypted.\n/// \npublic enum EncryptionMode\n{\n /// \n /// This is the default encryption mode: Messages get encrypted only on demand (when you send operations with the \"encrypt\" parameter set to true).\n /// \n PayloadEncryption,\n /// \n /// With this encryption mode for UDP, the connection gets setup and all further datagrams get encrypted almost entirely. 
On-demand message encryption (like in PayloadEncryption) is skipped.\n /// \n /// \n /// This mode requires AuthOnce or AuthOnceWss as AuthMode!\n /// \n DatagramEncryption = 10,\n}\n\n\npublic static class EncryptionDataParameters\n{\n /// \n /// Key for encryption mode\n /// \n public const byte Mode = 0;\n /// \n /// Key for first secret\n /// \n public const byte Secret1 = 1;\n /// \n /// Key for second secret\n /// \n public const byte Secret2 = 2;\n}", "meta": {"content_hash": "3dac8078c3a3ccd663352f86fb27aa45", "timestamp": "", "source": "github", "line_count": 594, "max_line_length": 194, "avg_line_length": 42.86531986531987, "alnum_prop": 0.6711570183017831, "repo_name": "mark818/VRGame", "id": "0f68e462b72e5de534ecc4f7025bf9413287b5ee", "size": "25465", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Assets/Photon Unity Networking/Plugins/PhotonNetwork/Enums.cs", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "C#", "bytes": "2517881"}, {"name": "HLSL", "bytes": "11555"}, {"name": "JavaScript", "bytes": "43789"}, {"name": "Mask", "bytes": "305"}, {"name": "ShaderLab", "bytes": "27926"}]}} {"text": "assertTrue(true);\n }\n\n /**\n * @group fixtures\n */\n public function testFalsehood()\n {\n $this->assertFalse(false);\n }\n\n /**\n * @group fixtures\n */\n public function testArrayLength()\n {\n $elems = array(1,2,3,4,5);\n $this->assertEquals(5, sizeof($elems));\n }\n}\n", "meta": {"content_hash": "ff6ab174255b36337e2c71021d233a4f", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 66, "avg_line_length": 16.548387096774192, "alnum_prop": 0.5341130604288499, "repo_name": "quizlet/paratest", "id": "04a48d88b9018142c2e4d3edeb0fdef88ee245a0", "size": "513", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "test/fixtures/passing-tests/level1/level2/UnitTestInSubSubLevelTest.php", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "70"}, {"name": 
"PHP", "bytes": "237507"}, {"name": "Ruby", "bytes": "270"}]}} {"text": "@interface MyViewController () @end\n\n@implementation MyViewController {}\n\n@synthesize webView, baseURL, url;\n\n- (void)viewDidLoad\n{\n\t[super viewDidLoad];\n\tself.title = @\"Presented ViewController\";\n\t[self.webView setMainFrameURL:[self appURL]];\n}\n\n- (IBAction)dismiss:(id)sender\n{\n\tif (self.presentingViewController) {\n\t\t[self.presentingViewController dismissViewController:self];\n\t} else {\n\t\t// for the 'show' transition\n\t\t[self.view.window close];\n\t}\n}\n\n// Changes the greeting message by executing a function in JavaScript.\n// This is triggered from the Change Greeting menu item.\n- (IBAction)changeGreeting:(id)sender\n{\n\t[[webView windowScriptObject] evaluateWebScript:@\"changeGreeting('Hello from Objective-C!')\"];\n}\n\n// Here we grab the URL to the bundled index.html document.\n// Normally it would be the URL to your web app such as @\"http://example.com\".\n- (NSString *)appURL\n{\n\t// return [[[NSBundle mainBundle] URLForResource:@\"http://www.google\" withExtension:@\"html\"] absoluteString];\n\n//\tself.baseURL\t= [NSURL URLWithString:@\"file:///path/to/web_root/\"];\n//\tself.url\t\t= [NSURL URLWithString:@\"folder/file.html\" relativeToURL:self.baseURL];\n \n NSString *path = [[NSBundle mainBundle] pathForResource:@\"index\" ofType:@\"html\"];\n self.url = [NSURL fileURLWithPath:path];\n [[self.webView mainFrame] loadRequest:[NSURLRequest requestWithURL:self.url]];\n\n\tNSURL *absURL = [self.url absoluteURL];\n\tNSLog(@\"absURL = %@\", absURL);\n\tNSString *getURL = [NSString stringWithContentsOfURL:absURL encoding:1000 error:nil];\n\t// self.title = [NSString stringWithContentsOfURL:absURL encoding:1000 error:nil];;\n\n\treturn getURL;\n}\n\n// This delegate method gets triggered every time the page loads, but before the JavaScript runs\n- (void)webView:(WebView *)webView windowScriptObjectAvailable:(WebScriptObject *)windowScriptObject\n{\n\t// 
Allow this class to be usable through the \"window.app\" object in JavaScript\n\t// This could be any Objective-C class\n\t[windowScriptObject setValue:self forKey:@\"app\"];\n}\n\n\n- (IBAction)resetSafari:(id)sender {\n \n \n\t[self openAppleScript:(NSString *)@\"SafariCloseAllWindows\"];\n [self openAppleScript:(NSString *)@\"ResetSafari\"];\n \n \n}\n\n#pragma mark ViewController openAppleScript\n\n- (void)openAppleScript:(NSString *)scriptName\n{\n NSLog(@\"%@\", NSStringFromSelector(_cmd));\n NSString\t\t*path\t\t\t= [[NSBundle mainBundle] pathForResource:scriptName ofType:@\"scpt\"];\n NSURL\t\t\t*openUrl\t\t\t= [NSURL fileURLWithPath:path]; NSDictionary *errors = [NSDictionary dictionary];\n NSAppleScript\t*appleScript\t= [[NSAppleScript alloc] initWithContentsOfURL:openUrl error:&errors];\n [appleScript executeAndReturnError:nil];\n}\n\n- (IBAction)openMyPayPal:(id)sender{\n\n [self openAppleScript:(NSString *)@\"OpenMyPaypal\"];\n\n}\n\n@end\n", "meta": {"content_hash": "99e5612dbe476b6b06200030baf9cded", "timestamp": "", "source": "github", "line_count": 87, "max_line_length": 110, "avg_line_length": 31.908045977011493, "alnum_prop": 0.7312680115273775, "repo_name": "RandyMcMillan/GoogleHacks", "id": "144dd413cccbd97fef34a8b9d66c3d468e7999ca", "size": "2951", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "osx/GoogleHacks/GoogleHacks/MyViewController.m", "mode": "33188", "license": "mit", "language": [{"name": "AppleScript", "bytes": "1208"}, {"name": "C", "bytes": "8953"}, {"name": "CSS", "bytes": "10163"}, {"name": "HTML", "bytes": "5058"}, {"name": "JavaScript", "bytes": "93489"}, {"name": "Objective-C", "bytes": "259798"}, {"name": "REALbasic", "bytes": "98880"}, {"name": "Shell", "bytes": "29131"}]}} {"text": "/**\n * @author Toru Nagashima\n * See LICENSE file in root directory for full license.\n */\n\"use strict\"\n\nconst { READ } = require(\"eslint-utils\")\nconst checkForPreferGlobal = 
require(\"../../util/check-prefer-global\")\n\nconst trackMap = {\n globals: {\n process: { [READ]: true },\n },\n modules: {\n process: { [READ]: true },\n },\n}\n\nmodule.exports = {\n meta: {\n docs: {\n description: 'enforce either `process` or `require(\"process\")`',\n category: \"Stylistic Issues\",\n recommended: false,\n url:\n \"https://github.com/mysticatea/eslint-plugin-node/blob/v11.1.0/docs/rules/prefer-global/process.md\",\n },\n type: \"suggestion\",\n fixable: null,\n schema: [{ enum: [\"always\", \"never\"] }],\n messages: {\n preferGlobal:\n \"Unexpected use of 'require(\\\"process\\\")'. Use the global variable 'process' instead.\",\n preferModule:\n \"Unexpected use of the global variable 'process'. Use 'require(\\\"process\\\")' instead.\",\n },\n },\n\n create(context) {\n return {\n \"Program:exit\"() {\n checkForPreferGlobal(context, trackMap)\n },\n }\n },\n}\n", "meta": {"content_hash": "18574d225594fc58aa63fbcf4f99929f", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 116, "avg_line_length": 27.282608695652176, "alnum_prop": 0.5306772908366534, "repo_name": "BigBoss424/portfolio", "id": "05482f756ed77730071cda9a6d69ce7c696c4086", "size": "1255", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "v6/node_modules/eslint-plugin-node/lib/rules/prefer-global/process.js", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "\ufeffusing System;\nusing System.Diagnostics;\n\n#pragma warning disable 1591\n// ReSharper disable UnusedMember.Global\n// ReSharper disable MemberCanBePrivate.Global\n// ReSharper disable UnusedAutoPropertyAccessor.Global\n// ReSharper disable IntroduceOptionalParameters.Global\n// ReSharper disable MemberCanBeProtected.Global\n// ReSharper disable InconsistentNaming\n\n// ReSharper disable once CheckNamespace\nnamespace StackingEntities.Model.Annotations\n{\n /// \n /// Indicates that the value of the marked element could be null sometimes,\n /// so the check 
for null is necessary before its usage\n /// \n /// \n /// [CanBeNull] public object Test() { return null; }\n /// public void UseTest() {\n /// var p = Test();\n /// var s = p.ToString(); // Warning: Possible 'System.NullReferenceException'\n /// }\n /// \n [AttributeUsage(\n AttributeTargets.Method | AttributeTargets.Parameter | AttributeTargets.Property |\n AttributeTargets.Delegate | AttributeTargets.Field | AttributeTargets.Event)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class CanBeNullAttribute : Attribute { }\n\n /// \n /// Indicates that the value of the marked element could never be null\n /// \n /// \n /// [NotNull] public object Foo() {\n /// return null; // Warning: Possible 'null' assignment\n /// }\n /// \n [AttributeUsage(\n AttributeTargets.Method | AttributeTargets.Parameter | AttributeTargets.Property |\n AttributeTargets.Delegate | AttributeTargets.Field | AttributeTargets.Event)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class NotNullAttribute : Attribute { }\n\n /// \n /// Indicates that collection or enumerable value does not contain null elements\n /// \n [AttributeUsage(\n AttributeTargets.Method | AttributeTargets.Parameter | AttributeTargets.Property |\n AttributeTargets.Delegate | AttributeTargets.Field)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class ItemNotNullAttribute : Attribute { }\n\n /// \n /// Indicates that collection or enumerable value can contain null elements\n /// \n [AttributeUsage(\n AttributeTargets.Method | AttributeTargets.Parameter | AttributeTargets.Property |\n AttributeTargets.Delegate | AttributeTargets.Field)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class ItemCanBeNullAttribute : Attribute { }\n\n /// \n /// Indicates that the marked method builds string by format pattern and (optional) arguments.\n /// Parameter, which contains format string, should be given in constructor. 
The format string\n /// should be in -like form\n /// \n /// \n /// [StringFormatMethod(\"message\")]\n /// public void ShowError(string message, params object[] args) { /* do something */ }\n /// public void Foo() {\n /// ShowError(\"Failed: {0}\"); // Warning: Non-existing argument in format string\n /// }\n /// \n [AttributeUsage(\n AttributeTargets.Constructor | AttributeTargets.Method | AttributeTargets.Delegate)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class StringFormatMethodAttribute : Attribute\n {\n /// \n /// Specifies which parameter of an annotated method should be treated as format-string\n /// \n public StringFormatMethodAttribute(string formatParameterName)\n {\n FormatParameterName = formatParameterName;\n }\n\n public string FormatParameterName { get; private set; }\n }\n\n /// \n /// For a parameter that is expected to be one of the limited set of values.\n /// Specify fields of which type should be used as values for this parameter.\n /// \n [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Property | AttributeTargets.Field)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class ValueProviderAttribute : Attribute\n {\n public ValueProviderAttribute(string name)\n {\n Name = name;\n }\n\n [NotNull] public string Name { get; private set; }\n }\n\n /// \n /// Indicates that the function argument should be string literal and match one\n /// of the parameters of the caller function. 
For example, ReSharper annotates\n /// the parameter of \n /// \n /// \n /// public void Foo(string param) {\n /// if (param == null)\n /// throw new ArgumentNullException(\"par\"); // Warning: Cannot resolve symbol\n /// }\n /// \n [AttributeUsage(AttributeTargets.Parameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class InvokerParameterNameAttribute : Attribute { }\n\n /// \n /// Indicates that the method is contained in a type that implements\n /// System.ComponentModel.INotifyPropertyChanged interface and this method\n /// is used to notify that some property value changed\n /// \n /// \n /// The method should be non-static and conform to one of the supported signatures:\n /// \n /// NotifyChanged(string)\n /// NotifyChanged(params string[])\n /// NotifyChanged{T}(Expression{Func{T}})\n /// NotifyChanged{T,U}(Expression{Func{T,U}})\n /// SetProperty{T}(ref T, T, string)\n /// \n /// \n /// \n /// public class Foo : INotifyPropertyChanged {\n /// public event PropertyChangedEventHandler PropertyChanged;\n /// [NotifyPropertyChangedInvocator]\n /// protected virtual void NotifyChanged(string propertyName) { ... 
}\n ///\n /// private string _name;\n /// public string Name {\n /// get { return _name; }\n /// set { _name = value; NotifyChanged(\"LastName\"); /* Warning */ }\n /// }\n /// }\n /// \n /// Examples of generated notifications:\n /// \n /// NotifyChanged(\"Property\")\n /// NotifyChanged(() => Property)\n /// NotifyChanged((VM x) => x.Property)\n /// SetProperty(ref myField, value, \"Property\")\n /// \n /// \n [AttributeUsage(AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class NotifyPropertyChangedInvocatorAttribute : Attribute\n {\n public NotifyPropertyChangedInvocatorAttribute() { }\n public NotifyPropertyChangedInvocatorAttribute(string parameterName)\n {\n ParameterName = parameterName;\n }\n\n public string ParameterName { get; private set; }\n }\n\n /// \n /// Describes dependency between method input and output\n /// \n /// \n ///

Function Definition Table syntax:

\n /// \n /// FDT ::= FDTRow [;FDTRow]*\n /// FDTRow ::= Input => Output | Output <= Input\n /// Input ::= ParameterName: Value [, Input]*\n /// Output ::= [ParameterName: Value]* {halt|stop|void|nothing|Value}\n /// Value ::= true | false | null | notnull | canbenull\n /// \n /// If method has single input parameter, it's name could be omitted.
\n /// Using halt (or void/nothing, which is the same)\n /// for method output means that the methos doesn't return normally.
\n /// canbenull annotation is only applicable for output parameters.
\n /// You can use multiple [ContractAnnotation] for each FDT row,\n /// or use single attribute with rows separated by semicolon.
\n ///
\n /// \n /// \n /// [ContractAnnotation(\"=> halt\")]\n /// public void TerminationMethod()\n /// \n /// \n /// [ContractAnnotation(\"halt <= condition: false\")]\n /// public void Assert(bool condition, string text) // regular assertion method\n /// \n /// \n /// [ContractAnnotation(\"s:null => true\")]\n /// public bool IsNullOrEmpty(string s) // string.IsNullOrEmpty()\n /// \n /// \n /// // A method that returns null if the parameter is null,\n /// // and not null if the parameter is not null\n /// [ContractAnnotation(\"null => null; notnull => notnull\")]\n /// public object Transform(object data) \n /// \n /// \n /// [ContractAnnotation(\"s:null=>false; =>true,result:notnull; =>false, result:null\")]\n /// public bool TryParse(string s, out Person result)\n /// \n /// \n [AttributeUsage(AttributeTargets.Method, AllowMultiple = true)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class ContractAnnotationAttribute : Attribute\n {\n public ContractAnnotationAttribute([NotNull] string contract)\n : this(contract, false) { }\n\n public ContractAnnotationAttribute([NotNull] string contract, bool forceFullStates)\n {\n Contract = contract;\n ForceFullStates = forceFullStates;\n }\n\n public string Contract { get; private set; }\n public bool ForceFullStates { get; private set; }\n }\n\n /// \n /// Indicates that marked element should be localized or not\n /// \n /// \n /// [LocalizationRequiredAttribute(true)]\n /// public class Foo {\n /// private string str = \"my string\"; // Warning: Localizable string\n /// }\n /// \n [AttributeUsage(AttributeTargets.All)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class LocalizationRequiredAttribute : Attribute\n {\n public LocalizationRequiredAttribute() : this(true) { }\n public LocalizationRequiredAttribute(bool required)\n {\n Required = required;\n }\n\n public bool Required { get; private set; }\n }\n\n /// \n /// Indicates that the value of the marked type (or its derivatives)\n /// cannot 
be compared using '==' or '!=' operators and Equals()\n /// should be used instead. However, using '==' or '!=' for comparison\n /// with null is always permitted.\n /// \n /// \n /// [CannotApplyEqualityOperator]\n /// class NoEquality { }\n /// class UsesNoEquality {\n /// public void Test() {\n /// var ca1 = new NoEquality();\n /// var ca2 = new NoEquality();\n /// if (ca1 != null) { // OK\n /// bool condition = ca1 == ca2; // Warning\n /// }\n /// }\n /// }\n /// \n [AttributeUsage(\n AttributeTargets.Interface | AttributeTargets.Class | AttributeTargets.Struct)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class CannotApplyEqualityOperatorAttribute : Attribute { }\n\n /// \n /// When applied to a target attribute, specifies a requirement for any type marked\n /// with the target attribute to implement or inherit specific type or types.\n /// \n /// \n /// [BaseTypeRequired(typeof(IComponent)] // Specify requirement\n /// public class ComponentAttribute : Attribute { }\n /// [Component] // ComponentAttribute requires implementing IComponent interface\n /// public class MyComponent : IComponent { }\n /// \n [AttributeUsage(AttributeTargets.Class, AllowMultiple = true)]\n [BaseTypeRequired(typeof(Attribute))]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class BaseTypeRequiredAttribute : Attribute\n {\n public BaseTypeRequiredAttribute([NotNull] Type baseType)\n {\n BaseType = baseType;\n }\n\n [NotNull] public Type BaseType { get; private set; }\n }\n\n /// \n /// Indicates that the marked symbol is used implicitly\n /// (e.g. 
via reflection, in external library), so this symbol\n /// will not be marked as unused (as well as by other usage inspections)\n /// \n [AttributeUsage(AttributeTargets.All)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class UsedImplicitlyAttribute : Attribute\n {\n public UsedImplicitlyAttribute()\n : this(ImplicitUseKindFlags.Default, ImplicitUseTargetFlags.Default) { }\n\n public UsedImplicitlyAttribute(ImplicitUseKindFlags useKindFlags)\n : this(useKindFlags, ImplicitUseTargetFlags.Default) { }\n\n public UsedImplicitlyAttribute(ImplicitUseTargetFlags targetFlags)\n : this(ImplicitUseKindFlags.Default, targetFlags) { }\n\n public UsedImplicitlyAttribute(\n ImplicitUseKindFlags useKindFlags, ImplicitUseTargetFlags targetFlags)\n {\n UseKindFlags = useKindFlags;\n TargetFlags = targetFlags;\n }\n\n public ImplicitUseKindFlags UseKindFlags { get; private set; }\n public ImplicitUseTargetFlags TargetFlags { get; private set; }\n }\n\n /// \n /// Should be used on attributes and causes ReSharper\n /// to not mark symbols marked with such attributes as unused\n /// (as well as by other usage inspections)\n /// \n [AttributeUsage(AttributeTargets.Class | AttributeTargets.GenericParameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class MeansImplicitUseAttribute : Attribute\n {\n public MeansImplicitUseAttribute() \n : this(ImplicitUseKindFlags.Default, ImplicitUseTargetFlags.Default) { }\n\n public MeansImplicitUseAttribute(ImplicitUseKindFlags useKindFlags)\n : this(useKindFlags, ImplicitUseTargetFlags.Default) { }\n\n public MeansImplicitUseAttribute(ImplicitUseTargetFlags targetFlags)\n : this(ImplicitUseKindFlags.Default, targetFlags) { }\n\n public MeansImplicitUseAttribute(\n ImplicitUseKindFlags useKindFlags, ImplicitUseTargetFlags targetFlags)\n {\n UseKindFlags = useKindFlags;\n TargetFlags = targetFlags;\n }\n\n [UsedImplicitly] public ImplicitUseKindFlags UseKindFlags { get; private set; }\n [UsedImplicitly] public 
ImplicitUseTargetFlags TargetFlags { get; private set; }\n }\n \n [Flags]\n public enum ImplicitUseKindFlags\n {\n Default = Access | Assign | InstantiatedWithFixedConstructorSignature,\n /// Only entity marked with attribute considered used\n Access = 1,\n /// Indicates implicit assignment to a member\n Assign = 2,\n /// \n /// Indicates implicit instantiation of a type with fixed constructor signature.\n /// That means any unused constructor parameters won't be reported as such.\n /// \n InstantiatedWithFixedConstructorSignature = 4,\n /// Indicates implicit instantiation of a type\n InstantiatedNoFixedConstructorSignature = 8,\n }\n\n /// \n /// Specify what is considered used implicitly when marked\n /// with or \n /// \n [Flags]\n public enum ImplicitUseTargetFlags\n {\n Default = Itself,\n Itself = 1,\n /// Members of entity marked with attribute are considered used\n Members = 2,\n /// Entity marked with attribute and all its members considered used\n WithMembers = Itself | Members\n }\n\n /// \n /// This attribute is intended to mark publicly available API\n /// which should not be removed and so is treated as used\n /// \n [MeansImplicitUse]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class PublicAPIAttribute : Attribute\n {\n public PublicAPIAttribute() { }\n public PublicAPIAttribute([NotNull] string comment)\n {\n Comment = comment;\n }\n\n public string Comment { get; private set; }\n }\n\n /// \n /// Tells code analysis engine if the parameter is completely handled\n /// when the invoked method is on stack. 
If the parameter is a delegate,\n /// indicates that delegate is executed while the method is executed.\n /// If the parameter is an enumerable, indicates that it is enumerated\n /// while the method is executed\n /// \n [AttributeUsage(AttributeTargets.Parameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class InstantHandleAttribute : Attribute { }\n\n /// \n /// Indicates that a method does not make any observable state changes.\n /// The same as System.Diagnostics.Contracts.PureAttribute\n /// \n /// \n /// [Pure] private int Multiply(int x, int y) { return x * y; }\n /// public void Foo() {\n /// const int a = 2, b = 2;\n /// Multiply(a, b); // Waring: Return value of pure method is not used\n /// }\n /// \n [AttributeUsage(AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class PureAttribute : Attribute { }\n\n /// \n /// Indicates that a parameter is a path to a file or a folder within a web project.\n /// Path can be relative or absolute, starting from web root (~)\n /// \n [AttributeUsage(AttributeTargets.Parameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public class PathReferenceAttribute : Attribute\n {\n public PathReferenceAttribute() { }\n public PathReferenceAttribute([PathReference] string basePath)\n {\n BasePath = basePath;\n }\n\n public string BasePath { get; private set; }\n }\n\n [AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcAreaMasterLocationFormatAttribute : Attribute\n {\n public AspMvcAreaMasterLocationFormatAttribute(string format)\n {\n Format = format;\n }\n\n public string Format { get; private set; }\n }\n\n [AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcAreaPartialViewLocationFormatAttribute : Attribute\n {\n public AspMvcAreaPartialViewLocationFormatAttribute(string format)\n {\n Format = 
format;\n }\n\n public string Format { get; private set; }\n }\n\n [AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcAreaViewLocationFormatAttribute : Attribute\n {\n public AspMvcAreaViewLocationFormatAttribute(string format)\n {\n Format = format;\n }\n\n public string Format { get; private set; }\n }\n\n [AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcMasterLocationFormatAttribute : Attribute\n {\n public AspMvcMasterLocationFormatAttribute(string format)\n {\n Format = format;\n }\n\n public string Format { get; private set; }\n }\n\n [AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcPartialViewLocationFormatAttribute : Attribute\n {\n public AspMvcPartialViewLocationFormatAttribute(string format)\n {\n Format = format;\n }\n\n public string Format { get; private set; }\n }\n\n [AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcViewLocationFormatAttribute : Attribute\n {\n public AspMvcViewLocationFormatAttribute(string format)\n {\n Format = format;\n }\n\n public string Format { get; private set; }\n }\n \n /// \n /// ASP.NET MVC attribute. If applied to a parameter, indicates that the parameter\n /// is an MVC action. If applied to a method, the MVC action name is calculated\n /// implicitly from the context. 
Use this attribute for custom wrappers similar to\n /// System.Web.Mvc.Html.ChildActionExtensions.RenderAction(HtmlHelper, String)\n /// \n [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcActionAttribute : Attribute\n {\n public AspMvcActionAttribute() { }\n public AspMvcActionAttribute(string anonymousProperty)\n {\n AnonymousProperty = anonymousProperty;\n }\n\n public string AnonymousProperty { get; private set; }\n }\n\n /// \n /// ASP.NET MVC attribute. Indicates that a parameter is an MVC area.\n /// Use this attribute for custom wrappers similar to\n /// System.Web.Mvc.Html.ChildActionExtensions.RenderAction(HtmlHelper, String)\n /// \n [AttributeUsage(AttributeTargets.Parameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcAreaAttribute : PathReferenceAttribute\n {\n public AspMvcAreaAttribute() { }\n public AspMvcAreaAttribute(string anonymousProperty)\n {\n AnonymousProperty = anonymousProperty;\n }\n\n public string AnonymousProperty { get; private set; }\n }\n\n /// \n /// ASP.NET MVC attribute. If applied to a parameter, indicates that the parameter is\n /// an MVC controller. If applied to a method, the MVC controller name is calculated\n /// implicitly from the context. Use this attribute for custom wrappers similar to\n /// System.Web.Mvc.Html.ChildActionExtensions.RenderAction(HtmlHelper, String, String)\n /// \n [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcControllerAttribute : Attribute\n {\n public AspMvcControllerAttribute() { }\n public AspMvcControllerAttribute(string anonymousProperty)\n {\n AnonymousProperty = anonymousProperty;\n }\n\n public string AnonymousProperty { get; private set; }\n }\n\n /// \n /// ASP.NET MVC attribute. Indicates that a parameter is an MVC Master. 
Use this attribute\n /// for custom wrappers similar to System.Web.Mvc.Controller.View(String, String)\n /// \n [AttributeUsage(AttributeTargets.Parameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcMasterAttribute : Attribute { }\n\n /// \n /// ASP.NET MVC attribute. Indicates that a parameter is an MVC model type. Use this attribute\n /// for custom wrappers similar to System.Web.Mvc.Controller.View(String, Object)\n /// \n [AttributeUsage(AttributeTargets.Parameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcModelTypeAttribute : Attribute { }\n\n /// \n /// ASP.NET MVC attribute. If applied to a parameter, indicates that the parameter is an MVC\n /// partial view. If applied to a method, the MVC partial view name is calculated implicitly\n /// from the context. Use this attribute for custom wrappers similar to\n /// System.Web.Mvc.Html.RenderPartialExtensions.RenderPartial(HtmlHelper, String)\n /// \n [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcPartialViewAttribute : PathReferenceAttribute { }\n\n /// \n /// ASP.NET MVC attribute. Allows disabling inspections for MVC views within a class or a method\n /// \n [AttributeUsage(AttributeTargets.Class | AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcSupressViewErrorAttribute : Attribute { }\n\n /// \n /// ASP.NET MVC attribute. Indicates that a parameter is an MVC display template.\n /// Use this attribute for custom wrappers similar to \n /// System.Web.Mvc.Html.DisplayExtensions.DisplayForModel(HtmlHelper, String)\n /// \n [AttributeUsage(AttributeTargets.Parameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcDisplayTemplateAttribute : Attribute { }\n\n /// \n /// ASP.NET MVC attribute. 
Indicates that a parameter is an MVC editor template.\n /// Use this attribute for custom wrappers similar to\n /// System.Web.Mvc.Html.EditorExtensions.EditorForModel(HtmlHelper, String)\n /// \n [AttributeUsage(AttributeTargets.Parameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcEditorTemplateAttribute : Attribute { }\n\n /// \n /// ASP.NET MVC attribute. Indicates that a parameter is an MVC template.\n /// Use this attribute for custom wrappers similar to\n /// System.ComponentModel.DataAnnotations.UIHintAttribute(System.String)\n /// \n [AttributeUsage(AttributeTargets.Parameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcTemplateAttribute : Attribute { }\n\n /// \n /// ASP.NET MVC attribute. If applied to a parameter, indicates that the parameter\n /// is an MVC view. If applied to a method, the MVC view name is calculated implicitly\n /// from the context. Use this attribute for custom wrappers similar to\n /// System.Web.Mvc.Controller.View(Object)\n /// \n [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcViewAttribute : PathReferenceAttribute { }\n\n /// \n /// ASP.NET MVC attribute. 
When applied to a parameter of an attribute,\n /// indicates that this parameter is an MVC action name\n /// \n /// \n /// [ActionName(\"Foo\")]\n /// public ActionResult Login(string returnUrl) {\n /// ViewBag.ReturnUrl = Url.Action(\"Foo\"); // OK\n /// return RedirectToAction(\"Bar\"); // Error: Cannot resolve action\n /// }\n /// \n [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Property)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AspMvcActionSelectorAttribute : Attribute { }\n\n [AttributeUsage(\n AttributeTargets.Parameter | AttributeTargets.Property | AttributeTargets.Field)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class HtmlElementAttributesAttribute : Attribute\n {\n public HtmlElementAttributesAttribute() { }\n public HtmlElementAttributesAttribute(string name)\n {\n Name = name;\n }\n\n public string Name { get; private set; }\n }\n\n [AttributeUsage(\n AttributeTargets.Parameter | AttributeTargets.Field | AttributeTargets.Property)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class HtmlAttributeValueAttribute : Attribute\n {\n public HtmlAttributeValueAttribute([NotNull] string name)\n {\n Name = name;\n }\n\n [NotNull] public string Name { get; private set; }\n }\n\n /// \n /// Razor attribute. 
Indicates that a parameter or a method is a Razor section.\n /// Use this attribute for custom wrappers similar to \n /// System.Web.WebPages.WebPageBase.RenderSection(String)\n /// \n [AttributeUsage(AttributeTargets.Parameter | AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class RazorSectionAttribute : Attribute { }\n\n /// \n /// Indicates how method invocation affects content of the collection\n /// \n [AttributeUsage(AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class CollectionAccessAttribute : Attribute\n {\n public CollectionAccessAttribute(CollectionAccessType collectionAccessType)\n {\n CollectionAccessType = collectionAccessType;\n }\n\n public CollectionAccessType CollectionAccessType { get; private set; }\n }\n\n [Flags]\n public enum CollectionAccessType\n {\n /// Method does not use or modify content of the collection\n None = 0,\n /// Method only reads content of the collection but does not modify it\n Read = 1,\n /// Method can change content of the collection but does not add new elements\n ModifyExistingContent = 2,\n /// Method can add new elements to the collection\n UpdatedContent = ModifyExistingContent | 4\n }\n\n /// \n /// Indicates that the marked method is assertion method, i.e. it halts control flow if\n /// one of the conditions is satisfied. To set the condition, mark one of the parameters with \n /// attribute\n /// \n [AttributeUsage(AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AssertionMethodAttribute : Attribute { }\n\n /// \n /// Indicates the condition parameter of the assertion method. The method itself should be\n /// marked by attribute. 
The mandatory argument of\n /// the attribute is the assertion type.\n /// \n [AttributeUsage(AttributeTargets.Parameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class AssertionConditionAttribute : Attribute\n {\n public AssertionConditionAttribute(AssertionConditionType conditionType)\n {\n ConditionType = conditionType;\n }\n\n public AssertionConditionType ConditionType { get; private set; }\n }\n\n /// \n /// Specifies assertion type. If the assertion method argument satisfies the condition,\n /// then the execution continues. Otherwise, execution is assumed to be halted\n /// \n public enum AssertionConditionType\n {\n /// Marked parameter should be evaluated to true\n IS_TRUE = 0,\n /// Marked parameter should be evaluated to false\n IS_FALSE = 1,\n /// Marked parameter should be evaluated to null value\n IS_NULL = 2,\n /// Marked parameter should be evaluated to not null value\n IS_NOT_NULL = 3,\n }\n\n /// \n /// Indicates that the marked method unconditionally terminates control flow execution.\n /// For example, it could unconditionally throw exception\n /// \n [Obsolete(\"Use [ContractAnnotation('=> halt')] instead\")]\n [AttributeUsage(AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class TerminatesProgramAttribute : Attribute { }\n\n /// \n /// Indicates that method is pure LINQ method, with postponed enumeration (like Enumerable.Select,\n /// .Where). 
This annotation allows inference of [InstantHandle] annotation for parameters\n /// of delegate type by analyzing LINQ method chains.\n /// \n [AttributeUsage(AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class LinqTunnelAttribute : Attribute { }\n\n /// \n /// Indicates that IEnumerable, passed as parameter, is not enumerated.\n /// \n [AttributeUsage(AttributeTargets.Parameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class NoEnumerationAttribute : Attribute { }\n\n /// \n /// XAML attribute. Indicates the type that has ItemsSource property and should be\n /// treated as ItemsControl-derived type, to enable inner items DataContext\n /// type resolve.\n /// \n [AttributeUsage(AttributeTargets.Class)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class XamlItemsControlAttribute : Attribute { }\n\n /// \n /// XAML attibute. Indicates the property of some BindingBase-derived type, that\n /// is used to bind some item of ItemsControl-derived type. 
This annotation will\n /// enable the DataContext type resolve for XAML bindings for such properties.\n /// \n /// \n /// Property should have the tree ancestor of the ItemsControl type or\n /// marked with the attribute.\n /// \n [AttributeUsage(AttributeTargets.Property)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class XamlItemBindingOfItemsControlAttribute : Attribute { }\n\n [AttributeUsage(AttributeTargets.Class, AllowMultiple = true)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public class AspChildControlTypeAttribute : Attribute\n {\n public AspChildControlTypeAttribute(string tagName, Type controlType)\n {\n TagName = tagName;\n ControlType = controlType;\n }\n\n public string TagName { get; private set; }\n public Type ControlType { get; private set; }\n }\n\n [AttributeUsage(AttributeTargets.Property | AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public class AspDataFieldAttribute : Attribute { }\n\n [AttributeUsage(AttributeTargets.Property | AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public class AspDataFieldsAttribute : Attribute { }\n\n [AttributeUsage(AttributeTargets.Property)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public class AspMethodPropertyAttribute : Attribute { }\n\n [AttributeUsage(AttributeTargets.Class, AllowMultiple = true)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public class AspRequiredAttributeAttribute : Attribute\n {\n public AspRequiredAttributeAttribute([NotNull] string attribute)\n {\n Attribute = attribute;\n }\n\n public string Attribute { get; private set; }\n }\n\n [AttributeUsage(AttributeTargets.Property)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public class AspTypePropertyAttribute : Attribute\n {\n public bool CreateConstructorReferences { get; private set; }\n\n public AspTypePropertyAttribute(bool createConstructorReferences)\n {\n CreateConstructorReferences = createConstructorReferences;\n }\n }\n\n 
[AttributeUsage(AttributeTargets.Assembly, AllowMultiple = true)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class RazorImportNamespaceAttribute : Attribute\n {\n public RazorImportNamespaceAttribute(string name)\n {\n Name = name;\n }\n\n public string Name { get; private set; }\n }\n\n [AttributeUsage(AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class RazorHelperCommonAttribute : Attribute { }\n\n [AttributeUsage(AttributeTargets.Property)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class RazorLayoutAttribute : Attribute { }\n\n [AttributeUsage(AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class RazorWriteLiteralMethodAttribute : Attribute { }\n\n [AttributeUsage(AttributeTargets.Method)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class RazorWriteMethodAttribute : Attribute { }\n\n [AttributeUsage(AttributeTargets.Parameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class RazorWriteMethodParameterAttribute : Attribute { }\n\n /// \n /// Indicates that parameter is regular expression pattern.\n /// \n [AttributeUsage(AttributeTargets.Parameter)]\n [Conditional(\"JETBRAINS_ANNOTATIONS\")]\n public sealed class RegexPatternAttribute : Attribute { }\n\n}", "meta": {"content_hash": "5971dcb4d1b4e83d0efc46f35bf6e02c", "timestamp": "", "source": "github", "line_count": 906, "max_line_length": 100, "avg_line_length": 37.67439293598234, "alnum_prop": 0.7113936659537691, "repo_name": "Sidneys1/MinecraftMultitool", "id": "ea30d7cff35bf1fc06f85081954a6d11e0e98a7c", "size": "34135", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "StackingEntities.Model/Properties/Annotations.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "298077"}]}} {"text": "\n\n\n\n Class\n \n \n\n\n\n\n\n\n\n\n\n \n \n
\n
\n \n\n\n \n\n\n \n \n\n\n \n\n\n \n \n
Methods
\n
\n \n
C
\n
\n \n
\n \n
S
\n
\n \n
\n \n
\n \n\n \n\n\n\n \n\n \n\n \n\n \n\n\n \n\n\n \n \n
Instance Public methods
\n \n
\n
\n \n class_attribute(*attrs)\n \n Link\n
\n \n \n
\n

Declare a class-level attribute whose value is inheritable by subclasses.\nSubclasses can change their own value and it will not impact parent class.

\n\n
class Base\n  class_attribute :setting\nend\n\nclass Subclass < Base\nend\n\nBase.setting = true\nSubclass.setting            # => true\nSubclass.setting = false\nSubclass.setting            # => false\nBase.setting                # => true\n
\n\n

In the above case as long as Subclass does not assign a value to setting by\nperforming Subclass.setting = something ,\nSubclass.setting would read value assigned to parent class.\nOnce Subclass assigns a value then the value assigned by Subclass would be\nreturned.

\n\n

This matches normal Ruby method inheritance: think of writing an attribute\non a subclass as overriding the reader method. However, you need to be\naware when using class_attribute with mutable structures as\nArray or Hash. In such cases, you don't want\nto do changes in places but use setters:

\n\n
Base.setting = []\nBase.setting                # => []\nSubclass.setting            # => []\n\n# Appending in child changes both parent and child because it is the same object:\nSubclass.setting << :foo\nBase.setting               # => [:foo]\nSubclass.setting           # => [:foo]\n\n# Use setters to not propagate changes:\nBase.setting = []\nSubclass.setting += [:foo]\nBase.setting               # => []\nSubclass.setting           # => [:foo]\n
\n\n

For convenience, an instance predicate method is defined as well. To skip\nit, pass instance_predicate: false.

\n\n
Subclass.setting?       # => false\n
\n\n

Instances may overwrite the class value in the same way:

\n\n
Base.setting = true\nobject = Base.new\nobject.setting          # => true\nobject.setting = false\nobject.setting          # => false\nBase.setting            # => true\n
\n\n

To opt out of the instance reader method, pass instance_reader:\nfalse.

\n\n
object.setting          # => NoMethodError\nobject.setting?         # => NoMethodError\n
\n\n

To opt out of the instance writer method, pass instance_writer:\nfalse.

\n\n
object.setting = false  # => NoMethodError\n
\n\n

To opt out of both instance methods, pass instance_accessor:\nfalse.

\n
\n \n \n \n\n \n \n \n \n
\n \n

\n Source: \n show\n \n

\n
\n
# File ../.rvm/gems/ruby-2.2.0/gems/activesupport-4.1.8/lib/active_support/core_ext/class/attribute.rb, line 71\ndef class_attribute(*attrs)\n  options = attrs.extract_options!\n  instance_reader = options.fetch(:instance_accessor, true) && options.fetch(:instance_reader, true)\n  instance_writer = options.fetch(:instance_accessor, true) && options.fetch(:instance_writer, true)\n  instance_predicate = options.fetch(:instance_predicate, true)\n\n  attrs.each do |name|\n    define_singleton_method(name) { nil }\n    define_singleton_method("#{name}?") { !!public_send(name) } if instance_predicate\n\n    ivar = "@#{name}"\n\n    define_singleton_method("#{name}=") do |val|\n      singleton_class.class_eval do\n        remove_possible_method(name)\n        define_method(name) { val }\n      end\n\n      if singleton_class?\n        class_eval do\n          remove_possible_method(name)\n          define_method(name) do\n            if instance_variable_defined? ivar\n              instance_variable_get ivar\n            else\n              singleton_class.send name\n            end\n          end\n        end\n      end\n      val\n    end\n\n    if instance_reader\n      remove_possible_method name\n      define_method(name) do\n        if instance_variable_defined?(ivar)\n          instance_variable_get ivar\n        else\n          self.class.public_send name\n        end\n      end\n      define_method("#{name}?") { !!public_send(name) } if instance_predicate\n    end\n\n    attr_writer name if instance_writer\n  end\nend
\n
\n
\n \n
\n \n
\n
\n \n subclasses()\n \n Link\n
\n \n \n
\n

Returns an array with the direct children of self.

\n\n
Integer.subclasses # => [Fixnum, Bignum]\n\nclass Foo; end\nclass Bar < Foo; end\nclass Baz < Bar; end\n\nFoo.subclasses # => [Bar]\n
\n
\n \n \n \n\n \n \n \n \n
\n \n

\n Source: \n show\n \n

\n
\n
# File ../.rvm/gems/ruby-2.2.0/gems/activesupport-4.1.8/lib/active_support/core_ext/class/subclasses.rb, line 35\ndef subclasses\n  subclasses, chain = [], descendants\n  chain.each do |k|\n    subclasses << k unless chain.any? { |c| c > k }\n  end\n  subclasses\nend
\n
\n
\n \n
\n \n
\n
\n \n superclass_delegating_accessor(name, options = {})\n \n Link\n
\n \n \n
\n \n
\n \n \n \n\n \n \n \n \n
\n \n

\n Source: \n show\n \n

\n
\n
# File ../.rvm/gems/ruby-2.2.0/gems/activesupport-4.1.8/lib/active_support/core_ext/class/delegating_attributes.rb, line 5\ndef superclass_delegating_accessor(name, options = {})\n  # Create private _name and _name= methods that can still be used if the public\n  # methods are overridden.\n  _superclass_delegating_accessor("_#{name}", options)\n\n  # Generate the public methods name, name=, and name?.\n  # These methods dispatch to the private _name, and _name= methods, making them\n  # overridable.\n  singleton_class.send(:define_method, name) { send("_#{name}") }\n  singleton_class.send(:define_method, "#{name}?") { !!send("_#{name}") }\n  singleton_class.send(:define_method, "#{name}=") { |value| send("_#{name}=", value) }\n\n  # If an instance_reader is needed, generate public instance methods name and name?.\n  if options[:instance_reader] != false\n    define_method(name) { send("_#{name}") }\n    define_method("#{name}?") { !!send("#{name}") }\n  end\nend
\n
\n
\n \n
\n
\n\n
\n \n ", "meta": {"content_hash": "70b72cfb262e58a9070a311e29611ac4", "timestamp": "", "source": "github", "line_count": 378, "max_line_length": 493, "avg_line_length": 50.55291005291005, "alnum_prop": 0.62033596734523, "repo_name": "kristoferrobin/p2p", "id": "0e6aba409bb7dad702c95b3e19a0a2a0d83ce133", "size": "19109", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "doc/api/classes/Class.html", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "1033"}, {"name": "CoffeeScript", "bytes": "422"}, {"name": "HTML", "bytes": "5432"}, {"name": "JavaScript", "bytes": "664"}, {"name": "Ruby", "bytes": "24321"}]}} {"text": "matches = new RouteMatch(array(\n 'foo' => 'bar',\n 'baz' => 'inga',\n ));\n $this->query = new Parameters(array(\n 'foo' => 'bar',\n 'baz' => 'inga',\n ));\n\n $this->event = new ResourceEvent();\n }\n\n public function testRouteMatchIsNullByDefault()\n {\n $this->assertNull($this->event->getRouteMatch());\n }\n\n public function testQueryParamsAreNullByDefault()\n {\n $this->assertNull($this->event->getQueryParams());\n }\n\n public function testRouteMatchIsMutable()\n {\n $this->event->setRouteMatch($this->matches);\n $this->assertSame($this->matches, $this->event->getRouteMatch());\n return $this->event;\n }\n\n public function testQueryParamsAreMutable()\n {\n $this->event->setQueryParams($this->query);\n $this->assertSame($this->query, $this->event->getQueryParams());\n return $this->event;\n }\n\n public function testRequestIsNullByDefault()\n {\n $this->assertNull($this->event->getRequest());\n }\n\n public function testRequestIsMutable()\n {\n $request = new HttpRequest();\n $this->event->setRequest($request);\n $this->assertSame($request, $this->event->getRequest());\n return $this->event;\n }\n\n /**\n * @depends testRouteMatchIsMutable\n */\n public function testRouteMatchIsNullable(ResourceEvent $event)\n {\n $event->setRouteMatch(null);\n $this->assertNull($event->getRouteMatch());\n }\n\n /**\n * 
@depends testQueryParamsAreMutable\n */\n public function testQueryParamsAreNullable(ResourceEvent $event)\n {\n $event->setQueryParams(null);\n $this->assertNull($event->getQueryParams());\n }\n\n /**\n * @depends testRequestIsMutable\n */\n public function testRequestIsNullable(ResourceEvent $event)\n {\n $event->setRequest(null);\n $this->assertNull($event->getRequest());\n }\n\n public function testCanInjectRequestViaSetParams()\n {\n $request = new HttpRequest();\n $this->event->setParams(array('request' => $request));\n $this->assertSame($request, $this->event->getRequest());\n }\n\n public function testCanFetchIndividualRouteParameter()\n {\n $this->event->setRouteMatch($this->matches);\n $this->assertEquals('bar', $this->event->getRouteParam('foo'));\n $this->assertEquals('inga', $this->event->getRouteParam('baz'));\n }\n\n public function testCanFetchIndividualQueryParameter()\n {\n $this->event->setQueryParams($this->query);\n $this->assertEquals('bar', $this->event->getQueryParam('foo'));\n $this->assertEquals('inga', $this->event->getQueryParam('baz'));\n }\n\n public function testReturnsDefaultParameterWhenPullingUnknownRouteParameter()\n {\n $this->assertNull($this->event->getRouteParam('foo'));\n $this->assertEquals('bat', $this->event->getRouteParam('baz', 'bat'));\n }\n\n public function testReturnsDefaultParameterWhenPullingUnknownQueryParameter()\n {\n $this->assertNull($this->event->getQueryParam('foo'));\n $this->assertEquals('bat', $this->event->getQueryParam('baz', 'bat'));\n }\n\n public function testInputFilterIsUndefinedByDefault()\n {\n $this->assertNull($this->event->getInputFilter());\n }\n\n /**\n * @depends testInputFilterIsUndefinedByDefault\n */\n public function testCanComposeInputFilter()\n {\n $inputFilter = new InputFilter();\n $this->event->setInputFilter($inputFilter);\n $this->assertSame($inputFilter, $this->event->getInputFilter());\n }\n\n /**\n * @depends testCanComposeInputFilter\n */\n public function 
testCanNullifyInputFilter()\n {\n $this->event->setInputFilter(null);\n $this->assertNull($this->event->getInputFilter());\n }\n\n public function testIdentityIsUndefinedByDefault()\n {\n $this->assertNull($this->event->getIdentity());\n }\n\n /**\n * @depends testIdentityIsUndefinedByDefault\n */\n public function testCanComposeIdentity()\n {\n $identity = new GuestIdentity();\n $this->event->setIdentity($identity);\n $this->assertSame($identity, $this->event->getIdentity());\n }\n\n /**\n * @depends testCanComposeIdentity\n */\n public function testCanNullifyIdentity()\n {\n $this->event->setIdentity(null);\n $this->assertNull($this->event->getIdentity());\n }\n}\n", "meta": {"content_hash": "57c599ca684af99478622a283893fb6f", "timestamp": "", "source": "github", "line_count": 174, "max_line_length": 81, "avg_line_length": 27.804597701149426, "alnum_prop": 0.6242248863166597, "repo_name": "nocvp/zf-rest", "id": "3d3a6c0814ca8925fcb6a3fb7b864b16d86184fa", "size": "4998", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "test/ResourceEventTest.php", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "PHP", "bytes": "211249"}]}} {"text": "package search\n\nimport (\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\n\t\"github.com/sebas7dk/go-recipes/config\"\n\t\"github.com/sebas7dk/go-recipes/models\"\n\telastigo \"github.com/mattbaird/elastigo/lib\"\n)\n\ntype Connection struct {\n\tConn *elastigo.Conn\n}\n\nvar index string\n\n//SetIndex set the index name\nfunc SetIndex(i string) {\n\tindex = i\n}\n\n//NewConnection create a new Elastic Search connection\nfunc NewConnection() (*Connection, error) {\n\tc := elastigo.NewConn()\n\tif c == nil {\n\t\treturn nil, errors.New(\"Unable to connect to Elastic Search\")\n\t}\n\n\tc.Domain = config.Get(\"ES_DOMAIN\")\n\tc.Port = config.Get(\"ES_PORT\")\n\n\tconn := &Connection{Conn: c}\n\n\treturn conn, nil\n}\n\n//Show all the docs in the index\nfunc (c *Connection) Show() ([]models.Recipe, 
error) {\n\tsearchJSON := `{\n \"query\" : {\n \"match_all\" : {}\n }\n }`\n\n\to, err := c.Conn.Search(index, \"recipe\", nil, searchJSON)\n\tr := BuildResults(o.Hits.Hits)\n\n\treturn r, err\n}\n\n//GetById show the doc by id\nfunc (c *Connection) GetById(id string) (*models.Recipe, error) {\n\tvar recipe *models.Recipe\n\n\to, err := c.Conn.Get(index, \"recipe\", id, nil)\n\n\tif err == nil {\n\t\tjson.Unmarshal(*o.Source, &recipe)\n\t\trecipe.Id = o.Id\n\t}\n\n\treturn recipe, err\n}\n\n//Create a new doc\nfunc (c *Connection) Create(r models.Recipe) (elastigo.BaseResponse, error) {\n\treturn c.Conn.Index(index, \"recipe\", \"\", nil, r)\n}\n\n//Update a doc by id\nfunc (c *Connection) Update(id string, r models.Recipe) (elastigo.BaseResponse, error) {\n\treturn c.Conn.Index(index, \"recipe\", id, nil, r)\n}\n\n//Query the index and match the search term\nfunc (c *Connection) Query(s string) ([]models.Recipe, error) {\n\tsearchJSON := fmt.Sprintf(`{\n\t \"query\" : {\n\t \"multi_match\": {\n\t \"query\" : \"%s\",\n\t \"fields\" : [\"title^50\", \"category^30\", \"instructions^25\", \"ingredients^20\"]\n\t }\n\t }\n\t}`, s)\n\n\to, err := c.Conn.Search(index, \"recipe\", nil, searchJSON)\n\tr := BuildResults(o.Hits.Hits)\n\n\treturn r, err\n}\n\n//Delete a doc from the index\nfunc (c *Connection) Delete(id string) (elastigo.BaseResponse, error) {\n\treturn c.Conn.Delete(index, \"recipe\", id, nil)\n}\n\n//DeleteIndex alll docs from the index\nfunc (c *Connection) DeleteIndex() (elastigo.BaseResponse, error) {\n\treturn c.Conn.DeleteIndex(index)\n}\n\n//BuildResults loop through the hits based on the total hits\nfunc BuildResults(recipes []elastigo.Hit) []models.Recipe {\n\tvar recipe models.Recipe\n\trs := make(models.Recipes, 0)\n\n\tfor _, r := range recipes {\n\t\tif err := json.Unmarshal(*r.Source, &recipe); err == nil {\n\t\t\trecipe.Id = r.Id\n\t\t\trs = append(rs, recipe)\n\t\t}\n\t}\n\n\treturn rs\n}\n", "meta": {"content_hash": 
"69827d11b5629dfd1a9c8eae8f826f3f", "timestamp": "", "source": "github", "line_count": 117, "max_line_length": 88, "avg_line_length": 22.102564102564102, "alnum_prop": 0.6550657385924207, "repo_name": "sebas7dk/go-recipes", "id": "1630cef66c4fa5730b0e0761ecea66022a7846a5", "size": "2586", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "search/search.go", "mode": "33188", "license": "mit", "language": [{"name": "Go", "bytes": "12353"}, {"name": "Makefile", "bytes": "277"}]}} {"text": "(function() {\n var ArrayIterator, IteratorOutput, PairwiseArrayIterator, __assert_unfinished__, __throw_iterator_finished__;\n\n __throw_iterator_finished__ = function() {\n throw new Error(\"Iterator has already finished\");\n };\n\n __assert_unfinished__ = function(iteratorOutput) {\n if (iteratorOutput.done) {\n return __throw_iterator_finished__();\n }\n };\n\n IteratorOutput = (function() {\n function IteratorOutput(value) {\n this.value = value;\n this.done = false;\n }\n\n return IteratorOutput;\n\n })();\n\n ArrayIterator = (function() {\n function ArrayIterator(array) {\n this.array = array;\n this.index = 0;\n this.out = new IteratorOutput;\n }\n\n ArrayIterator.prototype.next = function() {\n var array, index, out;\n array = this.array, index = this.index, out = this.out;\n __assert_unfinished__(out);\n if (index < array.length) {\n out.value = array[index];\n } else {\n out.value = void 0;\n out.done = true;\n }\n this.index += 1;\n return out;\n };\n\n return ArrayIterator;\n\n })();\n\n PairwiseArrayIterator = (function() {\n function PairwiseArrayIterator(array) {\n this.array = array;\n if (array.length % 2) {\n throw new Error(\"Odd number of elements\");\n }\n this.index = 0;\n this.out = new IteratorOutput([]);\n }\n\n PairwiseArrayIterator.prototype.next = function() {\n var array, index, out;\n array = this.array, index = this.index, out = this.out;\n __assert_unfinished__(out);\n if (index < array.length) {\n out.value[0] = array[index];\n 
out.value[1] = array[index + 1];\n } else {\n out.value[0] = void 0;\n out.value[1] = void 0;\n out.done = true;\n }\n this.index += 2;\n return out;\n };\n\n return PairwiseArrayIterator;\n\n })();\n\n module.exports = {\n ArrayIterator: ArrayIterator,\n PairwiseArrayIterator: PairwiseArrayIterator\n };\n\n}).call(this);\n", "meta": {"content_hash": "dd30bf201b9e83f436ec53e5dfd58a61", "timestamp": "", "source": "github", "line_count": 84, "max_line_length": 111, "avg_line_length": 23.678571428571427, "alnum_prop": 0.5811965811965812, "repo_name": "nickfargo/pim", "id": "5104b9cd1fe9faaea965347c580d7c37e3c8142f", "size": "2024", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/array-iterators.js", "mode": "33188", "license": "mit", "language": [{"name": "Shell", "bytes": "471"}]}} {"text": "\n\n \n \n \n projective-geometry: Not compatible \ud83d\udc7c\n \n \n \n \n \n \n \n \n \n \n
\n \n
\n
\n
\n \u00ab Up\n

\n projective-geometry\n \n 8.5.0\n Not compatible \ud83d\udc7c\n \n

\n

\ud83d\udcc5 (2022-04-08 16:37:15 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed # Synopsis\nbase-bigarray       base\nbase-threads        base\nbase-unix           base\ncamlp5              7.14        Preprocessor-pretty-printer of OCaml\nconf-findutils      1           Virtual package relying on findutils\nconf-perl           2           Virtual package relying on perl\ncoq                 8.8.1       Formal proof management system\nnum                 1.4         The legacy Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.09.1      The OCaml compiler (virtual package)\nocaml-base-compiler 4.09.1      Official release 4.09.1\nocaml-config        1           OCaml Switch Configuration\nocamlfind           1.9.3       A library manager for OCaml\n# opam file:\nopam-version: "2.0"\nmaintainer: "matej.kosik@inria.fr"\nhomepage: "https://github.com/coq-contribs/projective-geometry"\nlicense: "Proprietary"\nbuild: [make "-j%{jobs}%"]\ninstall: [make "install"]\nremove: ["rm" "-R" "%{lib}%/coq/user-contrib/ProjectiveGeometry"]\ndepends: [\n  "ocaml"\n  "coq" {>= "8.5" & < "8.6~"}\n]\ntags: [\n  "keyword:geometry"\n  "keyword:projective"\n  "keyword:Fano"\n  "keyword:homogeneous coordinates model"\n  "keyword:flat"\n  "keyword:rank"\n  "keyword:Desargues"\n  "keyword:Moulton"\n  "category:Mathematics/Geometry/General"\n  "date:2009-10"\n]\nauthors: [ "Nicolas Magaud <Nicolas.Magaud@lsiit-cnrs.unistra.fr>" "Julien Narboux <Julien.Narboux@lsiit-cnrs.unistra.fr>" "Pascal Schreck <Pascal.Schreck@lsiit-cnrs.unistra.fr>" ]\nbug-reports: "https://github.com/coq-contribs/projective-geometry/issues"\ndev-repo: "git+https://github.com/coq-contribs/projective-geometry.git"\nsynopsis: "Projective Geometry"\ndescription: """\nThis contributions contains elements of formalization of projective geometry.\nIn the plane:\nTwo axiom systems are shown equivalent. We prove some results about the\ndecidability of the the incidence and equality predicates. 
The classic\nnotion of duality between points and lines is formalized thanks to a\nfunctor. The notion of 'flat' is defined and flats are characterized.\nFano's plane, the smallest projective plane is defined. We show that Fano's plane is desarguesian.\nIn the space:\nWe prove Desargues' theorem."""\nflags: light-uninstall\nurl {\n  src:\n    "https://github.com/coq-contribs/projective-geometry/archive/v8.5.0.tar.gz"\n  checksum: "md5=b538b6b3caec10362391436a1f91d9c9"\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install \ud83c\udfdc\ufe0f

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-projective-geometry.8.5.0 coq.8.8.1
\n
Return code
\n
5120
\n
Output
\n
[NOTE] Package coq is already installed (current version is 8.8.1).\nThe following dependencies couldn't be met:\n  - coq-projective-geometry -> coq < 8.6~ -> ocaml < 4.06.0\n      base of this switch (use `--unlock-base' to force)\nNo solution found, exiting\n
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
opam remove -y coq; opam install -y --show-action --unlock-base coq-projective-geometry.8.5.0
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Install \ud83d\ude80

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Installation size

\n

No files were installed.

\n

Uninstall \ud83e\uddf9

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub \u00a9 Guillaume Claret \ud83d\udc23\n

\n
\n
\n \n \n \n\n", "meta": {"content_hash": "43f99f6705bde8fc87362a78d175e415", "timestamp": "", "source": "github", "line_count": 183, "max_line_length": 228, "avg_line_length": 42.295081967213115, "alnum_prop": 0.5683462532299741, "repo_name": "coq-bench/coq-bench.github.io", "id": "f19569dc00d6c5044e14382b0c17dcdc35b073be", "size": "7765", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.09.1-2.0.6/released/8.8.1/projective-geometry/8.5.0.html", "mode": "33188", "license": "mit", "language": []}} {"text": "@interface DemoViewController ()\n\n@end\n\n@implementation DemoViewController\n\n- (void)viewDidLoad {\n [super viewDidLoad];\n}\n\n- (void)didReceiveMemoryWarning {\n [super didReceiveMemoryWarning];\n}\n\n- (IBAction)openCalendar:(id)sender {\n [self openCalendarVC:self];\n}\n\n\n- (void)openCalendarVC:(UIViewController *)vc {\n if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) {\n UIViewController *viewController = [[UIStoryboard storyboardWithName:@\"main-iPad\" bundle:NULL] instantiateViewControllerWithIdentifier:@\"MainNav\"];\n viewController.modalTransitionStyle = UIModalTransitionStyleCoverVertical;\n \n [vc presentViewController:viewController animated:YES completion:nil];\n } else {\n UIViewController *viewController = [[UIStoryboard storyboardWithName:@\"main-iPhone\" bundle:NULL] instantiateViewControllerWithIdentifier:@\"MainNav\"];\n viewController.modalTransitionStyle = UIModalTransitionStyleCoverVertical;\n \n [vc presentViewController:viewController animated:YES completion:nil];\n }\n}\n\n\n@end\n", "meta": {"content_hash": "f9a06995cf8dd0234ef62d7a3c671a5f", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 157, "avg_line_length": 30.571428571428573, "alnum_prop": 0.7439252336448599, "repo_name": "george-zergy/Calendar", "id": "f30b4bcd91848d3d3216afcc6ff06dd941ef9209", "size": "1251", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": 
"Demo/Other/DemoViewController.m", "mode": "33188", "license": "mit", "language": [{"name": "Objective-C", "bytes": "551753"}, {"name": "Ruby", "bytes": "1577"}]}} {"text": "package club.zhcs.thunder.controller.admin.log;\n\nimport org.apache.shiro.authz.annotation.RequiresRoles;\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.stereotype.Controller;\nimport org.springframework.ui.Model;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\n\nimport club.zhcs.thunder.biz.log.LoginLogService;\nimport club.zhcs.thunder.controller.base.BaseController;\nimport club.zhcs.thunder.domain.log.LoginLog;\nimport club.zhcs.titans.utils.db.Pager;\nimport club.zhcs.titans.utils.db.Result;\n\n/**\n * \n * @author admin\n *\n * @email kerbores@gmail.com\n *\n */\n@Controller\n@RequestMapping(\"login\")\npublic class LoginLogController extends BaseController {\n\n\t@Autowired\n\tLoginLogService loginLogService;\n\n\t@RequestMapping(\"list\")\n\t@RequiresRoles(\"admin\")\n\tpublic String list(@RequestParam(value = \"page\", defaultValue = \"1\") int page, Model model) {\n\t\tPager pager = loginLogService.searchByPage(_fixPage(page));\n\t\tpager.setUrl(_base() + \"/login/list\");\n\t\tmodel.addAttribute(\"obj\", Result.success().addData(\"pager\", pager).setTitle(\" \u767b\u5f55\u65e5\u5fd7\u5217\u8868\"));\n\t\treturn \"pages/log/login/list\";\n\t}\n\n\t@RequestMapping(\"search\")\n\t@RequiresRoles(\"admin\")\n\tpublic String search(@RequestParam(value = \"page\", defaultValue = \"1\") int page, @RequestParam(\"key\") String key, Model model) {\n\t\tPager pager = loginLogService.searchByKeyAndPage(_fixSearchKey(key), _fixPage(page), \"account\", \"ip\");\n\t\tpager.setUrl(_base() + \"/login/search\");\n\t\tpager.addParas(\"key\", key);\n\t\tmodel.addAttribute(\"obj\", Result.success().addData(\"pager\", pager).setTitle(\" 
\u767b\u5f55\u65e5\u5fd7\u5217\u8868\"));\n\t\treturn \"pages/log/login/list\";\n\t}\n\n}\n", "meta": {"content_hash": "df9921683263469faf05857387900681", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 129, "avg_line_length": 34.83673469387755, "alnum_prop": 0.7551259519625073, "repo_name": "Kerbores/spring-thunder", "id": "5e7f1b6c239cfc9c61ab48b1e68f6e67aa7a7765", "size": "1731", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spring-thunder/src/main/java/club/zhcs/thunder/controller/admin/log/LoginLogController.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "4136035"}, {"name": "CoffeeScript", "bytes": "83631"}, {"name": "HTML", "bytes": "1402832"}, {"name": "Java", "bytes": "157946"}, {"name": "JavaScript", "bytes": "18705408"}, {"name": "PHP", "bytes": "12187"}, {"name": "Shell", "bytes": "444"}]}} {"text": "origin = $object;\n $response->target = new ReflectionClass($object);\n return $response;\n }\n\n public function __get($name)\n {\n $property = $this->getProperty($name);\n $value = $property->getValue($this->origin);\n return $value;\n }\n\n public function __set($name, $value)\n {\n $property = $this->getProperty($name);\n $property->setValue($this->origin, $value);\n return true;\n }\n\n protected function getProperty($name)\n {\n if (!$this->target->hasProperty($name)) {\n $message = sprintf('Property not found on object %s.', get_class($this->origin));\n throw new OutOfBoundsException($message);\n }\n\n /** @var ReflectionProperty $property */\n $property = $this->target->getProperty($name);\n $property->setAccessible(true);\n\n return $property;\n }\n\n public function call($name, array $args = [])\n {\n if (!$this->target->hasMethod($name)) {\n $message = sprintf('Method not found on object %s.', get_class($this->origin));\n throw new BadMethodCallException($message);\n }\n\n $method = $this->target->getMethod($name);\n $method->setAccessible(true);\n $response 
= $method->invokeArgs($this->origin, $args);\n\n return $response;\n }\n}\n", "meta": {"content_hash": "355cce27bc3a0de459312cc06660848c", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 104, "avg_line_length": 27.380281690140844, "alnum_prop": 0.5720164609053497, "repo_name": "bogdananton/php-class-helper", "id": "cc9796b16242c263b39450bc72da0e6205c12628", "size": "1944", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ClassHelper.php", "mode": "33188", "license": "mit", "language": [{"name": "PHP", "bytes": "8106"}]}} {"text": "'use strict';\n\nif (process.platform === 'darwin') {\n\tmodule.exports = require('./lib/osx');\n\tmodule.exports.sync = require('./lib/osx').sync;\n} else if (process.platform === 'win32') {\n\tmodule.exports = require('./lib/win');\n\tmodule.exports.sync = require('./lib/win').sync;\n} else {\n\tmodule.exports = require('./lib/linux');\n\tmodule.exports.sync = require('./lib/linux').sync;\n}\n", "meta": {"content_hash": "118d8353fb769b5443149d540d33898c", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 51, "avg_line_length": 31.666666666666668, "alnum_prop": 0.6473684210526316, "repo_name": "kevva/wifi-name", "id": "5052fd83072d797bf2c203ec5708d695982293e2", "size": "380", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "index.js", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "2717"}]}} {"text": "\n\n\n\n\nUntypedResultSet.Row (apache-cassandra API)\n\n\n\n\n\n\n\n\n
\n\n\n\n
\n\n
\n
\n
    \n
  • Summary: 
  • \n
  • Nested | 
  • \n
  • Field | 
  • \n
  • Constr | 
  • \n
  • Method
  • \n
\n\n
\n\n\n
\n\n\n
\n
org.apache.cassandra.cql3
\n

Class UntypedResultSet.Row

\n
\n
\n
    \n
  • java.lang.Object
  • \n
  • \n
      \n
    • org.apache.cassandra.cql3.UntypedResultSet.Row
    • \n
    \n
  • \n
\n
\n
    \n
  • \n
    \n
    Enclosing class:
    \n
    UntypedResultSet
    \n
    \n
    \n
    \n
    public static class UntypedResultSet.Row\nextends java.lang.Object
    \n
  • \n
\n
\n
\n
    \n
  • \n\n\n\n
      \n
    • \n\n\n

      Method Summary

      \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
      Methods 
      Modifier and TypeMethod and Description
      booleangetBoolean(java.lang.String column) 
      java.nio.ByteBuffergetBytes(java.lang.String column) 
      java.util.List<ColumnSpecification>getColumns() 
      doublegetDouble(java.lang.String column) 
      java.net.InetAddressgetInetAddress(java.lang.String column) 
      intgetInt(java.lang.String column) 
      <T> java.util.List<T>getList(java.lang.String column,\n AbstractType<T> type) 
      longgetLong(java.lang.String column) 
      <K,V> java.util.Map<K,V>getMap(java.lang.String column,\n AbstractType<K> keyType,\n AbstractType<V> valueType) 
      <T> java.util.Set<T>getSet(java.lang.String column,\n AbstractType<T> type) 
      java.lang.StringgetString(java.lang.String column) 
      java.util.DategetTimestamp(java.lang.String column) 
      java.util.UUIDgetUUID(java.lang.String column) 
      booleanhas(java.lang.String column) 
      java.lang.StringtoString() 
      \n
        \n
      • \n\n\n

        Methods inherited from class java.lang.Object

        \nclone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
      • \n
      \n
    • \n
    \n
  • \n
\n
\n
\n
    \n
  • \n\n
      \n
    • \n\n\n

      Constructor Detail

      \n\n\n\n
        \n
      • \n

        UntypedResultSet.Row

        \n
        public UntypedResultSet.Row(java.util.Map<java.lang.String,java.nio.ByteBuffer> data)
        \n
      • \n
      \n\n\n\n
        \n
      • \n

        UntypedResultSet.Row

        \n
        public UntypedResultSet.Row(java.util.List<ColumnSpecification> names,\n                    java.util.List<java.nio.ByteBuffer> columns)
        \n
      • \n
      \n
    • \n
    \n\n
      \n
    • \n\n\n

      Method Detail

      \n\n\n\n
        \n
      • \n

        has

        \n
        public boolean has(java.lang.String column)
        \n
      • \n
      \n\n\n\n
        \n
      • \n

        getString

        \n
        public java.lang.String getString(java.lang.String column)
        \n
      • \n
      \n\n\n\n
        \n
      • \n

        getBoolean

        \n
        public boolean getBoolean(java.lang.String column)
        \n
      • \n
      \n\n\n\n
        \n
      • \n

        getInt

        \n
        public int getInt(java.lang.String column)
        \n
      • \n
      \n\n\n\n
        \n
      • \n

        getDouble

        \n
        public double getDouble(java.lang.String column)
        \n
      • \n
      \n\n\n\n
        \n
      • \n

        getBytes

        \n
        public java.nio.ByteBuffer getBytes(java.lang.String column)
        \n
      • \n
      \n\n\n\n
        \n
      • \n

        getInetAddress

        \n
        public java.net.InetAddress getInetAddress(java.lang.String column)
        \n
      • \n
      \n\n\n\n
        \n
      • \n

        getUUID

        \n
        public java.util.UUID getUUID(java.lang.String column)
        \n
      • \n
      \n\n\n\n
        \n
      • \n

        getTimestamp

        \n
        public java.util.Date getTimestamp(java.lang.String column)
        \n
      • \n
      \n\n\n\n
        \n
      • \n

        getLong

        \n
        public long getLong(java.lang.String column)
        \n
      • \n
      \n\n\n\n
        \n
      • \n

        getSet

        \n
        public <T> java.util.Set<T> getSet(java.lang.String column,\n                          AbstractType<T> type)
        \n
      • \n
      \n\n\n\n
        \n
      • \n

        getList

        \n
        public <T> java.util.List<T> getList(java.lang.String column,\n                            AbstractType<T> type)
        \n
      • \n
      \n\n\n\n
        \n
      • \n

        getMap

        \n
        public <K,V> java.util.Map<K,V> getMap(java.lang.String column,\n                              AbstractType<K> keyType,\n                              AbstractType<V> valueType)
        \n
      • \n
      \n\n\n\n\n\n\n\n
        \n
      • \n

        toString

        \n
        public java.lang.String toString()
        \n
        \n
        Overrides:
        \n
        toString in class java.lang.Object
        \n
        \n
      • \n
      \n
    • \n
    \n
  • \n
\n
\n
\n\n\n\n
\n\n\n\n
\n\n
\n
\n
    \n
  • Summary: 
  • \n
  • Nested | 
  • \n
  • Field | 
  • \n
  • Constr | 
  • \n
  • Method
  • \n
\n\n
\n\n\n
\n\n

Copyright © 2015 The Apache Software Foundation

\n\n\n", "meta": {"content_hash": "ddfb199bf005eb9a6ee375accbe38991", "timestamp": "", "source": "github", "line_count": 469, "max_line_length": 369, "avg_line_length": 40.61620469083156, "alnum_prop": 0.660979578980524, "repo_name": "anuragkapur/cassandra-2.1.2-ak-skynet", "id": "8975ec467a0c2750dfff936bb29ea8033dbbcde8", "size": "19049", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "apache-cassandra-2.0.15/javadoc/org/apache/cassandra/cql3/UntypedResultSet.Row.html", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "59670"}, {"name": "PowerShell", "bytes": "37758"}, {"name": "Python", "bytes": "622552"}, {"name": "Shell", "bytes": "100474"}, {"name": "Thrift", "bytes": "78926"}]}} {"text": "package com.orientechnologies.orient.server.distributed;\n\nimport static org.junit.Assert.assertEquals;\nimport static org.junit.Assert.assertTrue;\n\nimport com.orientechnologies.orient.core.config.OGlobalConfiguration;\nimport com.orientechnologies.orient.core.db.ODatabaseSession;\nimport com.orientechnologies.orient.core.db.OrientDB;\nimport com.orientechnologies.orient.core.db.OrientDBConfig;\nimport com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;\nimport com.orientechnologies.orient.core.metadata.schema.OClass;\nimport com.orientechnologies.orient.core.record.OVertex;\nimport com.orientechnologies.orient.core.sql.executor.OResult;\nimport com.orientechnologies.orient.core.sql.executor.OResultSet;\nimport com.orientechnologies.orient.setup.SetupConfig;\nimport com.orientechnologies.orient.setup.TestSetup;\nimport com.orientechnologies.orient.setup.TestSetupUtil;\nimport com.orientechnologies.orient.setup.configs.SimpleDServerConfig;\nimport org.junit.After;\nimport org.junit.Before;\nimport org.junit.Test;\n\npublic class SimpleQueryDistributedIT {\n\n private TestSetup setup;\n private SetupConfig config;\n private String server0, server1, server2;\n\n private OrientDB remote;\n private 
ODatabaseSession session;\n\n @Before\n public void before() throws Exception {\n config = new SimpleDServerConfig();\n server0 = SimpleDServerConfig.SERVER0;\n server1 = SimpleDServerConfig.SERVER1;\n server2 = SimpleDServerConfig.SERVER2;\n setup = TestSetupUtil.create(config);\n setup.setup();\n\n remote = setup.createRemote(server0, \"root\", \"test\", OrientDBConfig.defaultConfig());\n remote.execute(\n \"create database ? plocal users(admin identified by 'admin' role admin)\", \"test\");\n session = remote.open(\"test\", \"admin\", \"admin\");\n }\n\n @Test\n public void test() {\n OVertex vertex = session.newVertex(\"V\");\n vertex.setProperty(\"name\", \"one\");\n session.save(vertex);\n\n // Query with SQL\n OResultSet res = session.query(\"select from V\");\n assertTrue(res.hasNext());\n assertEquals(res.next().getProperty(\"name\"), \"one\");\n\n // Query with script\n res = session.execute(\"sql\", \"select from V\");\n assertTrue(res.hasNext());\n assertEquals(res.next().getProperty(\"name\"), \"one\");\n\n // Query order by\n OClass v2 = session.createVertexClass(\"V2\");\n int records = (OGlobalConfiguration.QUERY_REMOTE_RESULTSET_PAGE_SIZE.getValueAsInteger() + 10);\n for (int i = 0; i < records; i++) {\n vertex = session.newVertex(\"V2\");\n vertex.setProperty(\"name\", \"one\");\n vertex.setProperty(\"pos\", i);\n session.save(vertex);\n }\n\n res = session.query(\"select from V2 order by pos\");\n for (int i = 0; i < records; i++) {\n assertTrue(res.hasNext());\n OResult ele = res.next();\n assertEquals((int) ele.getProperty(\"pos\"), i);\n assertEquals(ele.getProperty(\"name\"), \"one\");\n }\n }\n\n @After\n public void after() throws InterruptedException {\n System.out.println(\"Tearing down test setup.\");\n try {\n if (remote != null) {\n remote.drop(\"test\");\n remote.close();\n }\n } finally {\n setup.teardown();\n ODatabaseDocumentTx.closeAll();\n }\n }\n}\n", "meta": {"content_hash": "cb9f578fcbbed6ae08f8256a46d1f433", "timestamp": "", 
"source": "github", "line_count": 95, "max_line_length": 99, "avg_line_length": 33.56842105263158, "alnum_prop": 0.7118218877391032, "repo_name": "orientechnologies/orientdb", "id": "b700fbd44b868ed811e4a17f226b1c64c41391fc", "size": "3189", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "distributed/src/test/java/com/orientechnologies/orient/server/distributed/SimpleQueryDistributedIT.java", "mode": "33261", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "19302"}, {"name": "Dockerfile", "bytes": "705"}, {"name": "Gnuplot", "bytes": "1245"}, {"name": "Groovy", "bytes": "7913"}, {"name": "HTML", "bytes": "5750"}, {"name": "Java", "bytes": "26588383"}, {"name": "JavaScript", "bytes": "259"}, {"name": "PLpgSQL", "bytes": "54881"}, {"name": "Shell", "bytes": "33650"}]}} {"text": "\npackage org.thymeleaf.spring6.processor;\n\nimport org.springframework.util.StringUtils;\nimport org.thymeleaf.context.ITemplateContext;\nimport org.thymeleaf.engine.AttributeDefinition;\nimport org.thymeleaf.engine.AttributeDefinitions;\nimport org.thymeleaf.engine.AttributeName;\nimport org.thymeleaf.engine.IAttributeDefinitionsAware;\nimport org.thymeleaf.exceptions.TemplateProcessingException;\nimport org.thymeleaf.model.IProcessableElementTag;\nimport org.thymeleaf.processor.element.AbstractAttributeTagProcessor;\nimport org.thymeleaf.processor.element.IElementTagStructureHandler;\nimport org.thymeleaf.spring6.context.IThymeleafBindStatus;\nimport org.thymeleaf.spring6.naming.SpringContextVariableNames;\nimport org.thymeleaf.spring6.util.FieldUtils;\nimport org.thymeleaf.templatemode.TemplateMode;\nimport org.thymeleaf.util.Validate;\n\n/**\n * Binds an input property with the value in the form's backing bean.\n *

\n * Values for {@code th:field} attributes must be selection expressions\n * {@code (*{...})}, as they will be evaluated on the form backing bean and not\n * on the context variables (model attributes in Spring MVC jargon).\n *\n * @author Daniel Fernández\n * @since 3.0.3\n */\npublic abstract class AbstractSpringFieldTagProcessor\n extends AbstractAttributeTagProcessor\n implements IAttributeDefinitionsAware {\n\n\n public static final int ATTR_PRECEDENCE = 1700;\n public static final String ATTR_NAME = \"field\";\n\n private static final TemplateMode TEMPLATE_MODE = TemplateMode.HTML;\n\n protected static final String INPUT_TAG_NAME = \"input\";\n protected static final String SELECT_TAG_NAME = \"select\";\n protected static final String OPTION_TAG_NAME = \"option\";\n protected static final String TEXTAREA_TAG_NAME = \"textarea\";\n\n protected static final String ID_ATTR_NAME = \"id\";\n protected static final String TYPE_ATTR_NAME = \"type\";\n protected static final String NAME_ATTR_NAME = \"name\";\n protected static final String VALUE_ATTR_NAME = \"value\";\n protected static final String CHECKED_ATTR_NAME = \"checked\";\n protected static final String SELECTED_ATTR_NAME = \"selected\";\n protected static final String DISABLED_ATTR_NAME = \"disabled\";\n protected static final String MULTIPLE_ATTR_NAME = \"multiple\";\n\n private AttributeDefinition discriminatorAttributeDefinition;\n protected AttributeDefinition idAttributeDefinition;\n protected AttributeDefinition typeAttributeDefinition;\n protected AttributeDefinition nameAttributeDefinition;\n protected AttributeDefinition valueAttributeDefinition;\n protected AttributeDefinition checkedAttributeDefinition;\n protected AttributeDefinition selectedAttributeDefinition;\n protected AttributeDefinition disabledAttributeDefinition;\n protected AttributeDefinition multipleAttributeDefinition;\n\n\n\n\n\n private final String discriminatorAttrName;\n private final String[] discriminatorAttrValues;\n 
private final boolean removeAttribute;\n\n\n public AbstractSpringFieldTagProcessor(\n final String dialectPrefix, final String elementName,\n final String discriminatorAttrName, final String[] discriminatorAttrValues,\n final boolean removeAttribute) {\n super(TEMPLATE_MODE, dialectPrefix, elementName, false, ATTR_NAME, true, ATTR_PRECEDENCE, false);\n this.discriminatorAttrName = discriminatorAttrName;\n this.discriminatorAttrValues = discriminatorAttrValues;\n this.removeAttribute = removeAttribute;\n }\n\n\n\n\n public void setAttributeDefinitions(final AttributeDefinitions attributeDefinitions) {\n Validate.notNull(attributeDefinitions, \"Attribute Definitions cannot be null\");\n // We precompute the AttributeDefinitions in order to being able to use much\n // faster methods for setting/replacing attributes on the ElementAttributes implementation\n this.discriminatorAttributeDefinition =\n (this.discriminatorAttrName != null? attributeDefinitions.forName(TEMPLATE_MODE, this.discriminatorAttrName) : null);\n this.idAttributeDefinition = attributeDefinitions.forName(TEMPLATE_MODE, ID_ATTR_NAME);\n this.typeAttributeDefinition = attributeDefinitions.forName(TEMPLATE_MODE, TYPE_ATTR_NAME);\n this.nameAttributeDefinition = attributeDefinitions.forName(TEMPLATE_MODE, NAME_ATTR_NAME);\n this.valueAttributeDefinition = attributeDefinitions.forName(TEMPLATE_MODE, VALUE_ATTR_NAME);\n this.checkedAttributeDefinition = attributeDefinitions.forName(TEMPLATE_MODE, CHECKED_ATTR_NAME);\n this.selectedAttributeDefinition = attributeDefinitions.forName(TEMPLATE_MODE, SELECTED_ATTR_NAME);\n this.disabledAttributeDefinition = attributeDefinitions.forName(TEMPLATE_MODE, DISABLED_ATTR_NAME);\n this.multipleAttributeDefinition = attributeDefinitions.forName(TEMPLATE_MODE, MULTIPLE_ATTR_NAME);\n }\n\n\n\n\n private boolean matchesDiscriminator(final IProcessableElementTag tag) {\n\n if (this.discriminatorAttrName == null) {\n return true;\n }\n final boolean hasDiscriminatorAttr = 
tag.hasAttribute(this.discriminatorAttributeDefinition.getAttributeName());\n if (this.discriminatorAttrValues == null || this.discriminatorAttrValues.length == 0) {\n return hasDiscriminatorAttr;\n }\n final String discriminatorTagValue =\n (hasDiscriminatorAttr? tag.getAttributeValue(this.discriminatorAttributeDefinition.getAttributeName()) : null);\n for (int i = 0; i < this.discriminatorAttrValues.length; i++) {\n final String discriminatorAttrValue = this.discriminatorAttrValues[i];\n if (discriminatorAttrValue == null) {\n if (!hasDiscriminatorAttr || discriminatorTagValue == null) {\n return true;\n }\n } else if (discriminatorAttrValue.equals(discriminatorTagValue)) {\n return true;\n }\n }\n return false;\n\n }\n\n\n\n @Override\n protected void doProcess(\n final ITemplateContext context,\n final IProcessableElementTag tag,\n final AttributeName attributeName, final String attributeValue,\n final IElementTagStructureHandler structureHandler) {\n\n /*\n * First thing to check is whether this processor really matches, because so far we have asked the engine only\n * to match per attribute (th:field) and host tag (input, select, option...) but we still don't know if the\n * match is complete because we might still need to assess for example that the 'type' attribute has the\n * correct value. For example, the same processor will not be executing on \n * and on \n */\n if (!matchesDiscriminator(tag)) {\n // Note in this case we do not have to remove the th:field attribute because the correct processor is still\n // to be executed!\n return;\n }\n\n if (this.removeAttribute) {\n structureHandler.removeAttribute(attributeName);\n }\n\n final IThymeleafBindStatus bindStatus = FieldUtils.getBindStatus(context, attributeValue);\n\n if (bindStatus == null) {\n throw new TemplateProcessingException(\n \"Cannot process attribute '\" + attributeName + \"': no associated BindStatus could be found for \" +\n \"the intended form binding operations. 
This can be due to the lack of a proper management of the \" +\n \"Spring RequestContext, which is usually done through the ThymeleafView or ThymeleafReactiveView\");\n }\n\n // We set the BindStatus into a local variable just in case we have more BindStatus-related processors to\n // be applied for the same tag, like for example a th:errorclass\n structureHandler.setLocalVariable(SpringContextVariableNames.THYMELEAF_FIELD_BIND_STATUS, bindStatus);\n\n doProcess(context, tag, attributeName, attributeValue, bindStatus, structureHandler);\n\n }\n\n\n\n\n protected abstract void doProcess(\n final ITemplateContext context,\n final IProcessableElementTag tag,\n final AttributeName attributeName,\n final String attributeValue,\n final IThymeleafBindStatus bindStatus,\n final IElementTagStructureHandler structureHandler);\n\n\n\n\n\n // This method is designed to be called from the diverse subclasses\n protected final String computeId(\n final ITemplateContext context,\n final IProcessableElementTag tag,\n final String name, final boolean sequence) {\n\n String id = tag.getAttributeValue(this.idAttributeDefinition.getAttributeName());\n if (!org.thymeleaf.util.StringUtils.isEmptyOrWhitespace(id)) {\n return (StringUtils.hasText(id) ? 
id : null);\n }\n\n id = FieldUtils.idFromName(name);\n if (sequence) {\n final Integer count = context.getIdentifierSequences().getAndIncrementIDSeq(id);\n return id + count.toString();\n }\n return id;\n\n }\n\n\n\n\n}\n", "meta": {"content_hash": "e583ddbb00545fad6c919b83b65d762a", "timestamp": "", "source": "github", "line_count": 211, "max_line_length": 133, "avg_line_length": 43.5260663507109, "alnum_prop": 0.7192944250871081, "repo_name": "thymeleaf/thymeleaf-spring", "id": "8ab55cd158f10b42c25bcdbda2c2cd664f5cc721", "size": "10010", "binary": false, "copies": "2", "ref": "refs/heads/3.1-master", "path": "thymeleaf-spring6/src/main/java/org/thymeleaf/spring6/processor/AbstractSpringFieldTagProcessor.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "1170794"}]}} {"text": "\n\npackage org.apache.zookeeper.server;\n\nimport java.io.BufferedWriter;\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.io.PrintWriter;\nimport java.io.Writer;\nimport java.net.InetAddress;\nimport java.net.InetSocketAddress;\nimport java.nio.ByteBuffer;\nimport java.nio.channels.CancelledKeyException;\nimport java.nio.channels.SelectionKey;\nimport java.nio.channels.SocketChannel;\nimport java.security.cert.Certificate;\nimport java.util.Queue;\nimport java.util.concurrent.LinkedBlockingQueue;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.concurrent.atomic.AtomicInteger;\n\nimport org.apache.jute.BinaryInputArchive;\nimport org.apache.jute.BinaryOutputArchive;\nimport org.apache.jute.Record;\nimport org.apache.zookeeper.WatchedEvent;\nimport org.apache.zookeeper.data.Id;\nimport org.apache.zookeeper.proto.ReplyHeader;\nimport org.apache.zookeeper.proto.RequestHeader;\nimport org.apache.zookeeper.proto.WatcherEvent;\nimport org.apache.zookeeper.server.NIOServerCnxnFactory.SelectorThread;\nimport org.apache.zookeeper.server.command.CommandExecutor;\nimport 
org.apache.zookeeper.server.command.FourLetterCommands;\nimport org.apache.zookeeper.server.command.SetTraceMaskCommand;\nimport org.apache.zookeeper.server.command.NopCommand;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * This class handles communication with clients using NIO. There is one per\n * client, but only one thread doing the communication.\n */\npublic class NIOServerCnxn extends ServerCnxn {\n private static final Logger LOG = LoggerFactory.getLogger(NIOServerCnxn.class);\n\n private final NIOServerCnxnFactory factory;\n\n private final SocketChannel sock;\n\n private final SelectorThread selectorThread;\n\n private final SelectionKey sk;\n\n private boolean initialized;\n\n private final ByteBuffer lenBuffer = ByteBuffer.allocate(4);\n\n private ByteBuffer incomingBuffer = lenBuffer;\n\n private final Queue outgoingBuffers =\n new LinkedBlockingQueue();\n\n private int sessionTimeout;\n\n private final ZooKeeperServer zkServer;\n\n /**\n * The number of requests that have been submitted but not yet responded to.\n */\n private final AtomicInteger outstandingRequests = new AtomicInteger(0);\n\n /**\n * This is the id that uniquely identifies the session of a client. 
Once\n * this session is no longer active, the ephemeral nodes will go away.\n */\n private long sessionId;\n\n private final int outstandingLimit;\n\n public NIOServerCnxn(ZooKeeperServer zk, SocketChannel sock,\n SelectionKey sk, NIOServerCnxnFactory factory,\n SelectorThread selectorThread) throws IOException {\n this.zkServer = zk;\n this.sock = sock;\n this.sk = sk;\n this.factory = factory;\n this.selectorThread = selectorThread;\n if (this.factory.login != null) {\n this.zooKeeperSaslServer = new ZooKeeperSaslServer(factory.login);\n }\n if (zk != null) {\n outstandingLimit = zk.getGlobalOutstandingLimit();\n } else {\n outstandingLimit = 1;\n }\n sock.socket().setTcpNoDelay(true);\n /* set socket linger to false, so that socket close does not block */\n sock.socket().setSoLinger(false, -1);\n InetAddress addr = ((InetSocketAddress) sock.socket()\n .getRemoteSocketAddress()).getAddress();\n addAuthInfo(new Id(\"ip\", addr.getHostAddress()));\n this.sessionTimeout = factory.sessionlessCnxnTimeout;\n }\n\n /* Send close connection packet to the client, doIO will eventually\n * close the underlying machinery (like socket, selectorkey, etc...)\n */\n public void sendCloseSession() {\n sendBuffer(ServerCnxnFactory.closeConn);\n }\n\n /**\n * send buffer without using the asynchronous\n * calls to selector and then close the socket\n * @param bb\n */\n void sendBufferSync(ByteBuffer bb) {\n try {\n /* configure socket to be blocking\n * so that we dont have to do write in\n * a tight while loop\n */\n if (bb != ServerCnxnFactory.closeConn) {\n if (sock.isOpen()) {\n sock.configureBlocking(true);\n sock.write(bb);\n }\n packetSent();\n }\n } catch (IOException ie) {\n LOG.error(\"Error sending data synchronously \", ie);\n }\n }\n\n /**\n * sendBuffer pushes a byte buffer onto the outgoing buffer queue for\n * asynchronous writes.\n */\n public void sendBuffer(ByteBuffer bb) {\n if (LOG.isTraceEnabled()) {\n LOG.trace(\"Add a buffer to outgoingBuffers, sk \" + sk\n 
+ \" is valid: \" + sk.isValid());\n }\n outgoingBuffers.add(bb);\n requestInterestOpsUpdate();\n }\n\n /** Read the request payload (everything following the length prefix) */\n private void readPayload() throws IOException, InterruptedException {\n if (incomingBuffer.remaining() != 0) { // have we read length bytes?\n int rc = sock.read(incomingBuffer); // sock is non-blocking, so ok\n if (rc < 0) {\n throw new EndOfStreamException(\n \"Unable to read additional data from client sessionid 0x\"\n + Long.toHexString(sessionId)\n + \", likely client has closed socket\");\n }\n }\n\n if (incomingBuffer.remaining() == 0) { // have we read length bytes?\n packetReceived();\n incomingBuffer.flip();\n if (!initialized) {\n readConnectRequest();\n } else {\n readRequest();\n }\n lenBuffer.clear();\n incomingBuffer = lenBuffer;\n }\n }\n\n /**\n * This boolean tracks whether the connection is ready for selection or\n * not. A connection is marked as not ready for selection while it is\n * processing an IO request. 
The flag is used to gatekeep pushing interest\n * op updates onto the selector.\n */\n private final AtomicBoolean selectable = new AtomicBoolean(true);\n\n public boolean isSelectable() {\n return sk.isValid() && selectable.get();\n }\n\n public void disableSelectable() {\n selectable.set(false);\n }\n\n public void enableSelectable() {\n selectable.set(true);\n }\n\n private void requestInterestOpsUpdate() {\n if (isSelectable()) {\n selectorThread.addInterestOpsUpdateRequest(sk);\n }\n }\n\n void handleWrite(SelectionKey k) throws IOException, CloseRequestException {\n if (outgoingBuffers.isEmpty()) {\n return;\n }\n\n /*\n * This is going to reset the buffer position to 0 and the\n * limit to the size of the buffer, so that we can fill it\n * with data from the non-direct buffers that we need to\n * send.\n */\n ByteBuffer directBuffer = NIOServerCnxnFactory.getDirectBuffer();\n if (directBuffer == null) {\n ByteBuffer[] bufferList = new ByteBuffer[outgoingBuffers.size()];\n // Use gathered write call. 
This updates the positions of the\n // byte buffers to reflect the bytes that were written out.\n sock.write(outgoingBuffers.toArray(bufferList));\n\n // Remove the buffers that we have sent\n ByteBuffer bb;\n while ((bb = outgoingBuffers.peek()) != null) {\n if (bb == ServerCnxnFactory.closeConn) {\n throw new CloseRequestException(\"close requested\");\n }\n if (bb.remaining() > 0) {\n break;\n }\n packetSent();\n outgoingBuffers.remove();\n }\n } else {\n directBuffer.clear();\n\n for (ByteBuffer b : outgoingBuffers) {\n if (directBuffer.remaining() < b.remaining()) {\n /*\n * When we call put later, if the directBuffer is to\n * small to hold everything, nothing will be copied,\n * so we've got to slice the buffer if it's too big.\n */\n b = (ByteBuffer) b.slice().limit(\n directBuffer.remaining());\n }\n /*\n * put() is going to modify the positions of both\n * buffers, put we don't want to change the position of\n * the source buffers (we'll do that after the send, if\n * needed), so we save and reset the position after the\n * copy\n */\n int p = b.position();\n directBuffer.put(b);\n b.position(p);\n if (directBuffer.remaining() == 0) {\n break;\n }\n }\n /*\n * Do the flip: limit becomes position, position gets set to\n * 0. 
This sets us up for the write.\n */\n directBuffer.flip();\n\n int sent = sock.write(directBuffer);\n\n ByteBuffer bb;\n\n // Remove the buffers that we have sent\n while ((bb = outgoingBuffers.peek()) != null) {\n if (bb == ServerCnxnFactory.closeConn) {\n throw new CloseRequestException(\"close requested\");\n }\n if (sent < bb.remaining()) {\n /*\n * We only partially sent this buffer, so we update\n * the position and exit the loop.\n */\n bb.position(bb.position() + sent);\n break;\n }\n packetSent();\n /* We've sent the whole buffer, so drop the buffer */\n sent -= bb.remaining();\n outgoingBuffers.remove();\n }\n }\n }\n\n /**\n * Only used in order to allow testing\n */\n protected boolean isSocketOpen() {\n return sock.isOpen();\n }\n\n /**\n * Handles read/write IO on connection.\n */\n void doIO(SelectionKey k) throws InterruptedException {\n try {\n if (isSocketOpen() == false) {\n LOG.warn(\"trying to do i/o on a null socket for session:0x\"\n + Long.toHexString(sessionId));\n\n return;\n }\n if (k.isReadable()) {\n int rc = sock.read(incomingBuffer);\n if (rc < 0) {\n throw new EndOfStreamException(\n \"Unable to read additional data from client sessionid 0x\"\n + Long.toHexString(sessionId)\n + \", likely client has closed socket\");\n }\n if (incomingBuffer.remaining() == 0) {\n boolean isPayload;\n if (incomingBuffer == lenBuffer) { // start of next request\n incomingBuffer.flip();\n isPayload = readLength(k);\n incomingBuffer.clear();\n } else {\n // continuation\n isPayload = true;\n }\n if (isPayload) { // not the case for 4letterword\n readPayload();\n }\n else {\n // four letter words take care\n // need not do anything else\n return;\n }\n }\n }\n if (k.isWritable()) {\n handleWrite(k);\n\n if (!initialized && !getReadInterest() && !getWriteInterest()) {\n throw new CloseRequestException(\"responded to info probe\");\n }\n }\n } catch (CancelledKeyException e) {\n LOG.warn(\"CancelledKeyException causing close of session 0x\"\n + 
Long.toHexString(sessionId));\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"CancelledKeyException stack trace\", e);\n }\n close();\n } catch (CloseRequestException e) {\n // expecting close to log session closure\n close();\n } catch (EndOfStreamException e) {\n LOG.warn(e.getMessage());\n // expecting close to log session closure\n close();\n } catch (IOException e) {\n LOG.warn(\"Exception causing close of session 0x\"\n + Long.toHexString(sessionId) + \": \" + e.getMessage());\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"IOException stack trace\", e);\n }\n close();\n }\n }\n\n private void readRequest() throws IOException {\n zkServer.processPacket(this, incomingBuffer);\n }\n\n // Only called as callback from zkServer.processPacket()\n protected void incrOutstandingRequests(RequestHeader h) {\n if (h.getXid() >= 0) {\n outstandingRequests.incrementAndGet();\n // check throttling\n int inProcess = zkServer.getInProcess();\n if (inProcess > outstandingLimit) {\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"Throttling recv \" + inProcess);\n }\n disableRecv();\n }\n }\n }\n\n // returns whether we are interested in writing, which is determined\n // by whether we have any pending buffers on the output queue or not\n private boolean getWriteInterest() {\n return !outgoingBuffers.isEmpty();\n }\n\n // returns whether we are interested in taking new requests, which is\n // determined by whether we are currently throttled or not\n private boolean getReadInterest() {\n return !throttled.get();\n }\n\n private final AtomicBoolean throttled = new AtomicBoolean(false);\n\n // Throttle acceptance of new requests. If this entailed a state change,\n // register an interest op update request with the selector.\n public void disableRecv() {\n if (throttled.compareAndSet(false, true)) {\n requestInterestOpsUpdate();\n }\n }\n\n // Disable throttling and resume acceptance of new requests. 
If this\n // entailed a state change, register an interest op update request with\n // the selector.\n public void enableRecv() {\n if (throttled.compareAndSet(true, false)) {\n requestInterestOpsUpdate();\n }\n }\n\n private void readConnectRequest() throws IOException, InterruptedException {\n if (!isZKServerRunning()) {\n throw new IOException(\"ZooKeeperServer not running\");\n }\n zkServer.processConnectRequest(this, incomingBuffer);\n initialized = true;\n }\n\n /**\n * This class wraps the sendBuffer method of NIOServerCnxn. It is\n * responsible for chunking up the response to a client. Rather\n * than cons'ing up a response fully in memory, which may be large\n * for some commands, this class chunks up the result.\n */\n private class SendBufferWriter extends Writer {\n private StringBuffer sb = new StringBuffer();\n\n /**\n * Check if we are ready to send another chunk.\n * @param force force sending, even if not a full chunk\n */\n private void checkFlush(boolean force) {\n if ((force && sb.length() > 0) || sb.length() > 2048) {\n sendBufferSync(ByteBuffer.wrap(sb.toString().getBytes()));\n // clear our internal buffer\n sb.setLength(0);\n }\n }\n\n @Override\n public void close() throws IOException {\n if (sb == null) return;\n checkFlush(true);\n sb = null; // clear out the ref to ensure no reuse\n }\n\n @Override\n public void flush() throws IOException {\n checkFlush(true);\n }\n\n @Override\n public void write(char[] cbuf, int off, int len) throws IOException {\n sb.append(cbuf, off, len);\n checkFlush(false);\n }\n }\n /** Return if four letter word found and responded to, otw false **/\n private boolean checkFourLetterWord(final SelectionKey k, final int len)\n throws IOException\n {\n // We take advantage of the limited size of the length to look\n // for cmds. 
They are all 4-bytes which fits inside of an int\n if (!FourLetterCommands.isKnown(len)) {\n return false;\n }\n\n String cmd = FourLetterCommands.getCommandString(len);\n packetReceived();\n\n /** cancel the selection key to remove the socket handling\n * from selector. This is to prevent netcat problem wherein\n * netcat immediately closes the sending side after sending the\n * commands and still keeps the receiving channel open.\n * The idea is to remove the selectionkey from the selector\n * so that the selector does not notice the closed read on the\n * socket channel and keep the socket alive to write the data to\n * and makes sure to close the socket after its done writing the data\n */\n if (k != null) {\n try {\n k.cancel();\n } catch(Exception e) {\n LOG.error(\"Error cancelling command selection key \", e);\n }\n }\n\n final PrintWriter pwriter = new PrintWriter(\n new BufferedWriter(new SendBufferWriter()));\n\n // ZOOKEEPER-2693: don't execute 4lw if it's not enabled.\n if (!FourLetterCommands.isEnabled(cmd)) {\n LOG.debug(\"Command {} is not executed because it is not in the whitelist.\", cmd);\n NopCommand nopCmd = new NopCommand(pwriter, this, cmd +\n \" is not executed because it is not in the whitelist.\");\n nopCmd.start();\n return true;\n }\n\n LOG.info(\"Processing \" + cmd + \" command from \"\n + sock.socket().getRemoteSocketAddress());\n\n if (len == FourLetterCommands.setTraceMaskCmd) {\n incomingBuffer = ByteBuffer.allocate(8);\n int rc = sock.read(incomingBuffer);\n if (rc < 0) {\n throw new IOException(\"Read error\");\n }\n incomingBuffer.flip();\n long traceMask = incomingBuffer.getLong();\n ZooTrace.setTextTraceLevel(traceMask);\n SetTraceMaskCommand setMask = new SetTraceMaskCommand(pwriter, this, traceMask);\n setMask.start();\n return true;\n } else {\n CommandExecutor commandExecutor = new CommandExecutor();\n return commandExecutor.execute(this, pwriter, len, zkServer, factory);\n }\n }\n\n /** Reads the first 4 bytes of 
lenBuffer, which could be true length or\n * four letter word.\n *\n * @param k selection key\n * @return true if length read, otw false (wasn't really the length)\n * @throws IOException if buffer size exceeds maxBuffer size\n */\n private boolean readLength(SelectionKey k) throws IOException {\n // Read the length, now get the buffer\n int len = lenBuffer.getInt();\n if (!initialized && checkFourLetterWord(sk, len)) {\n return false;\n }\n if (len < 0 || len > BinaryInputArchive.maxBuffer) {\n throw new IOException(\"Len error \" + len);\n }\n if (!isZKServerRunning()) {\n throw new IOException(\"ZooKeeperServer not running\");\n }\n incomingBuffer = ByteBuffer.allocate(len);\n return true;\n }\n\n /**\n * @return true if the server is running, false otherwise.\n */\n boolean isZKServerRunning() {\n return zkServer != null && zkServer.isRunning();\n }\n\n public long getOutstandingRequests() {\n return outstandingRequests.get();\n }\n\n /*\n * (non-Javadoc)\n *\n * @see org.apache.zookeeper.server.ServerCnxnIface#getSessionTimeout()\n */\n public int getSessionTimeout() {\n return sessionTimeout;\n }\n\n /**\n * Used by \"dump\" 4-letter command to list all connection in\n * cnxnExpiryMap\n */\n @Override\n public String toString() {\n return \"ip: \" + sock.socket().getRemoteSocketAddress() +\n \" sessionId: 0x\" + Long.toHexString(sessionId);\n }\n\n /**\n * Close the cnxn and remove it from the factory cnxns list.\n */\n @Override\n public void close() {\n if (!factory.removeCnxn(this)) {\n return;\n }\n\n if (zkServer != null) {\n zkServer.removeCnxn(this);\n }\n\n if (sk != null) {\n try {\n // need to cancel this selection key from the selector\n sk.cancel();\n } catch (Exception e) {\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"ignoring exception during selectionkey cancel\", e);\n }\n }\n }\n\n closeSock();\n }\n\n /**\n * Close resources associated with the sock of this cnxn.\n */\n private void closeSock() {\n if (sock.isOpen() == false) {\n return;\n 
}\n\n LOG.info(\"Closed socket connection for client \"\n + sock.socket().getRemoteSocketAddress()\n + (sessionId != 0 ?\n \" which had sessionid 0x\" + Long.toHexString(sessionId) :\n \" (no session established for client)\"));\n closeSock(sock);\n }\n\n /**\n * Close resources associated with a sock.\n */\n public static void closeSock(SocketChannel sock) {\n if (sock.isOpen() == false) {\n return;\n }\n\n try {\n /*\n * The following sequence of code is stupid! You would think that\n * only sock.close() is needed, but alas, it doesn't work that way.\n * If you just do sock.close() there are cases where the socket\n * doesn't actually close...\n */\n sock.socket().shutdownOutput();\n } catch (IOException e) {\n // This is a relatively common exception that we can't avoid\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"ignoring exception during output shutdown\", e);\n }\n }\n try {\n sock.socket().shutdownInput();\n } catch (IOException e) {\n // This is a relatively common exception that we can't avoid\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"ignoring exception during input shutdown\", e);\n }\n }\n try {\n sock.socket().close();\n } catch (IOException e) {\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"ignoring exception during socket close\", e);\n }\n }\n try {\n sock.close();\n } catch (IOException e) {\n if (LOG.isDebugEnabled()) {\n LOG.debug(\"ignoring exception during socketchannel close\", e);\n }\n }\n }\n\n private final static byte fourBytes[] = new byte[4];\n\n /*\n * (non-Javadoc)\n *\n * @see org.apache.zookeeper.server.ServerCnxnIface#sendResponse(org.apache.zookeeper.proto.ReplyHeader,\n * org.apache.jute.Record, java.lang.String)\n */\n @Override\n public void sendResponse(ReplyHeader h, Record r, String tag) {\n try {\n ByteArrayOutputStream baos = new ByteArrayOutputStream();\n // Make space for length\n BinaryOutputArchive bos = BinaryOutputArchive.getArchive(baos);\n try {\n baos.write(fourBytes);\n bos.writeRecord(h, \"header\");\n if (r != 
null) {\n bos.writeRecord(r, tag);\n }\n baos.close();\n } catch (IOException e) {\n LOG.error(\"Error serializing response\");\n }\n byte b[] = baos.toByteArray();\n ByteBuffer bb = ByteBuffer.wrap(b);\n bb.putInt(b.length - 4).rewind();\n sendBuffer(bb);\n if (h.getXid() > 0) {\n // check throttling\n if (outstandingRequests.decrementAndGet() < 1 ||\n zkServer.getInProcess() < outstandingLimit) {\n enableRecv();\n }\n }\n } catch(Exception e) {\n LOG.warn(\"Unexpected exception. Destruction averted.\", e);\n }\n }\n\n /*\n * (non-Javadoc)\n *\n * @see org.apache.zookeeper.server.ServerCnxnIface#process(org.apache.zookeeper.proto.WatcherEvent)\n */\n @Override\n public void process(WatchedEvent event) {\n ReplyHeader h = new ReplyHeader(-1, -1L, 0);\n if (LOG.isTraceEnabled()) {\n ZooTrace.logTraceMessage(LOG, ZooTrace.EVENT_DELIVERY_TRACE_MASK,\n \"Deliver event \" + event + \" to 0x\"\n + Long.toHexString(this.sessionId)\n + \" through \" + this);\n }\n\n // Convert WatchedEvent to a type that can be sent over the wire\n WatcherEvent e = event.getWrapper();\n\n sendResponse(h, e, \"notification\");\n }\n\n /*\n * (non-Javadoc)\n *\n * @see org.apache.zookeeper.server.ServerCnxnIface#getSessionId()\n */\n @Override\n public long getSessionId() {\n return sessionId;\n }\n\n @Override\n public void setSessionId(long sessionId) {\n this.sessionId = sessionId;\n factory.addSession(sessionId, this);\n }\n\n @Override\n public void setSessionTimeout(int sessionTimeout) {\n this.sessionTimeout = sessionTimeout;\n factory.touchCnxn(this);\n }\n\n @Override\n public int getInterestOps() {\n if (!isSelectable()) {\n return 0;\n }\n int interestOps = 0;\n if (getReadInterest()) {\n interestOps |= SelectionKey.OP_READ;\n }\n if (getWriteInterest()) {\n interestOps |= SelectionKey.OP_WRITE;\n }\n return interestOps;\n }\n\n @Override\n public InetSocketAddress getRemoteSocketAddress() {\n if (sock.isOpen() == false) {\n return null;\n }\n return (InetSocketAddress) 
sock.socket().getRemoteSocketAddress();\n }\n\n public InetAddress getSocketAddress() {\n if (sock.isOpen() == false) {\n return null;\n }\n return sock.socket().getInetAddress();\n }\n\n @Override\n protected ServerStats serverStats() {\n if (zkServer == null) {\n return null;\n }\n return zkServer.serverStats();\n }\n\n @Override\n public boolean isSecure() {\n return false;\n }\n\n @Override\n public Certificate[] getClientCertificateChain() {\n throw new UnsupportedOperationException(\n \"SSL is unsupported in NIOServerCnxn\");\n }\n\n @Override\n public void setClientCertificateChain(Certificate[] chain) {\n throw new UnsupportedOperationException(\n \"SSL is unsupported in NIOServerCnxn\");\n }\n\n}\n", "meta": {"content_hash": "4539fded6eeb848aa16f35911d782beb", "timestamp": "", "source": "github", "line_count": 803, "max_line_length": 108, "avg_line_length": 34.0, "alnum_prop": 0.5603985056039851, "repo_name": "JiangJiafu/zookeeper", "id": "446438c5363bb87b3a32a5ef7c329f68b1841d18", "size": "28108", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "src/java/main/org/apache/zookeeper/server/NIOServerCnxn.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "5969"}, {"name": "C", "bytes": "520877"}, {"name": "C++", "bytes": "673611"}, {"name": "CMake", "bytes": "6588"}, {"name": "CSS", "bytes": "4016"}, {"name": "HTML", "bytes": "40639"}, {"name": "Java", "bytes": "4272622"}, {"name": "JavaScript", "bytes": "239387"}, {"name": "M4", "bytes": "46802"}, {"name": "Makefile", "bytes": "10860"}, {"name": "Mako", "bytes": "13678"}, {"name": "Perl", "bytes": "33491"}, {"name": "Perl 6", "bytes": "115943"}, {"name": "Python", "bytes": "157028"}, {"name": "Shell", "bytes": "98150"}, {"name": "XS", "bytes": "66352"}, {"name": "XSLT", "bytes": "6024"}]}} {"text": "\n * @copyright 2002-2008 Sebastian Bergmann \n * @license http://www.opensource.org/licenses/bsd-license.php BSD License\n * @version Release: 
3.2.9\n * @link http://www.phpunit.de/\n * @since Class available since Release 2.0.0\n */\nclass OutputTestCase extends PHPUnit_Extensions_OutputTestCase\n{\n public function testExpectOutputStringFooActualFoo()\n {\n $this->expectOutputString('foo');\n print 'foo';\n }\n\n public function testExpectOutputStringFooActualBar()\n {\n $this->expectOutputString('foo');\n print 'bar';\n }\n\n public function testExpectOutputRegexFooActualFoo()\n {\n $this->expectOutputRegex('/foo/');\n print 'foo';\n }\n\n public function testExpectOutputRegexFooActualBar()\n {\n $this->expectOutputRegex('/foo/');\n print 'bar';\n }\n}\n?>\n", "meta": {"content_hash": "eba7d138b6a9a31c2db4bb4e185da540", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 78, "avg_line_length": 23.833333333333332, "alnum_prop": 0.6547202797202797, "repo_name": "nevali/shindig", "id": "68abb7d54b8087e4810a2e318280fcefcbf1d1b6", "size": "3222", "binary": false, "copies": "1", "ref": "refs/heads/0.8.1-x", "path": "php/external/PHPUnit/Tests/_files/OutputTestCase.php", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "1313610"}, {"name": "JavaScript", "bytes": "525657"}, {"name": "PHP", "bytes": "856857"}, {"name": "Shell", "bytes": "6919"}]}} {"text": "\"\"\" Models for display visual shapes whose attributes can be associated\nwith data columns from data sources.\n\n\"\"\"\nfrom __future__ import absolute_import\n\nfrom ..plot_object import PlotObject\nfrom ..mixins import FillProps, LineProps, TextProps\nfrom ..enums import Direction, Anchor\nfrom ..properties import AngleSpec, Bool, DistanceSpec, Enum, Float, Include, Instance, NumberSpec, StringSpec\n\nfrom .mappers import LinearColorMapper\n\nclass Glyph(PlotObject):\n \"\"\" Base class for all glyphs/marks/geoms/whatever-you-call-'em in Bokeh.\n\n \"\"\"\n\n visible = Bool(help=\"\"\"\n Whether the glyph should render or not.\n \"\"\")\n\nclass AnnularWedge(Glyph):\n \"\"\" Render annular 
wedges.\n\n Example\n -------\n\n .. bokeh-plot:: ../tests/glyphs/AnnularWedge.py\n :source-position: none\n\n *source:* `tests/glyphs/AnnularWedge.py `_\n\n \"\"\"\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates of the center of the annular wedges.\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates of the center of the annular wedges.\n \"\"\")\n\n inner_radius = DistanceSpec(\"inner_radius\", help=\"\"\"\n The inner radii of the annular wedges.\n \"\"\")\n\n outer_radius = DistanceSpec(\"outer_radius\", help=\"\"\"\n The outer radii of the annular wedges.\n \"\"\")\n\n start_angle = AngleSpec(\"start_angle\", help=\"\"\"\n The angles to start the annular wedges, in radians, as measured from\n the horizontal.\n \"\"\")\n\n end_angle = AngleSpec(\"end_angle\", help=\"\"\"\n The angles to end the annular wedges, in radians, as measured from\n the horizontal.\n \"\"\")\n\n direction = Enum(Direction, help=\"\"\"\n Which direction to stroke between the start and end angles.\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the annular wedges.\n \"\"\")\n\n fill_props = Include(FillProps, use_prefix=False, help=\"\"\"\n The %s values for the annular wedges.\n \"\"\")\n\nclass Annulus(Glyph):\n \"\"\" Render annuli.\n\n Example\n -------\n\n .. 
bokeh-plot:: ../tests/glyphs/Annulus.py\n :source-position: none\n\n *source:* `tests/glyphs/Annulus.py `_\n\n \"\"\"\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates of the center of the annuli.\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates of the center of the annuli.\n \"\"\")\n\n inner_radius = DistanceSpec(\"inner_radius\", help=\"\"\"\n The inner radii of the annuli.\n \"\"\")\n\n outer_radius = DistanceSpec(\"outer_radius\", help=\"\"\"\n The outer radii of the annuli.\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the annuli.\n \"\"\")\n\n fill_props = Include(FillProps, use_prefix=False, help=\"\"\"\n The %s values for the annuli.\n \"\"\")\n\nclass Arc(Glyph):\n \"\"\" Render arcs.\n\n Example\n -------\n\n .. bokeh-plot:: ../tests/glyphs/Arc.py\n :source-position: none\n\n *source:* `tests/glyphs/Arc.py `_\n\n \"\"\"\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates of the center of the arcs.\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates of the center of the arcs.\n \"\"\")\n\n radius = DistanceSpec(\"radius\", help=\"\"\"\n Radius of the arc.\n \"\"\")\n\n start_angle = AngleSpec(\"start_angle\", help=\"\"\"\n The angles to start the arcs, in radians, as measured from the horizontal.\n \"\"\")\n\n end_angle = AngleSpec(\"end_angle\", help=\"\"\"\n The angles to end the arcs, in radians, as measured from the horizontal.\n \"\"\")\n\n direction = Enum(Direction, help=\"\"\"\n Which direction to stroke between the start and end angles.\n \"\"\")\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the arcs.\n \"\"\")\n\nclass Bezier(Glyph):\n u\"\"\" Render B\u00e9zier curves.\n\n For more information consult the `Wikipedia article for B\u00e9zier curve`_.\n\n .. _Wikipedia article for B\u00e9zier curve: http://en.wikipedia.org/wiki/B\u00e9zier_curve\n\n Example\n -------\n\n .. 
bokeh-plot:: ../tests/glyphs/Bezier.py\n :source-position: none\n\n *source:* `tests/glyphs/Bezier.py `_\n\n \"\"\"\n\n x0 = NumberSpec(\"x0\", help=\"\"\"\n The x-coordinates of the starting points.\n \"\"\")\n\n y0 = NumberSpec(\"y0\", help=\"\"\"\n The y-coordinates of the starting points.\n \"\"\")\n\n x1 = NumberSpec(\"x1\", help=\"\"\"\n The x-coordinates of the ending points.\n \"\"\")\n\n y1 = NumberSpec(\"y1\", help=\"\"\"\n The y-coordinates of the ending points.\n \"\"\")\n\n cx0 = NumberSpec(\"cx0\", help=\"\"\"\n The x-coordinates of first control points.\n \"\"\")\n\n cy0 = NumberSpec(\"cy0\", help=\"\"\"\n The y-coordinates of first control points.\n \"\"\")\n\n cx1 = NumberSpec(\"cx1\", help=\"\"\"\n The x-coordinates of second control points.\n \"\"\")\n\n cy1 = NumberSpec(\"cy1\", help=\"\"\"\n The y-coordinates of second control points.\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=u\"\"\"\n The %s values for the B\u00e9zier curves.\n \"\"\")\n\nclass Gear(Glyph):\n \"\"\" Render gears.\n\n The details and nomenclature concerning gear construction can\n be quite involved. For more information, consult the `Wikipedia\n article for Gear`_.\n\n .. _Wikipedia article for Gear: http://en.wikipedia.org/wiki/Gear\n\n Example\n -------\n\n .. bokeh-plot:: ../tests/glyphs/Gear.py\n :source-position: none\n\n *source:* `tests/glyphs/Gear.py `_\n\n \"\"\"\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates of the center of the gears.\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates of the center of the gears.\n \"\"\")\n\n angle = AngleSpec(default=0, help=\"\"\"\n The angle the gears are rotated from horizontal. [rad]\n \"\"\")\n\n module = NumberSpec(\"module\", help=\"\"\"\n A scaling factor, given by::\n\n m = p / pi\n\n where *p* is the circular pitch, defined as the distance from one\n face of a tooth to the corresponding face of an adjacent tooth on\n the same gear, measured along the pitch circle. 
[float]\n \"\"\")\n\n teeth = NumberSpec(\"teeth\", help=\"\"\"\n How many teeth the gears have. [int]\n \"\"\")\n\n pressure_angle = NumberSpec(default=20, help= \"\"\"\n The complement of the angle between the direction that the teeth\n exert force on each other, and the line joining the centers of the\n two gears. [deg]\n \"\"\")\n\n # TODO: (bev) evidently missing a test for default value\n shaft_size = NumberSpec(default=0.3, help=\"\"\"\n The central gear shaft size as a percentage of the overall gear\n size. [float]\n \"\"\")\n\n # TODO: (bev) evidently missing a test for default value\n internal = NumberSpec(default=False, help=\"\"\"\n Whether the gear teeth are internal. [bool]\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the gears.\n \"\"\")\n\n fill_props = Include(FillProps, use_prefix=False, help=\"\"\"\n The %s values for the gears.\n \"\"\")\n\nclass Image(Glyph):\n \"\"\" Render images given as scalar data together with a color\n mapper.\n\n \"\"\"\n\n def __init__(self, **kwargs):\n if 'palette' in kwargs and 'color_mapper' in kwargs:\n raise ValueError(\"only one of 'palette' and 'color_mapper' may be specified\")\n elif 'color_mapper' not in kwargs:\n # Use a palette (given or default)\n palette = kwargs.pop('palette', 'Greys9')\n mapper = LinearColorMapper(palette)\n\n reserve_val = kwargs.pop('reserve_val', None)\n if reserve_val is not None:\n mapper.reserve_val = reserve_val\n\n reserve_color = kwargs.pop('reserve_color', None)\n if reserve_color is not None:\n mapper.reserve_color = reserve_color\n\n kwargs['color_mapper'] = mapper\n\n super(Image, self).__init__(**kwargs)\n\n image = NumberSpec(\"image\", help=\"\"\"\n The arrays of scalar data for the images to be colormapped.\n \"\"\")\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates to locate the image anchors.\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates to locate the image anchors.\n \"\"\")\n\n dw = 
DistanceSpec(\"dw\", help=\"\"\"\n The widths of the plot regions that the images will occupy.\n\n .. note::\n This is not the number of pixels that an image is wide.\n That number is fixed by the image itself.\n\n \"\"\")\n\n dh = DistanceSpec(\"dh\", help=\"\"\"\n The height of the plot region that the image will occupy.\n\n .. note::\n This is not the number of pixels that an image is tall.\n That number is fixed by the image itself.\n\n \"\"\")\n\n dilate = Bool(False, help=\"\"\"\n Whether to always round fractional pixel locations in such a way\n as to make the images bigger.\n\n This setting may be useful if pixel rounding errors are causing\n images to have a gap between them, when they should appear flush.\n\n \"\"\")\n\n color_mapper = Instance(LinearColorMapper, help=\"\"\"\n A ``ColorMapper`` to use to map the scalar data from ``image``\n into RGBA values for display.\n\n .. note::\n The color mapping step happens on the client.\n\n \"\"\")\n\n # TODO: (bev) support anchor property for Image\n # ref: https://github.com/bokeh/bokeh/issues/1763\n\nclass ImageRGBA(Glyph):\n \"\"\" Render images given as RGBA data.\n\n \"\"\"\n\n image = NumberSpec(\"image\", help=\"\"\"\n The arrays of RGBA data for the images.\n \"\"\")\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates to locate the image anchors.\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates to locate the image anchors.\n \"\"\")\n\n rows = NumberSpec(\"rows\", help=\"\"\"\n The numbers of rows in the images\n \"\"\")\n\n cols = NumberSpec(\"cols\", help=\"\"\"\n The numbers of columns in the images\n \"\"\")\n\n dw = DistanceSpec(\"dw\", help=\"\"\"\n The widths of the plot regions that the images will occupy.\n\n .. note::\n This is not the number of pixels that an image is wide.\n That number is fixed by the image itself.\n\n \"\"\")\n\n dh = DistanceSpec(\"dh\", help=\"\"\"\n The height of the plot region that the image will occupy.\n\n .. 
note::\n This is not the number of pixels that an image is tall.\n That number is fixed by the image itself.\n\n \"\"\")\n\n dilate = Bool(False, help=\"\"\"\n Whether to always round fractional pixel locations in such a way\n as to make the images bigger.\n\n This setting may be useful if pixel rounding errors are causing\n images to have a gap between them, when they should appear flush.\n \"\"\")\n\n # TODO: (bev) support anchor property for ImageRGBA\n # ref: https://github.com/bokeh/bokeh/issues/1763\n\nclass ImageURL(Glyph):\n \"\"\" Render images loaded from given URLs.\n\n Example\n -------\n\n .. bokeh-plot:: ../tests/glyphs/ImageURL.py\n :source-position: none\n\n *source:* `tests/glyphs/ImageURL.py `_\n\n \"\"\"\n\n url = NumberSpec(\"url\", help=\"\"\"\n The URLs to retrieve images from.\n\n .. note::\n The actual retrieving and loading of the images happens on\n the client.\n\n \"\"\")\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates to locate the image anchors.\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates to locate the image anchors.\n \"\"\")\n\n # TODO: (bev) rename to \"dw\" for consistency\n w = DistanceSpec(\"w\", help=\"\"\"\n The widths of the plot regions that the images will occupy.\n\n .. note::\n This is not the number of pixels that an image is wide.\n That number is fixed by the image itself.\n\n .. note::\n This may be renamed to \"dw\" in the future.\n\n \"\"\")\n\n # TODO: (bev) rename to \"dh\" for consistency\n h = DistanceSpec(\"h\", help=\"\"\"\n The height of the plot region that the image will occupy.\n\n .. note::\n This is not the number of pixels that an image is tall.\n That number is fixed by the image itself.\n\n .. 
note::\n This may be renamed to \"dh\" in the future.\n\n \"\"\")\n\n angle = AngleSpec(default=0, help=\"\"\"\n The angles to rotate the images, in radians as measured from the\n horizontal.\n \"\"\")\n\n global_alpha = Float(1.0, help=\"\"\"\n The opacity that each image is rendered with.\n \"\"\")\n\n dilate = Bool(False, help=\"\"\"\n Whether to always round fractional pixel locations in such a way\n as to make the images bigger.\n\n This setting may be useful if pixel rounding errors are causing\n images to have a gap between them, when they should appear flush.\n \"\"\")\n\n anchor = Enum(Anchor, help=\"\"\"\n What position of the image should be anchored at the `x`, `y`\n coordinates.\n \"\"\")\n\nclass Line(Glyph):\n \"\"\" Render a single line.\n\n .. note::\n The ``Line`` glyph is different from most other glyphs in that\n the vector of values only produces one glyph on the Plot.\n\n Example\n -------\n\n .. bokeh-plot:: ../tests/glyphs/Line.py\n :source-position: none\n\n *source:* `tests/glyphs/Line.py `_\n\n \"\"\"\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates for the points of the line.\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates for the points of the line.\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the line.\n \"\"\")\n\nclass MultiLine(Glyph):\n \"\"\" Render several lines.\n\n .. note::\n The data for the ``MultiLine`` glyph is different in that the\n vector of values is not a vector of scalars. Rather, it is a\n \"list of lists\".\n\n Example\n -------\n\n .. 
bokeh-plot:: ../tests/glyphs/MultiLine.py\n :source-position: none\n\n *source:* `tests/glyphs/MultiLine.py `_\n\n \"\"\"\n xs = NumberSpec(\"xs\", help=\"\"\"\n The x-coordinates for all the lines, given as a \"list of lists\".\n \"\"\")\n\n ys = NumberSpec(\"ys\", help=\"\"\"\n The x-coordinates for all the lines, given as a \"list of lists\".\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the lines.\n \"\"\")\n\nclass Oval(Glyph):\n u\"\"\" Render ovals.\n\n .. note::\n This glyph renders ovals using B\u00e9zier curves, which are similar,\n but not identical to ellipses.\n\n Example\n -------\n\n .. bokeh-plot:: ../tests/glyphs/Oval.py\n :source-position: none\n\n *source:* `tests/glyphs/Oval.py `_\n\n \"\"\"\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates of the centers of the ovals.\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates of the centers of the ovals.\n \"\"\")\n\n width = DistanceSpec(\"width\", help=\"\"\"\n The overall widths of each oval.\n \"\"\")\n\n height = DistanceSpec(\"height\", help=\"\"\"\n The overall height of each oval.\n \"\"\")\n\n angle = AngleSpec(\"angle\", help=\"\"\"\n The angle the ovals are rotated from horizontal. [rad]\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the ovals.\n \"\"\")\n\n fill_props = Include(FillProps, use_prefix=False, help=\"\"\"\n The %s values for the ovals.\n \"\"\")\n\nclass Patch(Glyph):\n \"\"\" Render a single patch.\n\n .. note::\n The ``Patch`` glyph is different from most other glyphs in that\n the vector of values only produces one glyph on the Plot.\n\n Example\n -------\n\n .. bokeh-plot:: ../tests/glyphs/Patch.py\n :source-position: none\n\n *source:* `tests/glyphs/Patch.py `_\n\n \"\"\"\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates for the points of the patch.\n\n .. note::\n A patch may comprise multiple polygons. 
In this case the\n x-coordinates for each polygon should be separated by NaN\n values in the sequence.\n\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates for the points of the patch.\n\n .. note::\n A patch may comprise multiple polygons. In this case the\n y-coordinates for each polygon should be separated by NaN\n values in the sequence.\n\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the patch.\n \"\"\")\n\n fill_props = Include(FillProps, use_prefix=False, help=\"\"\"\n The %s values for the patch.\n \"\"\")\n\nclass Patches(Glyph):\n \"\"\" Render several patches.\n\n .. note::\n The data for the ``Patches`` glyph is different in that the\n vector of values is not a vector of scalars. Rather, it is a\n \"list of lists\".\n\n Example\n -------\n\n .. bokeh-plot:: ../tests/glyphs/Patches.py\n :source-position: none\n\n *source:* `tests/glyphs/Patches.py `_\n\n \"\"\"\n\n xs = NumberSpec(\"xs\", help=\"\"\"\n The x-coordinates for all the patches, given as a \"list of lists\".\n\n .. note::\n Individual patches may comprise multiple polygons. In this case\n the x-coordinates for each polygon should be separated by NaN\n values in the sublists.\n\n \"\"\")\n\n ys = NumberSpec(\"ys\", help=\"\"\"\n The y-coordinates for all the patches, given as a \"list of lists\".\n\n .. note::\n Individual patches may comprise multiple polygons. In this case\n the y-coordinates for each polygon should be separated by NaN\n values in the sublists.\n\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the patches.\n \"\"\")\n\n fill_props = Include(FillProps, use_prefix=False, help=\"\"\"\n The %s values for the patches.\n \"\"\")\n\nclass Quad(Glyph):\n \"\"\" Render axis-aligned quads.\n\n Example\n -------\n\n .. 
bokeh-plot:: ../tests/glyphs/Quad.py\n :source-position: none\n\n *source:* `tests/glyphs/Quad.py `_\n\n \"\"\"\n\n left = NumberSpec(\"left\", help=\"\"\"\n The x-coordinates of the left edges.\n \"\"\")\n\n right = NumberSpec(\"right\", help=\"\"\"\n The x-coordinates of the right edges.\n \"\"\")\n\n bottom = NumberSpec(\"bottom\", help=\"\"\"\n The y-coordinates of the bottom edges.\n \"\"\")\n\n top = NumberSpec(\"top\", help=\"\"\"\n The y-coordinates of the top edges.\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the quads.\n \"\"\")\n\n fill_props = Include(FillProps, use_prefix=False, help=\"\"\"\n The %s values for the quads.\n \"\"\")\n\nclass Quadratic(Glyph):\n \"\"\" Render parabolas.\n\n Example\n -------\n\n .. bokeh-plot:: ../tests/glyphs/Quadratic.py\n :source-position: none\n\n *source:* `tests/glyphs/Quadratic.py `_\n\n \"\"\"\n\n x0 = NumberSpec(\"x0\", help=\"\"\"\n The x-coordinates of the starting points.\n \"\"\")\n\n y0 = NumberSpec(\"y0\", help=\"\"\"\n The y-coordinates of the starting points.\n \"\"\")\n\n x1 = NumberSpec(\"x1\", help=\"\"\"\n The x-coordinates of the ending points.\n \"\"\")\n\n y1 = NumberSpec(\"y1\", help=\"\"\"\n The y-coordinates of the ending points.\n \"\"\")\n\n cx = NumberSpec(\"cx\", help=\"\"\"\n The x-coordinates of the control points.\n \"\"\")\n\n cy = NumberSpec(\"cy\", help=\"\"\"\n The y-coordinates of the control points.\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the parabolas.\n \"\"\")\n\nclass Ray(Glyph):\n \"\"\" Render rays.\n\n Example\n -------\n\n .. 
bokeh-plot:: ../tests/glyphs/Ray.py\n :source-position: none\n\n *source:* `tests/glyphs/Ray.py `_\n\n \"\"\"\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates to start the rays.\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates to start the rays.\n \"\"\")\n\n angle = AngleSpec(\"angle\", help=\"\"\"\n The angles in radians to extend the rays, as measured from the\n horizontal.\n \"\"\")\n\n length = DistanceSpec(\"length\", help=\"\"\"\n The length to extend the ray. Note that this ``length`` defaults\n to screen units.\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the rays.\n \"\"\")\n\nclass Rect(Glyph):\n \"\"\" Render rectangles.\n\n Example\n -------\n\n .. bokeh-plot:: ../tests/glyphs/Rect.py\n :source-position: none\n\n *source:* `tests/glyphs/Rect.py `_\n\n \"\"\"\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates of the centers of the rectangles.\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates of the centers of the rectangles.\n \"\"\")\n\n width = DistanceSpec(\"width\", help=\"\"\"\n The overall widths of the rectangles.\n \"\"\")\n\n height = DistanceSpec(\"height\", help=\"\"\"\n The overall heights of the rectangles.\n \"\"\")\n\n angle = AngleSpec(\"angle\", help=\"\"\"\n The angles to rotate the rectangles, in radians, as measured from\n the horizontal.\n \"\"\")\n\n dilate = Bool(False, help=\"\"\"\n Whether to always round fractional pixel locations in such a way\n as to make the rectangles bigger.\n\n This setting may be useful if pixel rounding errors are causing\n rectangles to have a gap between them, when they should appear\n flush.\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the rectangles.\n \"\"\")\n\n fill_props = Include(FillProps, use_prefix=False, help=\"\"\"\n The %s values for the rectangles.\n \"\"\")\n\nclass Segment(Glyph):\n \"\"\" Render segments.\n\n Example\n -------\n\n 
.. bokeh-plot:: ../tests/glyphs/Segment.py\n :source-position: none\n\n *source:* `tests/glyphs/Segment.py `_\n\n \"\"\"\n\n x0 = NumberSpec(\"x0\", help=\"\"\"\n The x-coordinates of the starting points.\n \"\"\")\n\n y0 = NumberSpec(\"y0\", help=\"\"\"\n The y-coordinates of the starting points.\n \"\"\")\n\n x1 = NumberSpec(\"x1\", help=\"\"\"\n The x-coordinates of the ending points.\n \"\"\")\n\n y1 = NumberSpec(\"y1\", help=\"\"\"\n The y-coordinates of the ending points.\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the segments.\n \"\"\")\n\nclass Text(Glyph):\n \"\"\" Render text.\n\n Example\n -------\n\n .. bokeh-plot:: ../tests/glyphs/Text.py\n :source-position: none\n\n *source:* `tests/glyphs/Text.py `_\n\n \"\"\"\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates to locate the text anchors.\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates to locate the text anchors.\n \"\"\")\n\n text = StringSpec(\"text\", help=\"\"\"\n The text values to render.\n \"\"\")\n\n angle = AngleSpec(default=0, help=\"\"\"\n The angles to rotate the text, in radians,, as measured from the horizontal.\n \"\"\")\n\n x_offset = NumberSpec(default=0, help=\"\"\"\n Offset values to apply to the x-coordinates.\n\n This is useful, for instance, if it is desired to \"float\" text a fixed\n distance in screen units from a given data position.\n \"\"\")\n\n y_offset = NumberSpec(default=0, help=\"\"\"\n Offset values to apply to the y-coordinates.\n\n This is useful, for instance, if it is desired to \"float\" text a fixed\n distance in screen units from a given data position.\n \"\"\")\n\n text_props = Include(TextProps, use_prefix=False, help=\"\"\"\n The %s values for the text.\n \"\"\")\n\nclass Wedge(Glyph):\n \"\"\" Render wedges.\n\n Example\n -------\n\n .. 
bokeh-plot:: ../tests/glyphs/Wedge.py\n :source-position: none\n\n *source:* `tests/glyphs/Wedge.py `_\n\n \"\"\"\n\n x = NumberSpec(\"x\", help=\"\"\"\n The x-coordinates of the points of the wedges.\n \"\"\")\n\n y = NumberSpec(\"y\", help=\"\"\"\n The y-coordinates of the points of the wedges.\n \"\"\")\n\n radius = DistanceSpec(\"radius\", help=\"\"\"\n Radii of the wedges.\n \"\"\")\n\n start_angle = AngleSpec(\"start_angle\", help=\"\"\"\n The angles to start the wedges, in radians, as measured from the horizontal.\n \"\"\")\n\n end_angle = AngleSpec(\"end_angle\", help=\"\"\"\n The angles to end the wedges, in radians as measured from the horizontal.\n \"\"\")\n\n direction = Enum(Direction, help=\"\"\"\n Which direction to stroke between the start and end angles.\n \"\"\")\n\n line_props = Include(LineProps, use_prefix=False, help=\"\"\"\n The %s values for the wedges.\n \"\"\")\n\n fill_props = Include(FillProps, use_prefix=False, help=\"\"\"\n The %s values for the wedges.\n \"\"\")\n\n# XXX: allow `from bokeh.models.glyphs import *`\nfrom .markers import (Marker, Asterisk, Circle, CircleCross, CircleX, Cross,\n Diamond, DiamondCross, InvertedTriangle, Square,\n SquareCross, SquareX, Triangle, X)\n\n# Fool pyflakes\n(Marker, Asterisk, Circle, CircleCross, CircleX, Cross, Diamond, DiamondCross,\nInvertedTriangle, Square, SquareCross, SquareX, Triangle, X)\n", "meta": {"content_hash": "2fb1e55ec163f4d95798df477413a121", "timestamp": "", "source": "github", "line_count": 973, "max_line_length": 119, "avg_line_length": 26.082219938335047, "alnum_prop": 0.6164788399401055, "repo_name": "daodaoliang/bokeh", "id": "cd15e0575fe6415967abbe568934fda1d0eb3ac8", "size": "25408", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "bokeh/models/glyphs.py", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Batchfile", "bytes": "5455"}, {"name": "CSS", "bytes": "413395"}, {"name": "CoffeeScript", "bytes": "1995470"}, {"name": "HTML", 
"bytes": "1545838"}, {"name": "JavaScript", "bytes": "4747"}, {"name": "Makefile", "bytes": "5785"}, {"name": "Python", "bytes": "1381168"}, {"name": "Shell", "bytes": "13857"}]}} {"text": "stage: Verify\ngroup: Testing\ninfo: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/engineering/ux/technical-writing/#designated-technical-writers\ntype: reference, howto\n---\n\n# Load Performance Testing **(PREMIUM)**\n\n> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/10683) in [GitLab Premium](https://about.gitlab.com/pricing/) 13.2.\n\nWith Load Performance Testing, you can test the impact of any pending code changes\nto your application's backend in [GitLab CI/CD](../../../ci/README.md).\n\nGitLab uses [k6](https://k6.io/), a free and open source\ntool, for measuring the system performance of applications under\nload.\n\nUnlike [Browser Performance Testing](browser_performance_testing.md), which is\nused to measure how web sites perform in client browsers, Load Performance Testing\ncan be used to perform various types of [load tests](https://k6.io/docs/#use-cases)\nagainst application endpoints such as APIs, Web Controllers, and so on.\nThis can be used to test how the backend or the server performs at scale.\n\nFor example, you can use Load Performance Testing to perform many concurrent\nGET calls to a popular API endpoint in your application to see how it performs.\n\n## How Load Performance Testing works\n\nFirst, define a job in your `.gitlab-ci.yml` file that generates the\n[Load Performance report artifact](../../../ci/pipelines/job_artifacts.md#artifactsreportsload_performance-premium).\nGitLab checks this report, compares key load performance metrics\nbetween the source and target branches, and then shows the information in a merge request widget:\n\n![Load Performance Widget](img/load_performance_testing.png)\n\nNext, you need to configure the test environment and write the 
k6 test.\n\nThe key performance metrics that the merge request widget shows after the test completes are:\n\n- Checks: The percentage pass rate of the [checks](https://k6.io/docs/using-k6/checks) configured in the k6 test.\n- TTFB P90: The 90th percentile of how long it took to start receiving responses, aka the [Time to First Byte](https://en.wikipedia.org/wiki/Time_to_first_byte) (TTFB).\n- TTFB P95: The 95th percentile for TTFB.\n- RPS: The average requests per second (RPS) rate the test was able to achieve.\n\nNOTE: **Note:**\nIf the Load Performance report has no data to compare, such as when you add the\nLoad Performance job in your `.gitlab-ci.yml` for the very first time,\nthe Load Performance report widget won't show. It must have run at least\nonce on the target branch (`master`, for example), before it will display in a\nmerge request targeting that branch.\n\n## Configure the Load Performance Testing job\n\nConfiguring your Load Performance Testing job can be broken down into several distinct parts:\n\n- Determine the test parameters such as throughput, and so on.\n- Set up the target test environment for load performance testing.\n- Design and write the k6 test.\n\n### Determine the test parameters\n\nThe first thing you need to do is determine the [type of load test](https://k6.io/docs/test-types/introduction)\nyou want to run, and how it will run (for example, the number of users, throughput, and so on).\n\nRefer to the [k6 docs](https://k6.io/docs/), especially the [k6 testing guides](https://k6.io/docs/testing-guides),\nfor guidance on the above and more.\n\n### Test Environment setup\n\nA large part of the effort around load performance testing is to prepare the target test environment\nfor high loads. 
You should ensure it's able to handle the\n[throughput](https://k6.io/blog/monthly-visits-concurrent-users) it will be tested with.\n\nIt's also typically required to have representative test data in the target environment\nfor the load performance test to use.\n\nWe strongly recommend [not running these tests against a production environment](https://k6.io/our-beliefs#load-test-in-a-pre-production-environment).\n\n### Write the load performance test\n\nAfter the environment is prepared, you can write the k6 test itself. k6 is a flexible\ntool and can be used to run [many kinds of performance tests](https://k6.io/docs/test-types/introduction).\nRefer to the [k6 documentation](https://k6.io/docs/) for detailed information on how to write tests.\n\n### Configure the test in GitLab CI/CD\n\nWhen your k6 test is ready, the next step is to configure the load performance\ntesting job in GitLab CI/CD. The easiest way to do this is to use the\n[`Verify/Load-Performance-Testing.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Verify/Load-Performance-Testing.gitlab-ci.yml)\ntemplate that is included with GitLab.\n\nNOTE: **Note:**\nFor large scale k6 tests you need to ensure the GitLab Runner instance performing the actual\ntest is able to handle running the test. Refer to [k6's guidance](https://k6.io/docs/testing-guides/running-large-tests#hardware-considerations)\nfor spec details. The [default shared GitLab.com runners](../../gitlab_com/#linux-shared-runners)\nlikely have insufficient specs to handle most large k6 tests.\n\nThis template runs the\n[k6 Docker container](https://hub.docker.com/r/loadimpact/k6/) in the job and provides several ways to customize the\njob.\n\nAn example configuration workflow:\n\n1. Set up a GitLab Runner that can run Docker containers, such as a Runner using the\n [Docker-in-Docker workflow](../../../ci/docker/using_docker_build.md#use-docker-in-docker-workflow-with-docker-executor).\n1. 
Configure the default Load Performance Testing CI job in your `.gitlab-ci.yml` file.\n You need to include the template and configure it with variables:\n\n ```yaml\n include:\n template: Verify/Load-Performance-Testing.gitlab-ci.yml\n\n load_performance:\n variables:\n K6_TEST_FILE: \n ```\n\nThe above example creates a `load_performance` job in your CI/CD pipeline that runs\nthe k6 test.\n\nNOTE: **Note:**\nFor Kubernetes setups a different template should be used: [`Jobs/Load-Performance-Testing.gitlab-ci.yml`](https://gitlab.com/gitlab-org/gitlab/blob/master/lib/gitlab/ci/templates/Jobs/Load-Performance-Testing.gitlab-ci.yml).\n\nk6 has [various options](https://k6.io/docs/using-k6/options) to configure how it will run tests, such as what throughput (RPS) to run with,\nhow long the test should run, and so on. Almost all options can be configured in the test itself, but as\nyou can also pass command line options via the `K6_OPTIONS` variable.\n\nFor example, you can override the duration of the test with a CLI option:\n\n```yaml\n include:\n template: Verify/Load-Performance-Testing.gitlab-ci.yml\n\n load_performance:\n variables:\n K6_TEST_FILE: \n K6_OPTIONS: '--duration 30s'\n```\n\nGitLab only displays the key performance metrics in the MR widget if k6's results are saved\nvia [summary export](https://k6.io/docs/results-visualization/json#summary-export)\nas a [Load Performance report artifact](../../../ci/pipelines/job_artifacts.md#artifactsreportsload_performance-premium).\nThe latest Load Performance artifact available is always used, using the\nsummary values from the test.\n\nIf [GitLab Pages](../pages/index.md) is enabled, you can view the report directly in your browser.\n\n### Load Performance testing in Review Apps\n\nThe CI/CD YAML configuration example above works for testing against static environments,\nbut it can be extended to work with [review apps](../../../ci/review_apps) or\n[dynamic environments](../../../ci/environments) with a few extra 
steps.\n\nThe best approach is to capture the dynamic URL into a custom environment variable that\nis then [inherited](../../../ci/variables/README.md#inherit-environment-variables)\nby the `load_performance` job. The k6 test script to be run should then be configured to\nuse that environment URL, such as: ``http.get(`${__ENV.ENVIRONMENT_URL`})``.\n\nFor example:\n\n1. In the `review` job:\n 1. Capture the dynamic URL and save it into a `.env` file, e.g. `echo \"ENVIRONMENT_URL=$CI_ENVIRONMENT_URL\" >> review.env`.\n 1. Set the `.env` file to be an [`artifacts:reports:dotenv` report](../../../ci/variables/README.md#inherit-environment-variables).\n1. Set the `load_performance` job to depend on the review job, so it inherits the environment variable.\n1. Configure the k6 test script to use the environment variable in it's steps.\n\nYour `.gitlab-ci.yml` file might be similar to:\n\n```yaml\nstages:\n - deploy\n - performance\n\ninclude:\n template: Verify/Load-Performance-Testing.gitlab-ci.yml\n\nreview:\n stage: deploy\n environment:\n name: review/$CI_COMMIT_REF_NAME\n url: http://$CI_ENVIRONMENT_SLUG.example.com\n script:\n - run_deploy_script\n - echo \"ENVIRONMENT_URL=$CI_ENVIRONMENT_URL\" >> review.env\n artifacts:\n reports:\n dotenv:\n review.env\n rules:\n - if: '$CI_COMMIT_BRANCH' # Modify to match your pipeline rules, or use `only/except` if needed.\n\nload_performance:\n dependencies:\n - review\n rules:\n - if: '$CI_COMMIT_BRANCH' # Modify to match your pipeline rules, or use `only/except` if needed.\n```\n", "meta": {"content_hash": "675d5958436694ac01e56f97ad21f2c6", "timestamp": "", "source": "github", "line_count": 197, "max_line_length": 225, "avg_line_length": 45.9492385786802, "alnum_prop": 0.7568493150684932, "repo_name": "mmkassem/gitlabhq", "id": "97f4f202ab376e7a67e7fbf50bda0412315358d7", "size": "9056", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "doc/user/project/merge_requests/load_performance_testing.md", "mode": 
"33188", "license": "mit", "language": [{"name": "CSS", "bytes": "113683"}, {"name": "CoffeeScript", "bytes": "139197"}, {"name": "Cucumber", "bytes": "119759"}, {"name": "HTML", "bytes": "447030"}, {"name": "JavaScript", "bytes": "29805"}, {"name": "Ruby", "bytes": "2417833"}, {"name": "Shell", "bytes": "14336"}]}} {"text": "import { NgModule } from '@angular/core';\nimport { Routes, RouterModule } from '@angular/router';\nimport {DashboardComponent} from './dashboard.component';\n\nconst routes: Routes = [\n {\n path: '',\n component: DashboardComponent\n }\n];\n\n@NgModule({\n imports: [RouterModule.forChild(routes)],\n exports: [RouterModule]\n})\nexport class DashboardRoutingModule { }\n", "meta": {"content_hash": "eae6b9a2be06708b54bbe807aa5a233f", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 57, "avg_line_length": 22.9375, "alnum_prop": 0.7002724795640327, "repo_name": "aaronfurtado93/pwa-core", "id": "ee7ab816e54c01c739a14f328bf3f3639a9f78a5", "size": "367", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/app/pages/dashboard/dashboard-routing.module.ts", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "2373"}, {"name": "HTML", "bytes": "7291"}, {"name": "JavaScript", "bytes": "1645"}, {"name": "TypeScript", "bytes": "56424"}]}} {"text": "var soundMap = [{\n name: 'river water',\n url: \"http://hifi-production.s3.amazonaws.com/DomainContent/Toybox/sounds/Water_Lap_River_Edge_Gentle.L.wav\",\n audioOptions: {\n position: {\n x: 580,\n y: 493,\n z: 528\n },\n volume: 0.4,\n loop: true\n }\n}, {\n name: 'windmill',\n url: \"http://hifi-production.s3.amazonaws.com/DomainContent/Toybox/sounds/WINDMILL_Mono.wav\",\n audioOptions: {\n position: {\n x: 530,\n y: 516,\n z: 518\n },\n volume: 0.08,\n loop: true\n }\n}, {\n name: 'insects',\n url: \"http://hifi-production.s3.amazonaws.com/DomainContent/Toybox/sounds/insects3.wav\",\n audioOptions: {\n position: {\n x: 560,\n y: 495,\n z: 
474\n },\n volume: 0.25,\n loop: true\n }\n}, {\n name: 'fireplace',\n url: \"http://hifi-production.s3.amazonaws.com/DomainContent/Toybox/sounds/0619_Fireplace__Tree_B.L.wav\",\n audioOptions: {\n position: {\n x: 551.61,\n y: 494.88,\n z: 502.00\n },\n volume: 0.25,\n loop: true\n }\n}, {\n name: 'cat purring',\n url: \"http://hifi-production.s3.amazonaws.com/DomainContent/Toybox/sounds/Cat_Purring_Deep_Low_Snor.wav\",\n audioOptions: {\n position: {\n x: 551.48,\n y: 495.60,\n z: 502.08\n },\n volume: 0.03,\n loop: true\n }\n}, {\n name: 'dogs barking',\n url: \"http://hifi-production.s3.amazonaws.com/DomainContent/Toybox/sounds/dogs_barking_1.L.wav\",\n audioOptions: {\n position: {\n x: 523,\n y: 494.88,\n z: 469\n },\n volume: 0.05,\n loop: false\n },\n playAtInterval: 60 * 1000\n}, {\n name: 'arcade game',\n url: \"http://hifi-production.s3.amazonaws.com/DomainContent/Toybox/sounds/ARCADE_GAMES_VID.L.L.wav\",\n audioOptions: {\n position: {\n x: 543.77,\n y: 495.07,\n z: 502.25\n },\n volume: 0.01,\n loop: false,\n },\n playAtInterval: 90 * 1000\n}];\n\nfunction loadSounds() {\n soundMap.forEach(function(soundData) {\n soundData.sound = SoundCache.getSound(soundData.url);\n });\n}\n\nfunction playSound(soundData) {\n if (soundData.injector) {\n // try/catch in case the injector QObject has been deleted already\n try {\n soundData.injector.stop();\n } catch (e) {}\n }\n soundData.injector = Audio.playSound(soundData.sound, soundData.audioOptions);\n}\n\nfunction checkDownloaded(soundData) {\n if (soundData.sound.downloaded) {\n\n Script.clearInterval(soundData.downloadTimer);\n\n if (soundData.hasOwnProperty('playAtInterval')) {\n soundData.playingInterval = Script.setInterval(function() {\n playSound(soundData)\n }, soundData.playAtInterval);\n } else {\n playSound(soundData);\n }\n\n }\n}\n\nfunction startCheckDownloadedTimers() {\n soundMap.forEach(function(soundData) {\n soundData.downloadTimer = Script.setInterval(function() {\n 
checkDownloaded(soundData);\n }, 1000);\n });\n}\n\nScript.scriptEnding.connect(function() {\n soundMap.forEach(function(soundData) {\n\n if (soundData.hasOwnProperty(\"injector\")) {\n soundData.injector.stop();\n }\n\n if (soundData.hasOwnProperty(\"downloadTimer\")) {\n Script.clearInterval(soundData.downloadTimer);\n }\n\n if (soundData.hasOwnProperty(\"playingInterval\")) {\n Script.clearInterval(soundData.playingInterval);\n }\n\n });\n\n});\n\nloadSounds();\nstartCheckDownloadedTimers();", "meta": {"content_hash": "7591a315854d23c17be8ef168571926c", "timestamp": "", "source": "github", "line_count": 149, "max_line_length": 113, "avg_line_length": 24.973154362416107, "alnum_prop": 0.5530771298038162, "repo_name": "misslivirose/hifi-content", "id": "e73630e38068ba37528ceb120332fc10312fa904", "size": "4031", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "DomainContent/Toybox/AC_scripts/toybox_sounds.js", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "98041"}, {"name": "GLSL", "bytes": "3603"}, {"name": "HTML", "bytes": "316682"}, {"name": "JavaScript", "bytes": "3778239"}, {"name": "Makefile", "bytes": "317"}]}} {"text": "\n\n#include \"types.h\"\n#include \"qpid/Msg.h\"\n#include \"qpid/broker/Message.h\"\n#include \"qpid/broker/Queue.h\"\n#include \"qpid/Exception.h\"\n#include \n#include \n#include \n#include \n\nnamespace qpid {\nnamespace ha {\n\nusing namespace std;\n\nconst string QPID_REPLICATE(\"qpid.replicate\");\nconst string QPID_HA_UUID(\"qpid.ha-uuid\");\n\nconst char* QPID_HA_PREFIX = \"qpid.ha-\";\nconst char* QUEUE_REPLICATOR_PREFIX = \"qpid.ha-q:\";\n\nbool startsWith(const string& name, const string& prefix) {\n return name.compare(0, prefix.size(), prefix) == 0;\n}\n\nstring EnumBase::str() const {\n assert(value < count);\n return names[value];\n}\n\nvoid EnumBase::parse(const string& s) {\n if (!parseNoThrow(s))\n throw Exception(QPID_MSG(\"Invalid \" << name << \" value: \" << 
s));\n}\n\nbool EnumBase::parseNoThrow(const string& s) {\n const char** i = find(names, names+count, s);\n value = i - names;\n return value < count;\n}\n\ntemplate <> const char* Enum::NAME = \"replication\";\ntemplate <> const char* Enum::NAMES[] = { \"none\", \"configuration\", \"all\" };\ntemplate <> const size_t Enum::N = 3;\n\ntemplate <> const char* Enum::NAME = \"HA broker status\";\n\n// NOTE: Changing status names will have an impact on qpid-ha and\n// the qpidd-primary init script.\n// Don't change them unless you are going to update all dependent code.\n//\ntemplate <> const char* Enum::NAMES[] = {\n \"joining\", \"catchup\", \"ready\", \"recovering\", \"active\", \"standalone\"\n};\ntemplate <> const size_t Enum::N = 6;\n\nostream& operator<<(ostream& o, EnumBase e) {\n return o << e.str();\n}\n\nistream& operator>>(istream& i, EnumBase& e) {\n string s;\n i >> s;\n e.parse(s);\n return i;\n}\n\nostream& operator<<(ostream& o, const UuidSet& ids) {\n ostream_iterator out(o, \" \");\n o << \"{ \";\n for (UuidSet::const_iterator i = ids.begin(); i != ids.end(); ++i)\n o << shortStr(*i) << \" \";\n o << \"}\";\n return o;\n}\n\n\nstd::string logMessageId(const std::string& q, QueuePosition pos, ReplicationId id) {\n return Msg() << q << \"[\" << pos << \"]\" << \"=\" << id;\n}\nstd::string logMessageId(const std::string& q, ReplicationId id) {\n return Msg() << q << \"[]\" << \"=\" << id;\n}\nstd::string logMessageId(const std::string& q, const broker::Message& m) {\n return logMessageId(q, m.getSequence(), m.getReplicationId());\n}\nstd::string logMessageId(const broker::Queue& q, QueuePosition pos, ReplicationId id) {\n return logMessageId(q.getName(), pos, id);\n}\nstd::string logMessageId(const broker::Queue& q, ReplicationId id) {\n return logMessageId(q.getName(), id);\n}\nstd::string logMessageId(const broker::Queue& q, const broker::Message& m) {\n return logMessageId(q.getName(), m);\n}\n\nvoid UuidSet::encode(framing::Buffer& b) const {\n 
b.putLong(size());\n for (const_iterator i = begin(); i != end(); ++i)\n b.putRawData(i->data(), i->size());\n}\n\nvoid UuidSet::decode(framing::Buffer& b) {\n size_t n = b.getLong();\n for ( ; n > 0; --n) {\n types::Uuid id;\n b.getRawData(const_cast(id.data()), id.size());\n insert(id);\n }\n}\n\nsize_t UuidSet::encodedSize() const {\n return sizeof(uint32_t) + size()*16;\n}\n\n\n}} // namespace qpid::ha\n", "meta": {"content_hash": "f21d21cc471937097f1fbe5958e3b91d", "timestamp": "", "source": "github", "line_count": 119, "max_line_length": 91, "avg_line_length": 27.88235294117647, "alnum_prop": 0.6347197106690777, "repo_name": "mbroadst/debian-qpid-cpp", "id": "3088661c954598de1fa21e93a4e568376e1b2305", "size": "4131", "binary": false, "copies": "3", "ref": "refs/heads/trusty", "path": "src/qpid/ha/types.cpp", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "2126"}, {"name": "C", "bytes": "67620"}, {"name": "C#", "bytes": "121990"}, {"name": "C++", "bytes": "7291355"}, {"name": "CMake", "bytes": "169753"}, {"name": "Cucumber", "bytes": "15299"}, {"name": "Emacs Lisp", "bytes": "7379"}, {"name": "HTML", "bytes": "4773"}, {"name": "Makefile", "bytes": "1261"}, {"name": "Perl", "bytes": "86022"}, {"name": "Perl6", "bytes": "2703"}, {"name": "PowerShell", "bytes": "51728"}, {"name": "Python", "bytes": "1523448"}, {"name": "Ruby", "bytes": "305919"}, {"name": "Shell", "bytes": "128410"}]}} {"text": "\npackage com.amazonaws.services.lambda.model;\n\nimport java.io.Serializable;\n\nimport com.amazonaws.AmazonWebServiceRequest;\n\n/**\n * Container for the parameters to the {@link com.amazonaws.services.lambda.AWSLambda#listEventSourceMappings(ListEventSourceMappingsRequest) ListEventSourceMappings operation}.\n *

\n * Returns a list of event source mappings you created using the\n * CreateEventSourceMapping (see CreateEventSourceMapping),\n * where you identify a stream as an event source. This list does not\n * include Amazon S3 event sources.\n *

\n *

\n * For each mapping, the API returns configuration information. You can\n * optionally specify filters to retrieve specific event source mappings.\n *

\n *

\n * This operation requires permission for the\n * lambda:ListEventSourceMappings action.\n *

\n *\n * @see com.amazonaws.services.lambda.AWSLambda#listEventSourceMappings(ListEventSourceMappingsRequest)\n */\npublic class ListEventSourceMappingsRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {\n\n /**\n * The Amazon Resource Name (ARN) of the Amazon Kinesis stream.\n *

\n * Constraints:
\n * Pattern: arn:aws:([a-zA-Z0-9\\-])+:([a-z]{2}-[a-z]+-\\d{1})?:(\\d{12})?:(.*)
\n */\n private String eventSourceArn;\n\n /**\n * The name of the Lambda function.

You can specify an unqualified\n * function name (for example, \"Thumbnail\") or you can specify Amazon\n * Resource Name (ARN) of the function (for example,\n * \"arn:aws:lambda:us-west-2:account-id:function:ThumbNail\"). AWS Lambda\n * also allows you to specify only the account ID qualifier (for example,\n * \"account-id:Thumbnail\"). Note that the length constraint applies only\n * to the ARN. If you specify only the function name, it is limited to 64\n * character in length.\n *

\n * Constraints:
\n * Length: 1 - 140
\n * Pattern: (arn:aws:lambda:)?([a-z]{2}-[a-z]+-\\d{1}:)?(\\d{12}:)?(function:)?([a-zA-Z0-9-_]+)(:(\\$LATEST|[a-zA-Z0-9-_]+))?
\n */\n private String functionName;\n\n /**\n * Optional string. An opaque pagination token returned from a previous\n * ListEventSourceMappings operation. If present, specifies\n * to continue the list from where the returning call left off.\n */\n private String marker;\n\n /**\n * Optional integer. Specifies the maximum number of event sources to\n * return in response. This value must be greater than 0.\n *

\n * Constraints:
\n * Range: 1 - 10000
\n */\n private Integer maxItems;\n\n /**\n * The Amazon Resource Name (ARN) of the Amazon Kinesis stream.\n *

\n * Constraints:
\n * Pattern: arn:aws:([a-zA-Z0-9\\-])+:([a-z]{2}-[a-z]+-\\d{1})?:(\\d{12})?:(.*)
\n *\n * @return The Amazon Resource Name (ARN) of the Amazon Kinesis stream.\n */\n public String getEventSourceArn() {\n return eventSourceArn;\n }\n \n /**\n * The Amazon Resource Name (ARN) of the Amazon Kinesis stream.\n *

\n * Constraints:
\n * Pattern: arn:aws:([a-zA-Z0-9\\-])+:([a-z]{2}-[a-z]+-\\d{1})?:(\\d{12})?:(.*)
\n *\n * @param eventSourceArn The Amazon Resource Name (ARN) of the Amazon Kinesis stream.\n */\n public void setEventSourceArn(String eventSourceArn) {\n this.eventSourceArn = eventSourceArn;\n }\n \n /**\n * The Amazon Resource Name (ARN) of the Amazon Kinesis stream.\n *

\n * Returns a reference to this object so that method calls can be chained together.\n *

\n * Constraints:
\n * Pattern: arn:aws:([a-zA-Z0-9\\-])+:([a-z]{2}-[a-z]+-\\d{1})?:(\\d{12})?:(.*)
\n *\n * @param eventSourceArn The Amazon Resource Name (ARN) of the Amazon Kinesis stream.\n *\n * @return A reference to this updated object so that method calls can be chained\n * together.\n */\n public ListEventSourceMappingsRequest withEventSourceArn(String eventSourceArn) {\n this.eventSourceArn = eventSourceArn;\n return this;\n }\n\n /**\n * The name of the Lambda function.

You can specify an unqualified\n * function name (for example, \"Thumbnail\") or you can specify Amazon\n * Resource Name (ARN) of the function (for example,\n * \"arn:aws:lambda:us-west-2:account-id:function:ThumbNail\"). AWS Lambda\n * also allows you to specify only the account ID qualifier (for example,\n * \"account-id:Thumbnail\"). Note that the length constraint applies only\n * to the ARN. If you specify only the function name, it is limited to 64\n * character in length.\n *

\n * Constraints:
\n * Length: 1 - 140
\n * Pattern: (arn:aws:lambda:)?([a-z]{2}-[a-z]+-\\d{1}:)?(\\d{12}:)?(function:)?([a-zA-Z0-9-_]+)(:(\\$LATEST|[a-zA-Z0-9-_]+))?
\n *\n * @return The name of the Lambda function.

You can specify an unqualified\n * function name (for example, \"Thumbnail\") or you can specify Amazon\n * Resource Name (ARN) of the function (for example,\n * \"arn:aws:lambda:us-west-2:account-id:function:ThumbNail\"). AWS Lambda\n * also allows you to specify only the account ID qualifier (for example,\n * \"account-id:Thumbnail\"). Note that the length constraint applies only\n * to the ARN. If you specify only the function name, it is limited to 64\n * character in length.\n */\n public String getFunctionName() {\n return functionName;\n }\n \n /**\n * The name of the Lambda function.

You can specify an unqualified\n * function name (for example, \"Thumbnail\") or you can specify Amazon\n * Resource Name (ARN) of the function (for example,\n * \"arn:aws:lambda:us-west-2:account-id:function:ThumbNail\"). AWS Lambda\n * also allows you to specify only the account ID qualifier (for example,\n * \"account-id:Thumbnail\"). Note that the length constraint applies only\n * to the ARN. If you specify only the function name, it is limited to 64\n * character in length.\n *

\n * Constraints:
\n * Length: 1 - 140
\n * Pattern: (arn:aws:lambda:)?([a-z]{2}-[a-z]+-\\d{1}:)?(\\d{12}:)?(function:)?([a-zA-Z0-9-_]+)(:(\\$LATEST|[a-zA-Z0-9-_]+))?
\n *\n * @param functionName The name of the Lambda function.

You can specify an unqualified\n * function name (for example, \"Thumbnail\") or you can specify Amazon\n * Resource Name (ARN) of the function (for example,\n * \"arn:aws:lambda:us-west-2:account-id:function:ThumbNail\"). AWS Lambda\n * also allows you to specify only the account ID qualifier (for example,\n * \"account-id:Thumbnail\"). Note that the length constraint applies only\n * to the ARN. If you specify only the function name, it is limited to 64\n * character in length.\n */\n public void setFunctionName(String functionName) {\n this.functionName = functionName;\n }\n \n /**\n * The name of the Lambda function.

You can specify an unqualified\n * function name (for example, \"Thumbnail\") or you can specify Amazon\n * Resource Name (ARN) of the function (for example,\n * \"arn:aws:lambda:us-west-2:account-id:function:ThumbNail\"). AWS Lambda\n * also allows you to specify only the account ID qualifier (for example,\n * \"account-id:Thumbnail\"). Note that the length constraint applies only\n * to the ARN. If you specify only the function name, it is limited to 64\n * character in length.\n *

\n * Returns a reference to this object so that method calls can be chained together.\n *

\n * Constraints:
\n * Length: 1 - 140
\n * Pattern: (arn:aws:lambda:)?([a-z]{2}-[a-z]+-\\d{1}:)?(\\d{12}:)?(function:)?([a-zA-Z0-9-_]+)(:(\\$LATEST|[a-zA-Z0-9-_]+))?
\n *\n * @param functionName The name of the Lambda function.

You can specify an unqualified\n * function name (for example, \"Thumbnail\") or you can specify Amazon\n * Resource Name (ARN) of the function (for example,\n * \"arn:aws:lambda:us-west-2:account-id:function:ThumbNail\"). AWS Lambda\n * also allows you to specify only the account ID qualifier (for example,\n * \"account-id:Thumbnail\"). Note that the length constraint applies only\n * to the ARN. If you specify only the function name, it is limited to 64\n * character in length.\n *\n * @return A reference to this updated object so that method calls can be chained\n * together.\n */\n public ListEventSourceMappingsRequest withFunctionName(String functionName) {\n this.functionName = functionName;\n return this;\n }\n\n /**\n * Optional string. An opaque pagination token returned from a previous\n * ListEventSourceMappings operation. If present, specifies\n * to continue the list from where the returning call left off.\n *\n * @return Optional string. An opaque pagination token returned from a previous\n * ListEventSourceMappings operation. If present, specifies\n * to continue the list from where the returning call left off.\n */\n public String getMarker() {\n return marker;\n }\n \n /**\n * Optional string. An opaque pagination token returned from a previous\n * ListEventSourceMappings operation. If present, specifies\n * to continue the list from where the returning call left off.\n *\n * @param marker Optional string. An opaque pagination token returned from a previous\n * ListEventSourceMappings operation. If present, specifies\n * to continue the list from where the returning call left off.\n */\n public void setMarker(String marker) {\n this.marker = marker;\n }\n \n /**\n * Optional string. An opaque pagination token returned from a previous\n * ListEventSourceMappings operation. If present, specifies\n * to continue the list from where the returning call left off.\n *

\n * Returns a reference to this object so that method calls can be chained together.\n *\n * @param marker Optional string. An opaque pagination token returned from a previous\n * ListEventSourceMappings operation. If present, specifies\n * to continue the list from where the returning call left off.\n *\n * @return A reference to this updated object so that method calls can be chained\n * together.\n */\n public ListEventSourceMappingsRequest withMarker(String marker) {\n this.marker = marker;\n return this;\n }\n\n /**\n * Optional integer. Specifies the maximum number of event sources to\n * return in response. This value must be greater than 0.\n *

\n * Constraints:
\n * Range: 1 - 10000
\n *\n * @return Optional integer. Specifies the maximum number of event sources to\n * return in response. This value must be greater than 0.\n */\n public Integer getMaxItems() {\n return maxItems;\n }\n \n /**\n * Optional integer. Specifies the maximum number of event sources to\n * return in response. This value must be greater than 0.\n *

\n * Constraints:
\n * Range: 1 - 10000
\n *\n * @param maxItems Optional integer. Specifies the maximum number of event sources to\n * return in response. This value must be greater than 0.\n */\n public void setMaxItems(Integer maxItems) {\n this.maxItems = maxItems;\n }\n \n /**\n * Optional integer. Specifies the maximum number of event sources to\n * return in response. This value must be greater than 0.\n *

\n * Returns a reference to this object so that method calls can be chained together.\n *

\n * Constraints:
\n * Range: 1 - 10000
\n *\n * @param maxItems Optional integer. Specifies the maximum number of event sources to\n * return in response. This value must be greater than 0.\n *\n * @return A reference to this updated object so that method calls can be chained\n * together.\n */\n public ListEventSourceMappingsRequest withMaxItems(Integer maxItems) {\n this.maxItems = maxItems;\n return this;\n }\n\n /**\n * Returns a string representation of this object; useful for testing and\n * debugging.\n *\n * @return A string representation of this object.\n *\n * @see java.lang.Object#toString()\n */\n @Override\n public String toString() {\n StringBuilder sb = new StringBuilder();\n sb.append(\"{\");\n if (getEventSourceArn() != null) sb.append(\"EventSourceArn: \" + getEventSourceArn() + \",\");\n if (getFunctionName() != null) sb.append(\"FunctionName: \" + getFunctionName() + \",\");\n if (getMarker() != null) sb.append(\"Marker: \" + getMarker() + \",\");\n if (getMaxItems() != null) sb.append(\"MaxItems: \" + getMaxItems() );\n sb.append(\"}\");\n return sb.toString();\n }\n \n @Override\n public int hashCode() {\n final int prime = 31;\n int hashCode = 1;\n \n hashCode = prime * hashCode + ((getEventSourceArn() == null) ? 0 : getEventSourceArn().hashCode()); \n hashCode = prime * hashCode + ((getFunctionName() == null) ? 0 : getFunctionName().hashCode()); \n hashCode = prime * hashCode + ((getMarker() == null) ? 0 : getMarker().hashCode()); \n hashCode = prime * hashCode + ((getMaxItems() == null) ? 
0 : getMaxItems().hashCode()); \n return hashCode;\n }\n \n @Override\n public boolean equals(Object obj) {\n if (this == obj) return true;\n if (obj == null) return false;\n\n if (obj instanceof ListEventSourceMappingsRequest == false) return false;\n ListEventSourceMappingsRequest other = (ListEventSourceMappingsRequest)obj;\n \n if (other.getEventSourceArn() == null ^ this.getEventSourceArn() == null) return false;\n if (other.getEventSourceArn() != null && other.getEventSourceArn().equals(this.getEventSourceArn()) == false) return false; \n if (other.getFunctionName() == null ^ this.getFunctionName() == null) return false;\n if (other.getFunctionName() != null && other.getFunctionName().equals(this.getFunctionName()) == false) return false; \n if (other.getMarker() == null ^ this.getMarker() == null) return false;\n if (other.getMarker() != null && other.getMarker().equals(this.getMarker()) == false) return false; \n if (other.getMaxItems() == null ^ this.getMaxItems() == null) return false;\n if (other.getMaxItems() != null && other.getMaxItems().equals(this.getMaxItems()) == false) return false; \n return true;\n }\n \n @Override\n public ListEventSourceMappingsRequest clone() {\n \n return (ListEventSourceMappingsRequest) super.clone();\n }\n\n}\n ", "meta": {"content_hash": "cb90ef3b857eb8c5397f5a3984299a48", "timestamp": "", "source": "github", "line_count": 349, "max_line_length": 177, "avg_line_length": 43.64756446991404, "alnum_prop": 0.6281756712400709, "repo_name": "sdole/aws-sdk-java", "id": "b9a189e8fa04f19eb34f188672731c8eda1ff188", "size": "15820", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "aws-java-sdk-lambda/src/main/java/com/amazonaws/services/lambda/model/ListEventSourceMappingsRequest.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "97734456"}, {"name": "Scilab", "bytes": "2354"}]}} {"text": "/* */ \nvar MapCache = require('./_MapCache');\nvar FUNC_ERROR_TEXT = 'Expected a 
function';\nfunction memoize(func, resolver) {\n if (typeof func != 'function' || (resolver != null && typeof resolver != 'function')) {\n throw new TypeError(FUNC_ERROR_TEXT);\n }\n var memoized = function() {\n var args = arguments,\n key = resolver ? resolver.apply(this, args) : args[0],\n cache = memoized.cache;\n if (cache.has(key)) {\n return cache.get(key);\n }\n var result = func.apply(this, args);\n memoized.cache = cache.set(key, result) || cache;\n return result;\n };\n memoized.cache = new (memoize.Cache || MapCache);\n return memoized;\n}\nmemoize.Cache = MapCache;\nmodule.exports = memoize;\n", "meta": {"content_hash": "b944ad8be55e4358263ada0f450ec961", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 89, "avg_line_length": 31.217391304347824, "alnum_prop": 0.6309192200557103, "repo_name": "onlabsorg/olowc", "id": "b51dbeabd4318c9550b9f3cfb116530a86677b74", "size": "718", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "jspm_packages/npm/lodash@4.17.4/memoize.js", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "2128"}, {"name": "JavaScript", "bytes": "2069038"}, {"name": "Makefile", "bytes": "188"}, {"name": "OCaml", "bytes": "485"}, {"name": "Shell", "bytes": "973"}]}} {"text": "\ufeffusing UnityEngine;\nusing System.Collections;\nusing System.Collections.Generic;\nusing System.Xml;\nusing System;\n\npublic class SystemScrollviews : MonoBehaviour \n{\n\tpublic GameObject improvementMessageLabel, availableImprovements, buttonLabel, improvementParent, improvementsWindow, improvementDetails;\n\tpublic int techTierToShow, selectedPlanet, selectedSystem, selectedSlot;\n\tpublic GameObject[] tabs = new GameObject[4];\n\tprivate string improvementText, currentImprovement;\n\tpublic UILabel improvementLabel, improvementWealthCost, improvementPowerCost, systemEffects, improvementWealthUpkeep, improvementPowerUpkeep, systemUpkeepPower, systemUpkeepWealth;\n\n\tpublic GameObject[] 
unbuiltImprovementList = new GameObject[10];\n\tpublic GameObject[] improvementsList = new GameObject[8];\n\tprivate ImprovementsBasic improvementsBasic;\n\n\tvoid Start()\n\t{\t\t\n\t\tSetUpImprovementLabels ();\n\t\tselectedPlanet = -1;\n\t}\n\n\tprivate void SetUpImprovementLabels()\n\t{\t\t\n\t\tfor(int i = 0; i < improvementsList.Length; ++i)\n\t\t{\n\t\t\tEventDelegate.Add(improvementsList[i].GetComponent().onClick, OpenImprovementsWindow);\n\t\t\t\n\t\t\tNGUITools.SetActive(improvementsList[i], false); //Default set improvement to false so it won't be shown in scrollview unless needed\n\t\t}\n\n\t\tfor(int i = 0; i < unbuiltImprovementList.Length; ++i)\n\t\t{\n\t\t\tNGUITools.SetActive(unbuiltImprovementList[i], false);\n\n\t\t\tEventDelegate.Add(unbuiltImprovementList[i].GetComponent().onClick, ShowDetails);\n\t\t}\n\t}\n\n\tprivate void OpenImprovementsWindow()\n\t{\n\t\tif(MasterScript.systemListConstructor.systemList[selectedSystem].systemOwnedBy == MasterScript.playerTurnScript.playerRace)\n\t\t{\n\t\t\tNGUITools.SetActive (improvementsWindow, true);\n\t\t\tNGUITools.SetActive (improvementDetails, false);\n\t\t\tcurrentImprovement = null;\n\t\t\n\t\t\tbool reset = false;\n\n\t\t\tfor(int i = 0; i < tabs.Length; ++i)\n\t\t\t{\n\t\t\t\tif(tabs[i].GetComponent().spriteName == \"Button Hover (Orange)\")\n\t\t\t\t{\n\t\t\t\t\tUpdateImprovementsWindow (i);\n\t\t\t\t\treset = true;\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif(reset == false)\n\t\t\t{\n\t\t\t\ttabs[0].GetComponent().enabled = false;\n\t\t\t\ttabs[0].GetComponent().spriteName = \"Button Hover (Orange)\";\n\t\t\t\tUpdateImprovementsWindow (0);\n\t\t\t}\n\n\t\t\tselectedSlot = -1;\n\t\t\t\n\t\t\tfor(int i = 0; i < improvementsList.Length; ++i)\n\t\t\t{\n\t\t\t\tif(UIButton.current.gameObject == improvementsList[i])\n\t\t\t\t{\n\t\t\t\t\tselectedSlot = i;\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tprivate void ShowDetails()\n\t{\n\t\tfor(int i = 0; i < 
unbuiltImprovementList.Length; ++i)\n\t\t{\n\t\t\tif(UIButton.current.gameObject == unbuiltImprovementList[i])\n\t\t\t{\n\t\t\t\tunbuiltImprovementList[i].GetComponent().enabled = false;\n\t\t\t\tunbuiltImprovementList[i].GetComponent().spriteName = \"Button Hover (Orange)\";\n\t\t\t\tcurrentImprovement = UIButton.current.transform.Find (\"Label\").gameObject.GetComponent().text;\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\telse\n\t\t\t{\n\t\t\t\tunbuiltImprovementList[i].GetComponent().spriteName = \"Button Click\";\n\t\t\t\tunbuiltImprovementList[i].GetComponent().enabled = true;\n\t\t\t}\n\t\t}\n\n\t\tVector3 tempPos = UIButton.current.transform.localPosition;\n\n\t\timprovementDetails.transform.localPosition = new Vector3 (tempPos.x + 265f, tempPos.y, tempPos.z); \n\n\t\tfor(int i = 0; i < MasterScript.systemListConstructor.basicImprovementsList.Count; ++i)\n\t\t{\n\t\t\tif(MasterScript.systemListConstructor.basicImprovementsList[i].name.ToUpper() == UIButton.current.transform.Find (\"Label\").GetComponent().text)\n\t\t\t{\n\t\t\t\timprovementLabel.text = MasterScript.systemListConstructor.basicImprovementsList[i].details;\n\n\t\t\t\timprovementPowerCost.text = MasterScript.systemListConstructor.basicImprovementsList[i].cost.ToString();\n\t\t\t\timprovementWealthCost.text = (MasterScript.systemListConstructor.basicImprovementsList[i].cost / 25).ToString();\n\n\t\t\t\timprovementPowerUpkeep.text = \"-\" + MasterScript.systemListConstructor.basicImprovementsList[i].powerUpkeep.ToString();\n\t\t\t\timprovementWealthUpkeep.text = \"-\" + MasterScript.systemListConstructor.basicImprovementsList[i].wealthUpkeep.ToString();\n\t\t\t}\n\t\t}\n\n\t\tNGUITools.SetActive (improvementDetails, true);\n\t}\n\n\tprivate void UpdateImprovementsWindow(int level)\n\t{\n\t\tfor(int i = 0; i < unbuiltImprovementList.Length; ++i)\n\t\t{\n\t\t\tNGUITools.SetActive(unbuiltImprovementList[i], false);\n\t\t}\n\n\t\tint j = 0;\n\n\t\tfor(int i = 0; i < improvementsBasic.listOfImprovements.Count; 
++i)\n\t\t{\n\t\t\tif(improvementsBasic.listOfImprovements[i].improvementLevel == level)\n\t\t\t{\n\t\t\t\tif(improvementsBasic.listOfImprovements[i].improvementCategory == \"Generic\" || improvementsBasic.listOfImprovements[i].improvementCategory == \"Defence\" \n\t\t\t\t || improvementsBasic.listOfImprovements[i].improvementCategory == MasterScript.playerTurnScript.playerRace)\n\t\t\t\t{\n\t\t\t\t\tif(improvementsBasic.listOfImprovements[i].hasBeenBuilt == false)\n\t\t\t\t\t{\n\t\t\t\t\t\tNGUITools.SetActive(unbuiltImprovementList[j], true);\n\t\t\t\t\t\t\n\t\t\t\t\t\tunbuiltImprovementList[j].transform.Find(\"Label\").GetComponent().text = improvementsBasic.listOfImprovements[i].improvementName.ToUpper();\n\n\t\t\t\t\t\t++j;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tfor(int i = j; j < unbuiltImprovementList.Length; ++j)\n\t\t{\n\t\t\tNGUITools.SetActive(unbuiltImprovementList[i], false);\n\t\t}\n\t}\n\n\tpublic void UpdateBuiltImprovements()\n\t{\n\t\tfor(int i = 0; i < improvementsList.Length; ++i) //For all improvement slots\n\t\t{\n\t\t\tif(i < MasterScript.systemListConstructor.systemList[selectedSystem].planetsInSystem[selectedPlanet].currentImprovementSlots) //If is equal to or less than planets slots\n\t\t\t{\n\t\t\t\tNGUITools.SetActive(improvementsList[i], true); //Activate\n\n\t\t\t\tif(MasterScript.systemListConstructor.systemList[selectedSystem].planetsInSystem[selectedPlanet].improvementsBuilt[i] != null) //If something built\n\t\t\t\t{\n\t\t\t\t\timprovementsList[i].transform.Find (\"Name\").GetComponent().text = MasterScript.systemListConstructor.systemList[selectedSystem].planetsInSystem[selectedPlanet].improvementsBuilt[i].ToUpper(); //Set text\n\t\t\t\t\timprovementsList[i].GetComponent().enabled = false;\n\t\t\t\t\timprovementsList[i].GetComponent().spriteName = \"Button Normal\";\n\t\t\t\t}\n\n\t\t\t\telse //Else say is empty\n\t\t\t\t{\n\t\t\t\t\timprovementsList[i].transform.Find (\"Name\").GetComponent().text = 
\"Empty\";\n\n\t\t\t\t\tif(selectedSlot == i)\n\t\t\t\t\t{\n\t\t\t\t\t\timprovementsList[i].GetComponent().enabled = false;\n\t\t\t\t\t\timprovementsList[i].GetComponent().spriteName = \"Button Hover (Orange)\";\n\t\t\t\t\t}\n\n\t\t\t\t\telse\n\t\t\t\t\t{\n\t\t\t\t\t\timprovementsList[i].GetComponent().enabled = true;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\telse //Else deactivate\n\t\t\t{\n\t\t\t\tNGUITools.SetActive(improvementsList[i], false);\n\t\t\t}\n\t\t}\n\t}\n\n\tprivate void UpdateTabs()\n\t{\n\t\tfor(int i = 0; i < tabs.Length; ++i)\n\t\t{\n\t\t\tif(i <= improvementsBasic.techTier)\n\t\t\t{\n\t\t\t\tif(tabs[i].GetComponent().spriteName == \"Button Hover (Orange)\")\n\t\t\t\t{\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t{\n\t\t\t\t\ttabs[i].GetComponent().enabled = true;\n\t\t\t\t\ttabs[i].GetComponent().spriteName = \"Button Normal\";\n\t\t\t\t}\n\t\t\t}\n\t\t\telse\n\t\t\t{\n\t\t\t\ttabs[i].GetComponent().enabled = false;\n\t\t\t\ttabs[i].GetComponent().spriteName = \"Button Deactivated\";\n\t\t\t}\n\t\t}\n\t}\n\t\n\tpublic void TabClick()\n\t{\n\t\tNGUITools.SetActive (improvementDetails, false);\n\t\tcurrentImprovement = null;\n\n\t\tfor(int i = 0; i < tabs.Length; ++i)\n\t\t{\n\t\t\tif(tabs[i] == UIButton.current.gameObject)\n\t\t\t{\n\t\t\t\ttabs[i].GetComponent().enabled = false;\n\t\t\t\ttabs[i].GetComponent().spriteName = \"Button Hover (Orange)\";\n\t\t\t\tUpdateImprovementsWindow(i);\n\t\t\t}\n\n\t\t\telse\n\t\t\t{\n\t\t\t\tif(i <= improvementsBasic.techTier)\n\t\t\t\t{\n\t\t\t\t\ttabs[i].GetComponent().enabled = true;\n\t\t\t\t\ttabs[i].GetComponent().spriteName = \"Button Normal\";\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t{\n\t\t\t\t\ttabs[i].GetComponent().enabled = false;\n\t\t\t\t\ttabs[i].GetComponent().spriteName = \"Button Deactivated\";\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\tprivate void UpdateUpkeep()\n\t{\n\t\tfloat upkeepWealth = 0, upkeepPower = 0;\n\t\t\n\t\tfor(int i = 0; i < 
MasterScript.systemListConstructor.systemList[selectedSystem].planetsInSystem[selectedPlanet].improvementsBuilt.Count; ++i)\n\t\t{\n\t\t\tfor(int j = 0; j < MasterScript.systemListConstructor.basicImprovementsList.Count; ++j)\n\t\t\t{\n\t\t\t\tif(improvementsBasic.listOfImprovements[j].hasBeenBuilt == false)\n\t\t\t\t{\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\t\n\t\t\t\tif(MasterScript.systemListConstructor.systemList[selectedSystem].planetsInSystem[selectedPlanet].improvementsBuilt[i] == MasterScript.systemListConstructor.basicImprovementsList[j].name)\n\t\t\t\t{\n\t\t\t\t\tupkeepWealth += MasterScript.systemListConstructor.basicImprovementsList[j].wealthUpkeep;\n\t\t\t\t\tupkeepPower += MasterScript.systemListConstructor.basicImprovementsList[j].powerUpkeep;\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t\n\t\tsystemUpkeepPower.text = upkeepPower.ToString();\n\t\tsystemUpkeepWealth.text = upkeepWealth.ToString ();\n\t}\n\t\n\tvoid Update()\n\t{\n\t\tif(MasterScript.systemGUI.selectedSystem != selectedSystem)\n\t\t{\n\t\t\tNGUITools.SetActive(improvementsWindow, false);\n\t\t\tselectedSystem = MasterScript.systemGUI.selectedSystem;\n\t\t\timprovementsBasic = MasterScript.systemListConstructor.systemList [selectedSystem].systemObject.GetComponent ();\n\t\t}\n\n\t\tif(MasterScript.cameraFunctionsScript.openMenu == true)\n\t\t{\n\t\t\tif(selectedPlanet != -1)\n\t\t\t{\n\t\t\t\tif(improvementsWindow.activeInHierarchy == true)\n\t\t\t\t{\n\t\t\t\t\tUpdateTabs();\n\t\t\t\t}\n\n\t\t\t\tUpdateBuiltImprovements();\n\t\t\t\tUpdateSystemEffects (selectedSystem, selectedPlanet);\n\t\t\t\tUpdateUpkeep();\n\t\t\t}\n\t\t}\n\t\t\n\t\tif(Input.GetKeyDown(\"c\"))\n\t\t{\n\t\t\tNGUITools.SetActive(availableImprovements, false);\n\t\t}\n\t}\n\n\tpublic void UpdateSystemEffects(int system, int planet) //TODO this needs to be planet specific\n\t{\n\t\timprovementsBasic = MasterScript.systemListConstructor.systemList[selectedSystem].systemObject.GetComponent();\n\n\t\tstring temp = 
\"\";\n\n\t\tfloat knoTemp = (MasterScript.systemListConstructor.systemList[system].sysKnowledgeModifier + MasterScript.systemListConstructor.systemList[system].planetsInSystem[planet].knowledgeModifier) - 1;\n\t\tfloat powTemp = (MasterScript.systemListConstructor.systemList[system].sysPowerModifier + MasterScript.systemListConstructor.systemList[system].planetsInSystem[planet].powerModifier) - 1;\n\t\tfloat growTemp = MasterScript.systemListConstructor.systemList[system].sysGrowthModifier + MasterScript.systemListConstructor.systemList[system].planetsInSystem[planet].growthModifier;\n\t\tfloat popTemp = MasterScript.systemListConstructor.systemList[system].sysMaxPopulationModifier + MasterScript.systemListConstructor.systemList[system].planetsInSystem[planet].maxPopulationModifier;\n\t\tfloat amberPenalty = MasterScript.systemListConstructor.systemList[system].sysAmberPenalty + MasterScript.systemListConstructor.systemList[system].planetsInSystem[planet].amberPenalty;\n\t\tfloat amberProd = (MasterScript.systemListConstructor.systemList[system].sysAmberModifier + MasterScript.systemListConstructor.systemList[system].planetsInSystem[planet].amberModifier) - 1;\n\n\t\tif(knoTemp != 0f)\n\t\t{\n\t\t\tif(temp != \"\")\n\t\t\t{\n\t\t\t\ttemp = temp + \"\\n+\";\n\t\t\t}\n\n\t\t\ttemp = temp + Math.Round(knoTemp * 100, 1) + \"% Knowledge from Improvements\";\n\t\t}\n\t\tif(powTemp != 0f)\n\t\t{\n\t\t\tif(temp != \"\")\n\t\t\t{\n\t\t\t\ttemp = temp + \"\\n+\";\n\t\t\t}\n\n\t\t\ttemp = temp + Math.Round(powTemp * 100, 1) + \"% Power from Improvements\";\n\t\t}\n\t\tif(growTemp != 0f)\n\t\t{\n\t\t\tif(temp != \"\")\n\t\t\t{\n\t\t\t\ttemp = temp + \"\\n+\";\n\t\t\t}\n\n\t\t\ttemp = temp + Math.Round(growTemp, 2) + \"% Growth from Improvements\";\n\t\t}\n\t\tif(popTemp != 0f)\n\t\t{\n\t\t\tif(temp != \"\")\n\t\t\t{\n\t\t\t\ttemp = temp + \"\\n+\";\n\t\t\t}\n\n\t\t\ttemp = temp + Math.Round(popTemp, 1) + \"% Population from Improvements\";\n\t\t}\n\n\t\tint standardSize = 
MasterScript.systemListConstructor.systemList[selectedSystem].planetsInSystem[selectedPlanet].baseImprovementSlots;\n\n\t\tif(MasterScript.systemListConstructor.systemList[selectedSystem].planetsInSystem[selectedPlanet].currentImprovementSlots > standardSize)\n\t\t{\n\t\t\tif(temp != \"\")\n\t\t\t{\n\t\t\t\ttemp = temp + \"\\n+\";\n\t\t\t}\n\n\t\t\ttemp = temp + (MasterScript.systemListConstructor.systemList[selectedSystem].planetsInSystem[selectedPlanet].currentImprovementSlots - standardSize).ToString() + \" Improvement Slots on Planet\";\n\t\t}\n\t\tif(amberPenalty != 1f)\n\t\t{\n\t\t\tif(temp != \"\")\n\t\t\t{\n\t\t\t\ttemp = temp + \"\\n+\";\n\t\t\t}\n\n\t\t\ttemp = temp + Math.Round ((1 - amberPenalty) * 100, 1) + \"% Amber Penalty on System\";\n\t\t}\n\t\tif(amberProd != 0)\n\t\t{\n\t\t\tif(temp != \"\")\n\t\t\t{\n\t\t\t\ttemp = temp + \"\\n+\";\n\t\t\t}\n\n\t\t\ttemp = temp + Math.Round (amberProd * 100, 1) + \"% Amber Production on System\";\n\t\t}\n\t\tif(improvementsBasic.improvementCostModifier != 0f)\n\t\t{\n\t\t\tif(temp != \"\")\n\t\t\t{\n\t\t\t\ttemp = temp + \"\\n\";\n\t\t\t}\n\n\t\t\ttemp = temp + improvementsBasic.improvementCostModifier + \" less Power required for Improvements\";\n\t\t}\n\t\tif(improvementsBasic.researchCost != 0f)\n\t\t{\n\t\t\tif(temp != \"\")\n\t\t\t{\n\t\t\t\ttemp = temp + \"\\n\";\n\t\t\t}\n\n\t\t\ttemp = temp + improvementsBasic.researchCost + \" less Knowledge required for Research\";\n\t\t}\n\n\t\t/*\n\t\tamberPointBonus;\n\t\tpublic float tempWealth, tempKnwlUnitBonus, tempPowUnitBonus, tempResearchCostReduction, tempImprovementCostReduction, \n\t\ttempBonusAmbition;\n\n\t\tfor(int i = 0; i < improvementsBasic.listOfImprovements.Count; ++i)\n\t\t{\n\t\t\tif(improvementsBasic.listOfImprovements[i].hasBeenBuilt == true)\n\t\t\t{\n\t\t\t\tif(temp == \"\")\n\t\t\t\t{\n\t\t\t\t\ttemp = improvementsBasic.listOfImprovements[i].improvementMessage.ToUpper();\n\t\t\t\t}\n\n\t\t\t\telse\n\t\t\t\t{\n\t\t\t\t\ttemp = temp + \"\\n\" 
+ improvementsBasic.listOfImprovements[i].improvementMessage.ToUpper();\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t*/\n\n\t\tif(temp == \"\")\n\t\t{\n\t\t\ttemp = \"NO EFFECTS ON SYSTEM\";\n\t\t}\n\n\t\tsystemEffects.text = temp;\n\t\tsystemEffects.transform.parent.GetComponent ().height = systemEffects.height + 20;\n\t}\n\n\tpublic void BuildImprovement()\n\t{\n\t\tNGUITools.SetActive (improvementDetails, false);\n\n\t\timprovementsBasic = MasterScript.systemListConstructor.systemList[selectedSystem].systemObject.GetComponent();\n\t\t\n\t\tfor(int i = 0; i < improvementsBasic.listOfImprovements.Count; ++i)\n\t\t{\n\t\t\tif(improvementsBasic.listOfImprovements[i].improvementName.ToUpper () == currentImprovement)\n\t\t\t{\n\t\t\t\tfor(int j = 0; j < MasterScript.systemListConstructor.systemList[selectedSystem].planetsInSystem[selectedPlanet].currentImprovementSlots; ++j)\n\t\t\t\t{\n\t\t\t\t\tif(MasterScript.systemListConstructor.systemList[selectedSystem].planetsInSystem[selectedPlanet].improvementsBuilt[j] == null)\n\t\t\t\t\t{\n\t\t\t\t\t\tif(improvementsBasic.ImproveSystem(i) == true)\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\timprovementsBasic.ActiveTechnologies(selectedSystem, MasterScript.playerTurnScript);\n\t\t\t\t\t\t\tMasterScript.systemListConstructor.systemList[selectedSystem].planetsInSystem[selectedPlanet].improvementsBuilt[j] = improvementsBasic.listOfImprovements[i].improvementName;\n\t\t\t\t\t\t\tUpdateImprovementsWindow(improvementsBasic.listOfImprovements[i].improvementLevel);\n\t\t\t\t\t\t\tUpdateBuiltImprovements();\n\t\t\t\t\t\t\tcurrentImprovement = null;\n\t\t\t\t\t\t\tselectedSlot = -1;\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tfor(int i = 0; i < unbuiltImprovementList.Length; ++i)\n\t\t{\n\t\t\tunbuiltImprovementList[i].GetComponent().spriteName = \"Button Normal\";\n\t\t\tunbuiltImprovementList[i].GetComponent().enabled = true;\n\t\t}\n\n\t\tNGUITools.SetActive(improvementsWindow, false);\n\t}\n\t\n\tprivate void 
CheckForTierUnlock()\n\t{\n\t\tfor(int i = 0; i < 4; ++i)\n\t\t{\n\t\t\tUIButton temp = tabs[i].gameObject.GetComponent();\n\n\t\t\tif(improvementsBasic.techTier >= i && temp.enabled == false)\n\t\t\t{\n\t\t\t\ttemp.enabled = true;\n\t\t\t}\n\t\t\tif(improvementsBasic.techTier < i && temp.enabled == true)\n\t\t\t{\n\t\t\t\ttemp.enabled = false;\n\t\t\t}\n\t\t}\n\t}\n}\n", "meta": {"content_hash": "6c52717d8bbbbb1ae9c235432a053de8", "timestamp": "", "source": "github", "line_count": 490, "max_line_length": 216, "avg_line_length": 31.63469387755102, "alnum_prop": 0.7123411392813367, "repo_name": "Shemamforash/Crucible", "id": "84fa05cdf43143fdf158fe3ded82afdb878c135b", "size": "15503", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Scripts/UIScripts/SystemScrollviews.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "376581"}]}} {"text": "r\"\"\"HTTP cookie handling for web clients.\n\nThis module has (now fairly distant) origins in Gisle Aas' Perl module\nHTTP::Cookies, from the libwww-perl library.\n\nDocstrings, comments and debug strings in this code refer to the\nattributes of the HTTP cookie system as cookie-attributes, to distinguish\nthem clearly from Python attributes.\n\nClass diagram (note that BSDDBCookieJar and the MSIE* classes are not\ndistributed with the Python standard library, but are available from\nhttp://wwwsearch.sf.net/):\n\n CookieJar____\n / \\ \\\n FileCookieJar \\ \\\n / | \\ \\ \\\n MozillaCookieJar | LWPCookieJar \\ \\\n | | \\\n | ---MSIEBase | \\\n | / | | \\\n | / MSIEDBCookieJar BSDDBCookieJar\n |/\n MSIECookieJar\n\n\"\"\"\n\n__all__ = ['Cookie', 'CookieJar', 'CookiePolicy', 'DefaultCookiePolicy',\n 'FileCookieJar', 'LWPCookieJar', 'LoadError', 'MozillaCookieJar']\n\nimport copy\nimport datetime\nimport re\nimport time\nimport urllib.parse, urllib.request\ntry:\n import threading as _threading\nexcept ImportError:\n import dummy_threading as _threading\nimport http.client # only for 
the default HTTP port\nfrom calendar import timegm\n\ndebug = False # set to True to enable debugging via the logging module\nlogger = None\n\ndef _debug(*args):\n if not debug:\n return\n global logger\n if not logger:\n import logging\n logger = logging.getLogger(\"http.cookiejar\")\n return logger.debug(*args)\n\n\nDEFAULT_HTTP_PORT = str(http.client.HTTP_PORT)\nMISSING_FILENAME_TEXT = (\"a filename was not supplied (nor was the CookieJar \"\n \"instance initialised with one)\")\n\ndef _warn_unhandled_exception():\n # There are a few catch-all except: statements in this module, for\n # catching input that's bad in unexpected ways. Warn if any\n # exceptions are caught there.\n import io, warnings, traceback\n f = io.StringIO()\n traceback.print_exc(None, f)\n msg = f.getvalue()\n warnings.warn(\"http.cookiejar bug!\\n%s\" % msg, stacklevel=2)\n\n\n# Date/time conversion\n# -----------------------------------------------------------------------------\n\nEPOCH_YEAR = 1970\ndef _timegm(tt):\n year, month, mday, hour, min, sec = tt[:6]\n if ((year >= EPOCH_YEAR) and (1 <= month <= 12) and (1 <= mday <= 31) and\n (0 <= hour <= 24) and (0 <= min <= 59) and (0 <= sec <= 61)):\n return timegm(tt)\n else:\n return None\n\nDAYS = [\"Mon\", \"Tue\", \"Wed\", \"Thu\", \"Fri\", \"Sat\", \"Sun\"]\nMONTHS = [\"Jan\", \"Feb\", \"Mar\", \"Apr\", \"May\", \"Jun\",\n \"Jul\", \"Aug\", \"Sep\", \"Oct\", \"Nov\", \"Dec\"]\nMONTHS_LOWER = []\nfor month in MONTHS: MONTHS_LOWER.append(month.lower())\n\ndef time2isoz(t=None):\n \"\"\"Return a string representing time in seconds since epoch, t.\n\n If the function is called without an argument, it will use the current\n time.\n\n The format of the returned string is like \"YYYY-MM-DD hh:mm:ssZ\",\n representing Universal Time (UTC, aka GMT). 
An example of this format is:\n\n 1994-11-24 08:49:37Z\n\n \"\"\"\n if t is None:\n dt = datetime.datetime.utcnow()\n else:\n dt = datetime.datetime.utcfromtimestamp(t)\n return \"%04d-%02d-%02d %02d:%02d:%02dZ\" % (\n dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)\n\ndef time2netscape(t=None):\n \"\"\"Return a string representing time in seconds since epoch, t.\n\n If the function is called without an argument, it will use the current\n time.\n\n The format of the returned string is like this:\n\n Wed, DD-Mon-YYYY HH:MM:SS GMT\n\n \"\"\"\n if t is None:\n dt = datetime.datetime.utcnow()\n else:\n dt = datetime.datetime.utcfromtimestamp(t)\n return \"%s %02d-%s-%04d %02d:%02d:%02d GMT\" % (\n DAYS[dt.weekday()], dt.day, MONTHS[dt.month-1],\n dt.year, dt.hour, dt.minute, dt.second)\n\n\nUTC_ZONES = {\"GMT\": None, \"UTC\": None, \"UT\": None, \"Z\": None}\n\nTIMEZONE_RE = re.compile(r\"^([-+])?(\\d\\d?):?(\\d\\d)?$\", re.ASCII)\ndef offset_from_tz_string(tz):\n offset = None\n if tz in UTC_ZONES:\n offset = 0\n else:\n m = TIMEZONE_RE.search(tz)\n if m:\n offset = 3600 * int(m.group(2))\n if m.group(3):\n offset = offset + 60 * int(m.group(3))\n if m.group(1) == '-':\n offset = -offset\n return offset\n\ndef _str2time(day, mon, yr, hr, min, sec, tz):\n # translate month name to number\n # month numbers start with 1 (January)\n try:\n mon = MONTHS_LOWER.index(mon.lower())+1\n except ValueError:\n # maybe it's already a number\n try:\n imon = int(mon)\n except ValueError:\n return None\n if 1 <= imon <= 12:\n mon = imon\n else:\n return None\n\n # make sure clock elements are defined\n if hr is None: hr = 0\n if min is None: min = 0\n if sec is None: sec = 0\n\n yr = int(yr)\n day = int(day)\n hr = int(hr)\n min = int(min)\n sec = int(sec)\n\n if yr < 1000:\n # find \"obvious\" year\n cur_yr = time.localtime(time.time())[0]\n m = cur_yr % 100\n tmp = yr\n yr = yr + cur_yr - m\n m = m - tmp\n if abs(m) > 50:\n if m > 0: yr = yr + 100\n else: yr = yr - 
100\n\n # convert UTC time tuple to seconds since epoch (not timezone-adjusted)\n t = _timegm((yr, mon, day, hr, min, sec, tz))\n\n if t is not None:\n # adjust time using timezone string, to get absolute time since epoch\n if tz is None:\n tz = \"UTC\"\n tz = tz.upper()\n offset = offset_from_tz_string(tz)\n if offset is None:\n return None\n t = t - offset\n\n return t\n\nSTRICT_DATE_RE = re.compile(\n r\"^[SMTWF][a-z][a-z], (\\d\\d) ([JFMASOND][a-z][a-z]) \"\n \"(\\d\\d\\d\\d) (\\d\\d):(\\d\\d):(\\d\\d) GMT$\", re.ASCII)\nWEEKDAY_RE = re.compile(\n r\"^(?:Sun|Mon|Tue|Wed|Thu|Fri|Sat)[a-z]*,?\\s*\", re.I | re.ASCII)\nLOOSE_HTTP_DATE_RE = re.compile(\n r\"\"\"^\n (\\d\\d?) # day\n (?:\\s+|[-\\/])\n (\\w+) # month\n (?:\\s+|[-\\/])\n (\\d+) # year\n (?:\n (?:\\s+|:) # separator before clock\n (\\d\\d?):(\\d\\d) # hour:min\n (?::(\\d\\d))? # optional seconds\n )? # optional clock\n \\s*\n ([-+]?\\d{2,4}|(?![APap][Mm]\\b)[A-Za-z]+)? # timezone\n \\s*\n (?:\\(\\w+\\))? # ASCII representation of timezone in parens.\n \\s*$\"\"\", re.X | re.ASCII)\ndef http2time(text):\n \"\"\"Returns time in seconds since epoch of time represented by a string.\n\n Return value is an integer.\n\n None is returned if the format of str is unrecognized, the time is outside\n the representable range, or the timezone string is not recognized. If the\n string contains no timezone, UTC is assumed.\n\n The timezone in the string may be numerical (like \"-0800\" or \"+0100\") or a\n string timezone (like \"UTC\", \"GMT\", \"BST\" or \"EST\"). 
Currently, only the\n timezone strings equivalent to UTC (zero offset) are known to the function.\n\n The function loosely parses the following formats:\n\n Wed, 09 Feb 1994 22:23:32 GMT -- HTTP format\n Tuesday, 08-Feb-94 14:15:29 GMT -- old rfc850 HTTP format\n Tuesday, 08-Feb-1994 14:15:29 GMT -- broken rfc850 HTTP format\n 09 Feb 1994 22:23:32 GMT -- HTTP format (no weekday)\n 08-Feb-94 14:15:29 GMT -- rfc850 format (no weekday)\n 08-Feb-1994 14:15:29 GMT -- broken rfc850 format (no weekday)\n\n The parser ignores leading and trailing whitespace. The time may be\n absent.\n\n If the year is given with only 2 digits, the function will select the\n century that makes the year closest to the current date.\n\n \"\"\"\n # fast exit for strictly conforming string\n m = STRICT_DATE_RE.search(text)\n if m:\n g = m.groups()\n mon = MONTHS_LOWER.index(g[1].lower()) + 1\n tt = (int(g[2]), mon, int(g[0]),\n int(g[3]), int(g[4]), float(g[5]))\n return _timegm(tt)\n\n # No, we need some messy parsing...\n\n # clean up\n text = text.lstrip()\n text = WEEKDAY_RE.sub(\"\", text, 1) # Useless weekday\n\n # tz is time zone specifier string\n day, mon, yr, hr, min, sec, tz = [None]*7\n\n # loose regexp parse\n m = LOOSE_HTTP_DATE_RE.search(text)\n if m is not None:\n day, mon, yr, hr, min, sec, tz = m.groups()\n else:\n return None # bad format\n\n return _str2time(day, mon, yr, hr, min, sec, tz)\n\nISO_DATE_RE = re.compile(\n \"\"\"^\n (\\d{4}) # year\n [-\\/]?\n (\\d\\d?) # numerical month\n [-\\/]?\n (\\d\\d?) # day\n (?:\n (?:\\s+|[-:Tt]) # separator before clock\n (\\d\\d?):?(\\d\\d) # hour:min\n (?::?(\\d\\d(?:\\.\\d*)?))? # optional seconds (and fractional)\n )? # optional clock\n \\s*\n ([-+]?\\d\\d?:?(:?\\d\\d)?\n |Z|z)? # timezone (Z is \"zero meridian\", i.e. GMT)\n \\s*$\"\"\", re.X | re. 
ASCII)\ndef iso2time(text):\n \"\"\"\n As for http2time, but parses the ISO 8601 formats:\n\n 1994-02-03 14:15:29 -0100 -- ISO 8601 format\n 1994-02-03 14:15:29 -- zone is optional\n 1994-02-03 -- only date\n 1994-02-03T14:15:29 -- Use T as separator\n 19940203T141529Z -- ISO 8601 compact format\n 19940203 -- only date\n\n \"\"\"\n # clean up\n text = text.lstrip()\n\n # tz is time zone specifier string\n day, mon, yr, hr, min, sec, tz = [None]*7\n\n # loose regexp parse\n m = ISO_DATE_RE.search(text)\n if m is not None:\n # XXX there's an extra bit of the timezone I'm ignoring here: is\n # this the right thing to do?\n yr, mon, day, hr, min, sec, tz, _ = m.groups()\n else:\n return None # bad format\n\n return _str2time(day, mon, yr, hr, min, sec, tz)\n\n\n# Header parsing\n# -----------------------------------------------------------------------------\n\ndef unmatched(match):\n \"\"\"Return unmatched part of re.Match object.\"\"\"\n start, end = match.span(0)\n return match.string[:start]+match.string[end:]\n\nHEADER_TOKEN_RE = re.compile(r\"^\\s*([^=\\s;,]+)\")\nHEADER_QUOTED_VALUE_RE = re.compile(r\"^\\s*=\\s*\\\"([^\\\"\\\\]*(?:\\\\.[^\\\"\\\\]*)*)\\\"\")\nHEADER_VALUE_RE = re.compile(r\"^\\s*=\\s*([^\\s;,]*)\")\nHEADER_ESCAPE_RE = re.compile(r\"\\\\(.)\")\ndef split_header_words(header_values):\n r\"\"\"Parse header values into a list of lists containing key,value pairs.\n\n The function knows how to deal with \",\", \";\" and \"=\" as well as quoted\n values after \"=\". 
A list of space separated tokens are parsed as if they\n were separated by \";\".\n\n If the header_values passed as argument contains multiple values, then they\n are treated as if they were a single value separated by comma \",\".\n\n This means that this function is useful for parsing header fields that\n follow this syntax (BNF as from the HTTP/1.1 specification, but we relax\n the requirement for tokens).\n\n headers = #header\n header = (token | parameter) *( [\";\"] (token | parameter))\n\n token = 1*\n separators = \"(\" | \")\" | \"<\" | \">\" | \"@\"\n | \",\" | \";\" | \":\" | \"\\\" | <\">\n | \"/\" | \"[\" | \"]\" | \"?\" | \"=\"\n | \"{\" | \"}\" | SP | HT\n\n quoted-string = ( <\"> *(qdtext | quoted-pair ) <\"> )\n qdtext = >\n quoted-pair = \"\\\" CHAR\n\n parameter = attribute \"=\" value\n attribute = token\n value = token | quoted-string\n\n Each header is represented by a list of key/value pairs. The value for a\n simple token (not part of a parameter) is None. Syntactically incorrect\n headers will not necessarily be parsed as you would want.\n\n This is easier to describe with some examples:\n\n >>> split_header_words(['foo=\"bar\"; port=\"80,81\"; discard, bar=baz'])\n [[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]]\n >>> split_header_words(['text/html; charset=\"iso-8859-1\"'])\n [[('text/html', None), ('charset', 'iso-8859-1')]]\n >>> split_header_words([r'Basic realm=\"\\\"foo\\bar\\\"\"'])\n [[('Basic', None), ('realm', '\"foobar\"')]]\n\n \"\"\"\n assert not isinstance(header_values, str)\n result = []\n for text in header_values:\n orig_text = text\n pairs = []\n while text:\n m = HEADER_TOKEN_RE.search(text)\n if m:\n text = unmatched(m)\n name = m.group(1)\n m = HEADER_QUOTED_VALUE_RE.search(text)\n if m: # quoted value\n text = unmatched(m)\n value = m.group(1)\n value = HEADER_ESCAPE_RE.sub(r\"\\1\", value)\n else:\n m = HEADER_VALUE_RE.search(text)\n if m: # unquoted value\n text = unmatched(m)\n value = 
m.group(1)\n value = value.rstrip()\n else:\n # no value, a lone token\n value = None\n pairs.append((name, value))\n elif text.lstrip().startswith(\",\"):\n # concatenated headers, as per RFC 2616 section 4.2\n text = text.lstrip()[1:]\n if pairs: result.append(pairs)\n pairs = []\n else:\n # skip junk\n non_junk, nr_junk_chars = re.subn(\"^[=\\s;]*\", \"\", text)\n assert nr_junk_chars > 0, (\n \"split_header_words bug: '%s', '%s', %s\" %\n (orig_text, text, pairs))\n text = non_junk\n if pairs: result.append(pairs)\n return result\n\nHEADER_JOIN_ESCAPE_RE = re.compile(r\"([\\\"\\\\])\")\ndef join_header_words(lists):\n \"\"\"Do the inverse (almost) of the conversion done by split_header_words.\n\n Takes a list of lists of (key, value) pairs and produces a single header\n value. Attribute values are quoted if needed.\n\n >>> join_header_words([[(\"text/plain\", None), (\"charset\", \"iso-8859/1\")]])\n 'text/plain; charset=\"iso-8859/1\"'\n >>> join_header_words([[(\"text/plain\", None)], [(\"charset\", \"iso-8859/1\")]])\n 'text/plain, charset=\"iso-8859/1\"'\n\n \"\"\"\n headers = []\n for pairs in lists:\n attr = []\n for k, v in pairs:\n if v is not None:\n if not re.search(r\"^\\w+$\", v):\n v = HEADER_JOIN_ESCAPE_RE.sub(r\"\\\\\\1\", v) # escape \" and \\\n v = '\"%s\"' % v\n k = \"%s=%s\" % (k, v)\n attr.append(k)\n if attr: headers.append(\"; \".join(attr))\n return \", \".join(headers)\n\ndef strip_quotes(text):\n if text.startswith('\"'):\n text = text[1:]\n if text.endswith('\"'):\n text = text[:-1]\n return text\n\ndef parse_ns_headers(ns_headers):\n \"\"\"Ad-hoc parser for Netscape protocol cookie-attributes.\n\n The old Netscape cookie format for Set-Cookie can for instance contain\n an unquoted \",\" in the expires field, so we have to use this ad-hoc\n parser instead of split_header_words.\n\n XXX This may not make the best possible effort to parse all the crap\n that Netscape Cookie headers contain. 
Ronald Tschalar's HTTPClient\n parser is probably better, so could do worse than following that if\n this ever gives any trouble.\n\n Currently, this is also used for parsing RFC 2109 cookies.\n\n \"\"\"\n known_attrs = (\"expires\", \"domain\", \"path\", \"secure\",\n # RFC 2109 attrs (may turn up in Netscape cookies, too)\n \"version\", \"port\", \"max-age\")\n\n result = []\n for ns_header in ns_headers:\n pairs = []\n version_set = False\n for ii, param in enumerate(re.split(r\";\\s*\", ns_header)):\n param = param.rstrip()\n if param == \"\": continue\n if \"=\" not in param:\n k, v = param, None\n else:\n k, v = re.split(r\"\\s*=\\s*\", param, 1)\n k = k.lstrip()\n if ii != 0:\n lc = k.lower()\n if lc in known_attrs:\n k = lc\n if k == \"version\":\n # This is an RFC 2109 cookie.\n v = strip_quotes(v)\n version_set = True\n if k == \"expires\":\n # convert expires date to seconds since epoch\n v = http2time(strip_quotes(v)) # None if invalid\n pairs.append((k, v))\n\n if pairs:\n if not version_set:\n pairs.append((\"version\", \"0\"))\n result.append(pairs)\n\n return result\n\n\nIPV4_RE = re.compile(r\"\\.\\d+$\", re.ASCII)\ndef is_HDN(text):\n \"\"\"Return True if text is a host domain name.\"\"\"\n # XXX\n # This may well be wrong. Which RFC is HDN defined in, if any (for\n # the purposes of RFC 2965)?\n # For the current implementation, what about IPv6? Remember to look\n # at other uses of IPV4_RE also, if change this.\n if IPV4_RE.search(text):\n return False\n if text == \"\":\n return False\n if text[0] == \".\" or text[-1] == \".\":\n return False\n return True\n\ndef domain_match(A, B):\n \"\"\"Return True if domain A domain-matches domain B, according to RFC 2965.\n\n A and B may be host domain names or IP addresses.\n\n RFC 2965, section 1:\n\n Host names can be specified either as an IP address or a HDN string.\n Sometimes we compare one host name with another. (Such comparisons SHALL\n be case-insensitive.) 
Host A's name domain-matches host B's if\n\n * their host name strings string-compare equal; or\n\n * A is a HDN string and has the form NB, where N is a non-empty\n name string, B has the form .B', and B' is a HDN string. (So,\n x.y.com domain-matches .Y.com but not Y.com.)\n\n Note that domain-match is not a commutative operation: a.b.c.com\n domain-matches .c.com, but not the reverse.\n\n \"\"\"\n # Note that, if A or B are IP addresses, the only relevant part of the\n # definition of the domain-match algorithm is the direct string-compare.\n A = A.lower()\n B = B.lower()\n if A == B:\n return True\n if not is_HDN(A):\n return False\n i = A.rfind(B)\n if i == -1 or i == 0:\n # A does not have form NB, or N is the empty string\n return False\n if not B.startswith(\".\"):\n return False\n if not is_HDN(B[1:]):\n return False\n return True\n\ndef liberal_is_HDN(text):\n \"\"\"Return True if text is a sort-of-like a host domain name.\n\n For accepting/blocking domains.\n\n \"\"\"\n if IPV4_RE.search(text):\n return False\n return True\n\ndef user_domain_match(A, B):\n \"\"\"For blocking/accepting domains.\n\n A and B may be host domain names or IP addresses.\n\n \"\"\"\n A = A.lower()\n B = B.lower()\n if not (liberal_is_HDN(A) and liberal_is_HDN(B)):\n if A == B:\n # equal IP addresses\n return True\n return False\n initial_dot = B.startswith(\".\")\n if initial_dot and A.endswith(B):\n return True\n if not initial_dot and A == B:\n return True\n return False\n\ncut_port_re = re.compile(r\":\\d+$\", re.ASCII)\ndef request_host(request):\n \"\"\"Return request-host, as defined by RFC 2965.\n\n Variation from RFC: returned value is lowercased, for convenient\n comparison.\n\n \"\"\"\n url = request.get_full_url()\n host = urllib.parse.urlparse(url)[1]\n if host == \"\":\n host = request.get_header(\"Host\", \"\")\n\n # remove port, if present\n host = cut_port_re.sub(\"\", host, 1)\n return host.lower()\n\ndef eff_request_host(request):\n \"\"\"Return a tuple 
(request-host, effective request-host name).\n\n As defined by RFC 2965, except both are lowercased.\n\n \"\"\"\n erhn = req_host = request_host(request)\n if req_host.find(\".\") == -1 and not IPV4_RE.search(req_host):\n erhn = req_host + \".local\"\n return req_host, erhn\n\ndef request_path(request):\n \"\"\"Path component of request-URI, as defined by RFC 2965.\"\"\"\n url = request.get_full_url()\n parts = urllib.parse.urlsplit(url)\n path = escape_path(parts.path)\n if not path.startswith(\"/\"):\n # fix bad RFC 2396 absoluteURI\n path = \"/\" + path\n return path\n\ndef request_port(request):\n host = request.host\n i = host.find(':')\n if i >= 0:\n port = host[i+1:]\n try:\n int(port)\n except ValueError:\n _debug(\"nonnumeric port: '%s'\", port)\n return None\n else:\n port = DEFAULT_HTTP_PORT\n return port\n\n# Characters in addition to A-Z, a-z, 0-9, '_', '.', and '-' that don't\n# need to be escaped to form a valid HTTP URL (RFCs 2396 and 1738).\nHTTP_PATH_SAFE = \"%/;:@&=+$,!~*'()\"\nESCAPED_CHAR_RE = re.compile(r\"%([0-9a-fA-F][0-9a-fA-F])\")\ndef uppercase_escaped_char(match):\n return \"%%%s\" % match.group(1).upper()\ndef escape_path(path):\n \"\"\"Escape any invalid characters in HTTP URL, and uppercase all escapes.\"\"\"\n # There's no knowing what character encoding was used to create URLs\n # containing %-escapes, but since we have to pick one to escape invalid\n # path characters, we pick UTF-8, as recommended in the HTML 4.0\n # specification:\n # http://www.w3.org/TR/REC-html40/appendix/notes.html#h-B.2.1\n # And here, kind of: draft-fielding-uri-rfc2396bis-03\n # (And in draft IRI specification: draft-duerst-iri-05)\n # (And here, for new URI schemes: RFC 2718)\n path = urllib.parse.quote(path, HTTP_PATH_SAFE)\n path = ESCAPED_CHAR_RE.sub(uppercase_escaped_char, path)\n return path\n\ndef reach(h):\n \"\"\"Return reach of host h, as defined by RFC 2965, section 1.\n\n The reach R of a host name H is defined as follows:\n\n * If\n\n - H is 
the host domain name of a host; and,\n\n - H has the form A.B; and\n\n - A has no embedded (that is, interior) dots; and\n\n - B has at least one embedded dot, or B is the string \"local\".\n then the reach of H is .B.\n\n * Otherwise, the reach of H is H.\n\n >>> reach(\"www.acme.com\")\n '.acme.com'\n >>> reach(\"acme.com\")\n 'acme.com'\n >>> reach(\"acme.local\")\n '.local'\n\n \"\"\"\n i = h.find(\".\")\n if i >= 0:\n #a = h[:i] # this line is only here to show what a is\n b = h[i+1:]\n i = b.find(\".\")\n if is_HDN(h) and (i >= 0 or b == \"local\"):\n return \".\"+b\n return h\n\ndef is_third_party(request):\n \"\"\"\n\n RFC 2965, section 3.3.6:\n\n An unverifiable transaction is to a third-party host if its request-\n host U does not domain-match the reach R of the request-host O in the\n origin transaction.\n\n \"\"\"\n req_host = request_host(request)\n if not domain_match(req_host, reach(request.origin_req_host)):\n return True\n else:\n return False\n\n\nclass Cookie:\n \"\"\"HTTP Cookie.\n\n This class represents both Netscape and RFC 2965 cookies.\n\n This is deliberately a very simple class. It just holds attributes. It's\n possible to construct Cookie instances that don't comply with the cookie\n standards. CookieJar.make_cookies is the factory function for Cookie\n objects -- it deals with cookie parsing, supplying defaults, and\n normalising to the representation used in this class. 
CookiePolicy is\n responsible for checking them to see whether they should be accepted from\n and returned to the server.\n\n Note that the port may be present in the headers, but unspecified (\"Port\"\n rather than\"Port=80\", for example); if this is the case, port is None.\n\n \"\"\"\n\n def __init__(self, version, name, value,\n port, port_specified,\n domain, domain_specified, domain_initial_dot,\n path, path_specified,\n secure,\n expires,\n discard,\n comment,\n comment_url,\n rest,\n rfc2109=False,\n ):\n\n if version is not None: version = int(version)\n if expires is not None: expires = int(expires)\n if port is None and port_specified is True:\n raise ValueError(\"if port is None, port_specified must be false\")\n\n self.version = version\n self.name = name\n self.value = value\n self.port = port\n self.port_specified = port_specified\n # normalise case, as per RFC 2965 section 3.3.3\n self.domain = domain.lower()\n self.domain_specified = domain_specified\n # Sigh. We need to know whether the domain given in the\n # cookie-attribute had an initial dot, in order to follow RFC 2965\n # (as clarified in draft errata). 
Needed for the returned $Domain\n # value.\n self.domain_initial_dot = domain_initial_dot\n self.path = path\n self.path_specified = path_specified\n self.secure = secure\n self.expires = expires\n self.discard = discard\n self.comment = comment\n self.comment_url = comment_url\n self.rfc2109 = rfc2109\n\n self._rest = copy.copy(rest)\n\n def has_nonstandard_attr(self, name):\n return name in self._rest\n def get_nonstandard_attr(self, name, default=None):\n return self._rest.get(name, default)\n def set_nonstandard_attr(self, name, value):\n self._rest[name] = value\n\n def is_expired(self, now=None):\n if now is None: now = time.time()\n if (self.expires is not None) and (self.expires <= now):\n return True\n return False\n\n def __str__(self):\n if self.port is None: p = \"\"\n else: p = \":\"+self.port\n limit = self.domain + p + self.path\n if self.value is not None:\n namevalue = \"%s=%s\" % (self.name, self.value)\n else:\n namevalue = self.name\n return \"\" % (namevalue, limit)\n\n def __repr__(self):\n args = []\n for name in (\"version\", \"name\", \"value\",\n \"port\", \"port_specified\",\n \"domain\", \"domain_specified\", \"domain_initial_dot\",\n \"path\", \"path_specified\",\n \"secure\", \"expires\", \"discard\", \"comment\", \"comment_url\",\n ):\n attr = getattr(self, name)\n args.append(\"%s=%s\" % (name, repr(attr)))\n args.append(\"rest=%s\" % repr(self._rest))\n args.append(\"rfc2109=%s\" % repr(self.rfc2109))\n return \"Cookie(%s)\" % \", \".join(args)\n\n\nclass CookiePolicy:\n \"\"\"Defines which cookies get accepted from and returned to server.\n\n May also modify cookies, though this is probably a bad idea.\n\n The subclass DefaultCookiePolicy defines the standard rules for Netscape\n and RFC 2965 cookies -- override that if you want a customised policy.\n\n \"\"\"\n def set_ok(self, cookie, request):\n \"\"\"Return true if (and only if) cookie should be accepted from server.\n\n Currently, pre-expired cookies never get this far -- the 
CookieJar\n class deletes such cookies itself.\n\n \"\"\"\n raise NotImplementedError()\n\n def return_ok(self, cookie, request):\n \"\"\"Return true if (and only if) cookie should be returned to server.\"\"\"\n raise NotImplementedError()\n\n def domain_return_ok(self, domain, request):\n \"\"\"Return false if cookies should not be returned, given cookie domain.\n \"\"\"\n return True\n\n def path_return_ok(self, path, request):\n \"\"\"Return false if cookies should not be returned, given cookie path.\n \"\"\"\n return True\n\n\nclass DefaultCookiePolicy(CookiePolicy):\n \"\"\"Implements the standard rules for accepting and returning cookies.\"\"\"\n\n DomainStrictNoDots = 1\n DomainStrictNonDomain = 2\n DomainRFC2965Match = 4\n\n DomainLiberal = 0\n DomainStrict = DomainStrictNoDots|DomainStrictNonDomain\n\n def __init__(self,\n blocked_domains=None, allowed_domains=None,\n netscape=True, rfc2965=False,\n rfc2109_as_netscape=None,\n hide_cookie2=False,\n strict_domain=False,\n strict_rfc2965_unverifiable=True,\n strict_ns_unverifiable=False,\n strict_ns_domain=DomainLiberal,\n strict_ns_set_initial_dollar=False,\n strict_ns_set_path=False,\n ):\n \"\"\"Constructor arguments should be passed as keyword arguments only.\"\"\"\n self.netscape = netscape\n self.rfc2965 = rfc2965\n self.rfc2109_as_netscape = rfc2109_as_netscape\n self.hide_cookie2 = hide_cookie2\n self.strict_domain = strict_domain\n self.strict_rfc2965_unverifiable = strict_rfc2965_unverifiable\n self.strict_ns_unverifiable = strict_ns_unverifiable\n self.strict_ns_domain = strict_ns_domain\n self.strict_ns_set_initial_dollar = strict_ns_set_initial_dollar\n self.strict_ns_set_path = strict_ns_set_path\n\n if blocked_domains is not None:\n self._blocked_domains = tuple(blocked_domains)\n else:\n self._blocked_domains = ()\n\n if allowed_domains is not None:\n allowed_domains = tuple(allowed_domains)\n self._allowed_domains = allowed_domains\n\n def blocked_domains(self):\n \"\"\"Return the sequence 
of blocked domains (as a tuple).\"\"\"\n return self._blocked_domains\n def set_blocked_domains(self, blocked_domains):\n \"\"\"Set the sequence of blocked domains.\"\"\"\n self._blocked_domains = tuple(blocked_domains)\n\n def is_blocked(self, domain):\n for blocked_domain in self._blocked_domains:\n if user_domain_match(domain, blocked_domain):\n return True\n return False\n\n def allowed_domains(self):\n \"\"\"Return None, or the sequence of allowed domains (as a tuple).\"\"\"\n return self._allowed_domains\n def set_allowed_domains(self, allowed_domains):\n \"\"\"Set the sequence of allowed domains, or None.\"\"\"\n if allowed_domains is not None:\n allowed_domains = tuple(allowed_domains)\n self._allowed_domains = allowed_domains\n\n def is_not_allowed(self, domain):\n if self._allowed_domains is None:\n return False\n for allowed_domain in self._allowed_domains:\n if user_domain_match(domain, allowed_domain):\n return False\n return True\n\n def set_ok(self, cookie, request):\n \"\"\"\n If you override .set_ok(), be sure to call this method. 
If it returns\n false, so should your subclass (assuming your subclass wants to be more\n strict about which cookies to accept).\n\n \"\"\"\n _debug(\" - checking cookie %s=%s\", cookie.name, cookie.value)\n\n assert cookie.name is not None\n\n for n in \"version\", \"verifiability\", \"name\", \"path\", \"domain\", \"port\":\n fn_name = \"set_ok_\"+n\n fn = getattr(self, fn_name)\n if not fn(cookie, request):\n return False\n\n return True\n\n def set_ok_version(self, cookie, request):\n if cookie.version is None:\n # Version is always set to 0 by parse_ns_headers if it's a Netscape\n # cookie, so this must be an invalid RFC 2965 cookie.\n _debug(\" Set-Cookie2 without version attribute (%s=%s)\",\n cookie.name, cookie.value)\n return False\n if cookie.version > 0 and not self.rfc2965:\n _debug(\" RFC 2965 cookies are switched off\")\n return False\n elif cookie.version == 0 and not self.netscape:\n _debug(\" Netscape cookies are switched off\")\n return False\n return True\n\n def set_ok_verifiability(self, cookie, request):\n if request.unverifiable and is_third_party(request):\n if cookie.version > 0 and self.strict_rfc2965_unverifiable:\n _debug(\" third-party RFC 2965 cookie during \"\n \"unverifiable transaction\")\n return False\n elif cookie.version == 0 and self.strict_ns_unverifiable:\n _debug(\" third-party Netscape cookie during \"\n \"unverifiable transaction\")\n return False\n return True\n\n def set_ok_name(self, cookie, request):\n # Try and stop servers setting V0 cookies designed to hack other\n # servers that know both V0 and V1 protocols.\n if (cookie.version == 0 and self.strict_ns_set_initial_dollar and\n cookie.name.startswith(\"$\")):\n _debug(\" illegal name (starts with '$'): '%s'\", cookie.name)\n return False\n return True\n\n def set_ok_path(self, cookie, request):\n if cookie.path_specified:\n req_path = request_path(request)\n if ((cookie.version > 0 or\n (cookie.version == 0 and self.strict_ns_set_path)) and\n not 
req_path.startswith(cookie.path)):\n _debug(\" path attribute %s is not a prefix of request \"\n \"path %s\", cookie.path, req_path)\n return False\n return True\n\n def set_ok_domain(self, cookie, request):\n if self.is_blocked(cookie.domain):\n _debug(\" domain %s is in user block-list\", cookie.domain)\n return False\n if self.is_not_allowed(cookie.domain):\n _debug(\" domain %s is not in user allow-list\", cookie.domain)\n return False\n if cookie.domain_specified:\n req_host, erhn = eff_request_host(request)\n domain = cookie.domain\n if self.strict_domain and (domain.count(\".\") >= 2):\n # XXX This should probably be compared with the Konqueror\n # (kcookiejar.cpp) and Mozilla implementations, but it's a\n # losing battle.\n i = domain.rfind(\".\")\n j = domain.rfind(\".\", 0, i)\n if j == 0: # domain like .foo.bar\n tld = domain[i+1:]\n sld = domain[j+1:i]\n if sld.lower() in (\"co\", \"ac\", \"com\", \"edu\", \"org\", \"net\",\n \"gov\", \"mil\", \"int\", \"aero\", \"biz\", \"cat\", \"coop\",\n \"info\", \"jobs\", \"mobi\", \"museum\", \"name\", \"pro\",\n \"travel\", \"eu\") and len(tld) == 2:\n # domain like .co.uk\n _debug(\" country-code second level domain %s\", domain)\n return False\n if domain.startswith(\".\"):\n undotted_domain = domain[1:]\n else:\n undotted_domain = domain\n embedded_dots = (undotted_domain.find(\".\") >= 0)\n if not embedded_dots and domain != \".local\":\n _debug(\" non-local domain %s contains no embedded dot\",\n domain)\n return False\n if cookie.version == 0:\n if (not erhn.endswith(domain) and\n (not erhn.startswith(\".\") and\n not (\".\"+erhn).endswith(domain))):\n _debug(\" effective request-host %s (even with added \"\n \"initial dot) does not end with %s\",\n erhn, domain)\n return False\n if (cookie.version > 0 or\n (self.strict_ns_domain & self.DomainRFC2965Match)):\n if not domain_match(erhn, domain):\n _debug(\" effective request-host %s does not domain-match \"\n \"%s\", erhn, domain)\n return False\n if 
(cookie.version > 0 or\n (self.strict_ns_domain & self.DomainStrictNoDots)):\n host_prefix = req_host[:-len(domain)]\n if (host_prefix.find(\".\") >= 0 and\n not IPV4_RE.search(req_host)):\n _debug(\" host prefix %s for domain %s contains a dot\",\n host_prefix, domain)\n return False\n return True\n\n def set_ok_port(self, cookie, request):\n if cookie.port_specified:\n req_port = request_port(request)\n if req_port is None:\n req_port = \"80\"\n else:\n req_port = str(req_port)\n for p in cookie.port.split(\",\"):\n try:\n int(p)\n except ValueError:\n _debug(\" bad port %s (not numeric)\", p)\n return False\n if p == req_port:\n break\n else:\n _debug(\" request port (%s) not found in %s\",\n req_port, cookie.port)\n return False\n return True\n\n def return_ok(self, cookie, request):\n \"\"\"\n If you override .return_ok(), be sure to call this method. If it\n returns false, so should your subclass (assuming your subclass wants to\n be more strict about which cookies to return).\n\n \"\"\"\n # Path has already been checked by .path_return_ok(), and domain\n # blocking done by .domain_return_ok().\n _debug(\" - checking cookie %s=%s\", cookie.name, cookie.value)\n\n for n in \"version\", \"verifiability\", \"secure\", \"expires\", \"port\", \"domain\":\n fn_name = \"return_ok_\"+n\n fn = getattr(self, fn_name)\n if not fn(cookie, request):\n return False\n return True\n\n def return_ok_version(self, cookie, request):\n if cookie.version > 0 and not self.rfc2965:\n _debug(\" RFC 2965 cookies are switched off\")\n return False\n elif cookie.version == 0 and not self.netscape:\n _debug(\" Netscape cookies are switched off\")\n return False\n return True\n\n def return_ok_verifiability(self, cookie, request):\n if request.unverifiable and is_third_party(request):\n if cookie.version > 0 and self.strict_rfc2965_unverifiable:\n _debug(\" third-party RFC 2965 cookie during unverifiable \"\n \"transaction\")\n return False\n elif cookie.version == 0 and 
self.strict_ns_unverifiable:\n _debug(\" third-party Netscape cookie during unverifiable \"\n \"transaction\")\n return False\n return True\n\n def return_ok_secure(self, cookie, request):\n if cookie.secure and request.type != \"https\":\n _debug(\" secure cookie with non-secure request\")\n return False\n return True\n\n def return_ok_expires(self, cookie, request):\n if cookie.is_expired(self._now):\n _debug(\" cookie expired\")\n return False\n return True\n\n def return_ok_port(self, cookie, request):\n if cookie.port:\n req_port = request_port(request)\n if req_port is None:\n req_port = \"80\"\n for p in cookie.port.split(\",\"):\n if p == req_port:\n break\n else:\n _debug(\" request port %s does not match cookie port %s\",\n req_port, cookie.port)\n return False\n return True\n\n def return_ok_domain(self, cookie, request):\n req_host, erhn = eff_request_host(request)\n domain = cookie.domain\n\n # strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't\n if (cookie.version == 0 and\n (self.strict_ns_domain & self.DomainStrictNonDomain) and\n not cookie.domain_specified and domain != erhn):\n _debug(\" cookie with unspecified domain does not string-compare \"\n \"equal to request domain\")\n return False\n\n if cookie.version > 0 and not domain_match(erhn, domain):\n _debug(\" effective request-host name %s does not domain-match \"\n \"RFC 2965 cookie domain %s\", erhn, domain)\n return False\n if cookie.version == 0 and not (\".\"+erhn).endswith(domain):\n _debug(\" request-host %s does not match Netscape cookie domain \"\n \"%s\", req_host, domain)\n return False\n return True\n\n def domain_return_ok(self, domain, request):\n # Liberal check of. 
This is here as an optimization to avoid\n # having to load lots of MSIE cookie files unless necessary.\n req_host, erhn = eff_request_host(request)\n if not req_host.startswith(\".\"):\n req_host = \".\"+req_host\n if not erhn.startswith(\".\"):\n erhn = \".\"+erhn\n if not (req_host.endswith(domain) or erhn.endswith(domain)):\n #_debug(\" request domain %s does not match cookie domain %s\",\n # req_host, domain)\n return False\n\n if self.is_blocked(domain):\n _debug(\" domain %s is in user block-list\", domain)\n return False\n if self.is_not_allowed(domain):\n _debug(\" domain %s is not in user allow-list\", domain)\n return False\n\n return True\n\n def path_return_ok(self, path, request):\n _debug(\"- checking cookie path=%s\", path)\n req_path = request_path(request)\n if not req_path.startswith(path):\n _debug(\" %s does not path-match %s\", req_path, path)\n return False\n return True\n\n\ndef vals_sorted_by_key(adict):\n keys = sorted(adict.keys())\n return map(adict.get, keys)\n\ndef deepvalues(mapping):\n \"\"\"Iterates over nested mapping, depth-first, in sorted order by key.\"\"\"\n values = vals_sorted_by_key(mapping)\n for obj in values:\n mapping = False\n try:\n obj.items\n except AttributeError:\n pass\n else:\n mapping = True\n for subobj in deepvalues(obj):\n yield subobj\n if not mapping:\n yield obj\n\n\n# Used as second parameter to dict.get() method, to distinguish absent\n# dict key from one with a None value.\nclass Absent: pass\n\nclass CookieJar:\n \"\"\"Collection of HTTP cookies.\n\n You may not need to know about this class: try\n urllib.request.build_opener(HTTPCookieProcessor).open(url).\n \"\"\"\n\n non_word_re = re.compile(r\"\\W\")\n quote_re = re.compile(r\"([\\\"\\\\])\")\n strict_domain_re = re.compile(r\"\\.?[^.]*\")\n domain_re = re.compile(r\"[^.]*\")\n dots_re = re.compile(r\"^\\.+\")\n\n magic_re = re.compile(r\"^\\#LWP-Cookies-(\\d+\\.\\d+)\", re.ASCII)\n\n def __init__(self, policy=None):\n if policy is None:\n policy 
= DefaultCookiePolicy()\n self._policy = policy\n\n self._cookies_lock = _threading.RLock()\n self._cookies = {}\n\n def set_policy(self, policy):\n self._policy = policy\n\n def _cookies_for_domain(self, domain, request):\n cookies = []\n if not self._policy.domain_return_ok(domain, request):\n return []\n _debug(\"Checking %s for cookies to return\", domain)\n cookies_by_path = self._cookies[domain]\n for path in cookies_by_path.keys():\n if not self._policy.path_return_ok(path, request):\n continue\n cookies_by_name = cookies_by_path[path]\n for cookie in cookies_by_name.values():\n if not self._policy.return_ok(cookie, request):\n _debug(\" not returning cookie\")\n continue\n _debug(\" it's a match\")\n cookies.append(cookie)\n return cookies\n\n def _cookies_for_request(self, request):\n \"\"\"Return a list of cookies to be returned to server.\"\"\"\n cookies = []\n for domain in self._cookies.keys():\n cookies.extend(self._cookies_for_domain(domain, request))\n return cookies\n\n def _cookie_attrs(self, cookies):\n \"\"\"Return a list of cookie-attributes to be returned to server.\n\n like ['foo=\"bar\"; $Path=\"/\"', ...]\n\n The $Version attribute is also added when appropriate (currently only\n once per request).\n\n \"\"\"\n # add cookies in order of most specific (ie. 
longest) path first\n cookies.sort(key=lambda a: len(a.path), reverse=True)\n\n version_set = False\n\n attrs = []\n for cookie in cookies:\n # set version of Cookie header\n # XXX\n # What should it be if multiple matching Set-Cookie headers have\n # different versions themselves?\n # Answer: there is no answer; was supposed to be settled by\n # RFC 2965 errata, but that may never appear...\n version = cookie.version\n if not version_set:\n version_set = True\n if version > 0:\n attrs.append(\"$Version=%s\" % version)\n\n # quote cookie value if necessary\n # (not for Netscape protocol, which already has any quotes\n # intact, due to the poorly-specified Netscape Cookie: syntax)\n if ((cookie.value is not None) and\n self.non_word_re.search(cookie.value) and version > 0):\n value = self.quote_re.sub(r\"\\\\\\1\", cookie.value)\n else:\n value = cookie.value\n\n # add cookie-attributes to be returned in Cookie header\n if cookie.value is None:\n attrs.append(cookie.name)\n else:\n attrs.append(\"%s=%s\" % (cookie.name, value))\n if version > 0:\n if cookie.path_specified:\n attrs.append('$Path=\"%s\"' % cookie.path)\n if cookie.domain.startswith(\".\"):\n domain = cookie.domain\n if (not cookie.domain_initial_dot and\n domain.startswith(\".\")):\n domain = domain[1:]\n attrs.append('$Domain=\"%s\"' % domain)\n if cookie.port is not None:\n p = \"$Port\"\n if cookie.port_specified:\n p = p + ('=\"%s\"' % cookie.port)\n attrs.append(p)\n\n return attrs\n\n def add_cookie_header(self, request):\n \"\"\"Add correct Cookie: header to request (urllib.request.Request object).\n\n The Cookie2 header is also added unless policy.hide_cookie2 is true.\n\n \"\"\"\n _debug(\"add_cookie_header\")\n self._cookies_lock.acquire()\n try:\n\n self._policy._now = self._now = int(time.time())\n\n cookies = self._cookies_for_request(request)\n\n attrs = self._cookie_attrs(cookies)\n if attrs:\n if not request.has_header(\"Cookie\"):\n request.add_unredirected_header(\n \"Cookie\", \"; 
\".join(attrs))\n\n # if necessary, advertise that we know RFC 2965\n if (self._policy.rfc2965 and not self._policy.hide_cookie2 and\n not request.has_header(\"Cookie2\")):\n for cookie in cookies:\n if cookie.version != 1:\n request.add_unredirected_header(\"Cookie2\", '$Version=\"1\"')\n break\n\n finally:\n self._cookies_lock.release()\n\n self.clear_expired_cookies()\n\n def _normalized_cookie_tuples(self, attrs_set):\n \"\"\"Return list of tuples containing normalised cookie information.\n\n attrs_set is the list of lists of key,value pairs extracted from\n the Set-Cookie or Set-Cookie2 headers.\n\n Tuples are name, value, standard, rest, where name and value are the\n cookie name and value, standard is a dictionary containing the standard\n cookie-attributes (discard, secure, version, expires or max-age,\n domain, path and port) and rest is a dictionary containing the rest of\n the cookie-attributes.\n\n \"\"\"\n cookie_tuples = []\n\n boolean_attrs = \"discard\", \"secure\"\n value_attrs = (\"version\",\n \"expires\", \"max-age\",\n \"domain\", \"path\", \"port\",\n \"comment\", \"commenturl\")\n\n for cookie_attrs in attrs_set:\n name, value = cookie_attrs[0]\n\n # Build dictionary of standard cookie-attributes (standard) and\n # dictionary of other cookie-attributes (rest).\n\n # Note: expiry time is normalised to seconds since epoch. 
V0\n # cookies should have the Expires cookie-attribute, and V1 cookies\n # should have Max-Age, but since V1 includes RFC 2109 cookies (and\n # since V0 cookies may be a mish-mash of Netscape and RFC 2109), we\n # accept either (but prefer Max-Age).\n max_age_set = False\n\n bad_cookie = False\n\n standard = {}\n rest = {}\n for k, v in cookie_attrs[1:]:\n lc = k.lower()\n # don't lose case distinction for unknown fields\n if lc in value_attrs or lc in boolean_attrs:\n k = lc\n if k in boolean_attrs and v is None:\n # boolean cookie-attribute is present, but has no value\n # (like \"discard\", rather than \"port=80\")\n v = True\n if k in standard:\n # only first value is significant\n continue\n if k == \"domain\":\n if v is None:\n _debug(\" missing value for domain attribute\")\n bad_cookie = True\n break\n # RFC 2965 section 3.3.3\n v = v.lower()\n if k == \"expires\":\n if max_age_set:\n # Prefer max-age to expires (like Mozilla)\n continue\n if v is None:\n _debug(\" missing or invalid value for expires \"\n \"attribute: treating as session cookie\")\n continue\n if k == \"max-age\":\n max_age_set = True\n try:\n v = int(v)\n except ValueError:\n _debug(\" missing or invalid (non-numeric) value for \"\n \"max-age attribute\")\n bad_cookie = True\n break\n # convert RFC 2965 Max-Age to seconds since epoch\n # XXX Strictly you're supposed to follow RFC 2616\n # age-calculation rules. 
Remember that zero Max-Age is a\n # is a request to discard (old and new) cookie, though.\n k = \"expires\"\n v = self._now + v\n if (k in value_attrs) or (k in boolean_attrs):\n if (v is None and\n k not in (\"port\", \"comment\", \"commenturl\")):\n _debug(\" missing value for %s attribute\" % k)\n bad_cookie = True\n break\n standard[k] = v\n else:\n rest[k] = v\n\n if bad_cookie:\n continue\n\n cookie_tuples.append((name, value, standard, rest))\n\n return cookie_tuples\n\n def _cookie_from_cookie_tuple(self, tup, request):\n # standard is dict of standard cookie-attributes, rest is dict of the\n # rest of them\n name, value, standard, rest = tup\n\n domain = standard.get(\"domain\", Absent)\n path = standard.get(\"path\", Absent)\n port = standard.get(\"port\", Absent)\n expires = standard.get(\"expires\", Absent)\n\n # set the easy defaults\n version = standard.get(\"version\", None)\n if version is not None:\n try:\n version = int(version)\n except ValueError:\n return None # invalid version, ignore cookie\n secure = standard.get(\"secure\", False)\n # (discard is also set if expires is Absent)\n discard = standard.get(\"discard\", False)\n comment = standard.get(\"comment\", None)\n comment_url = standard.get(\"commenturl\", None)\n\n # set default path\n if path is not Absent and path != \"\":\n path_specified = True\n path = escape_path(path)\n else:\n path_specified = False\n path = request_path(request)\n i = path.rfind(\"/\")\n if i != -1:\n if version == 0:\n # Netscape spec parts company from reality here\n path = path[:i]\n else:\n path = path[:i+1]\n if len(path) == 0: path = \"/\"\n\n # set default domain\n domain_specified = domain is not Absent\n # but first we have to remember whether it starts with a dot\n domain_initial_dot = False\n if domain_specified:\n domain_initial_dot = bool(domain.startswith(\".\"))\n if domain is Absent:\n req_host, erhn = eff_request_host(request)\n domain = erhn\n elif not domain.startswith(\".\"):\n domain = 
\".\"+domain\n\n # set default port\n port_specified = False\n if port is not Absent:\n if port is None:\n # Port attr present, but has no value: default to request port.\n # Cookie should then only be sent back on that port.\n port = request_port(request)\n else:\n port_specified = True\n port = re.sub(r\"\\s+\", \"\", port)\n else:\n # No port attr present. Cookie can be sent back on any port.\n port = None\n\n # set default expires and discard\n if expires is Absent:\n expires = None\n discard = True\n elif expires <= self._now:\n # Expiry date in past is request to delete cookie. This can't be\n # in DefaultCookiePolicy, because can't delete cookies there.\n try:\n self.clear(domain, path, name)\n except KeyError:\n pass\n _debug(\"Expiring cookie, domain='%s', path='%s', name='%s'\",\n domain, path, name)\n return None\n\n return Cookie(version,\n name, value,\n port, port_specified,\n domain, domain_specified, domain_initial_dot,\n path, path_specified,\n secure,\n expires,\n discard,\n comment,\n comment_url,\n rest)\n\n def _cookies_from_attrs_set(self, attrs_set, request):\n cookie_tuples = self._normalized_cookie_tuples(attrs_set)\n\n cookies = []\n for tup in cookie_tuples:\n cookie = self._cookie_from_cookie_tuple(tup, request)\n if cookie: cookies.append(cookie)\n return cookies\n\n def _process_rfc2109_cookies(self, cookies):\n rfc2109_as_ns = getattr(self._policy, 'rfc2109_as_netscape', None)\n if rfc2109_as_ns is None:\n rfc2109_as_ns = not self._policy.rfc2965\n for cookie in cookies:\n if cookie.version == 1:\n cookie.rfc2109 = True\n if rfc2109_as_ns:\n # treat 2109 cookies as Netscape cookies rather than\n # as RFC2965 cookies\n cookie.version = 0\n\n def make_cookies(self, response, request):\n \"\"\"Return sequence of Cookie objects extracted from response object.\"\"\"\n # get cookie-attributes for RFC 2965 and Netscape protocols\n headers = response.info()\n rfc2965_hdrs = headers.get_all(\"Set-Cookie2\", [])\n ns_hdrs = 
headers.get_all(\"Set-Cookie\", [])\n\n rfc2965 = self._policy.rfc2965\n netscape = self._policy.netscape\n\n if ((not rfc2965_hdrs and not ns_hdrs) or\n (not ns_hdrs and not rfc2965) or\n (not rfc2965_hdrs and not netscape) or\n (not netscape and not rfc2965)):\n return [] # no relevant cookie headers: quick exit\n\n try:\n cookies = self._cookies_from_attrs_set(\n split_header_words(rfc2965_hdrs), request)\n except Exception:\n _warn_unhandled_exception()\n cookies = []\n\n if ns_hdrs and netscape:\n try:\n # RFC 2109 and Netscape cookies\n ns_cookies = self._cookies_from_attrs_set(\n parse_ns_headers(ns_hdrs), request)\n except Exception:\n _warn_unhandled_exception()\n ns_cookies = []\n self._process_rfc2109_cookies(ns_cookies)\n\n # Look for Netscape cookies (from Set-Cookie headers) that match\n # corresponding RFC 2965 cookies (from Set-Cookie2 headers).\n # For each match, keep the RFC 2965 cookie and ignore the Netscape\n # cookie (RFC 2965 section 9.1). Actually, RFC 2109 cookies are\n # bundled in with the Netscape cookies for this purpose, which is\n # reasonable behaviour.\n if rfc2965:\n lookup = {}\n for cookie in cookies:\n lookup[(cookie.domain, cookie.path, cookie.name)] = None\n\n def no_matching_rfc2965(ns_cookie, lookup=lookup):\n key = ns_cookie.domain, ns_cookie.path, ns_cookie.name\n return key not in lookup\n ns_cookies = filter(no_matching_rfc2965, ns_cookies)\n\n if ns_cookies:\n cookies.extend(ns_cookies)\n\n return cookies\n\n def set_cookie_if_ok(self, cookie, request):\n \"\"\"Set a cookie if policy says it's OK to do so.\"\"\"\n self._cookies_lock.acquire()\n try:\n self._policy._now = self._now = int(time.time())\n\n if self._policy.set_ok(cookie, request):\n self.set_cookie(cookie)\n\n\n finally:\n self._cookies_lock.release()\n\n def set_cookie(self, cookie):\n \"\"\"Set a cookie, without checking whether or not it should be set.\"\"\"\n c = self._cookies\n self._cookies_lock.acquire()\n try:\n if cookie.domain not in c: 
c[cookie.domain] = {}\n c2 = c[cookie.domain]\n if cookie.path not in c2: c2[cookie.path] = {}\n c3 = c2[cookie.path]\n c3[cookie.name] = cookie\n finally:\n self._cookies_lock.release()\n\n def extract_cookies(self, response, request):\n \"\"\"Extract cookies from response, where allowable given the request.\"\"\"\n _debug(\"extract_cookies: %s\", response.info())\n self._cookies_lock.acquire()\n try:\n self._policy._now = self._now = int(time.time())\n\n for cookie in self.make_cookies(response, request):\n if self._policy.set_ok(cookie, request):\n _debug(\" setting cookie: %s\", cookie)\n self.set_cookie(cookie)\n finally:\n self._cookies_lock.release()\n\n def clear(self, domain=None, path=None, name=None):\n \"\"\"Clear some cookies.\n\n Invoking this method without arguments will clear all cookies. If\n given a single argument, only cookies belonging to that domain will be\n removed. If given two arguments, cookies belonging to the specified\n path within that domain are removed. 
If given three arguments, then\n the cookie with the specified name, path and domain is removed.\n\n Raises KeyError if no matching cookie exists.\n\n \"\"\"\n if name is not None:\n if (domain is None) or (path is None):\n raise ValueError(\n \"domain and path must be given to remove a cookie by name\")\n del self._cookies[domain][path][name]\n elif path is not None:\n if domain is None:\n raise ValueError(\n \"domain must be given to remove cookies by path\")\n del self._cookies[domain][path]\n elif domain is not None:\n del self._cookies[domain]\n else:\n self._cookies = {}\n\n def clear_session_cookies(self):\n \"\"\"Discard all session cookies.\n\n Note that the .save() method won't save session cookies anyway, unless\n you ask otherwise by passing a true ignore_discard argument.\n\n \"\"\"\n self._cookies_lock.acquire()\n try:\n for cookie in self:\n if cookie.discard:\n self.clear(cookie.domain, cookie.path, cookie.name)\n finally:\n self._cookies_lock.release()\n\n def clear_expired_cookies(self):\n \"\"\"Discard all expired cookies.\n\n You probably don't need to call this method: expired cookies are never\n sent back to the server (provided you're using DefaultCookiePolicy),\n this method is called by CookieJar itself every so often, and the\n .save() method won't save expired cookies anyway (unless you ask\n otherwise by passing a true ignore_expires argument).\n\n \"\"\"\n self._cookies_lock.acquire()\n try:\n now = time.time()\n for cookie in self:\n if cookie.is_expired(now):\n self.clear(cookie.domain, cookie.path, cookie.name)\n finally:\n self._cookies_lock.release()\n\n def __iter__(self):\n return deepvalues(self._cookies)\n\n def __len__(self):\n \"\"\"Return number of contained cookies.\"\"\"\n i = 0\n for cookie in self: i = i + 1\n return i\n\n def __repr__(self):\n r = []\n for cookie in self: r.append(repr(cookie))\n return \"<%s[%s]>\" % (self.__class__, \", \".join(r))\n\n def __str__(self):\n r = []\n for cookie in self: 
r.append(str(cookie))\n return \"<%s[%s]>\" % (self.__class__, \", \".join(r))\n\n\n# derives from IOError for backwards-compatibility with Python 2.4.0\nclass LoadError(IOError): pass\n\nclass FileCookieJar(CookieJar):\n \"\"\"CookieJar that can be loaded from and saved to a file.\"\"\"\n\n def __init__(self, filename=None, delayload=False, policy=None):\n \"\"\"\n Cookies are NOT loaded from the named file until either the .load() or\n .revert() method is called.\n\n \"\"\"\n CookieJar.__init__(self, policy)\n if filename is not None:\n try:\n filename+\"\"\n except:\n raise ValueError(\"filename must be string-like\")\n self.filename = filename\n self.delayload = bool(delayload)\n\n def save(self, filename=None, ignore_discard=False, ignore_expires=False):\n \"\"\"Save cookies to a file.\"\"\"\n raise NotImplementedError()\n\n def load(self, filename=None, ignore_discard=False, ignore_expires=False):\n \"\"\"Load cookies from a file.\"\"\"\n if filename is None:\n if self.filename is not None: filename = self.filename\n else: raise ValueError(MISSING_FILENAME_TEXT)\n\n f = open(filename)\n try:\n self._really_load(f, filename, ignore_discard, ignore_expires)\n finally:\n f.close()\n\n def revert(self, filename=None,\n ignore_discard=False, ignore_expires=False):\n \"\"\"Clear all cookies and reload cookies from a saved file.\n\n Raises LoadError (or IOError) if reversion is not successful; the\n object's state will not be altered if this happens.\n\n \"\"\"\n if filename is None:\n if self.filename is not None: filename = self.filename\n else: raise ValueError(MISSING_FILENAME_TEXT)\n\n self._cookies_lock.acquire()\n try:\n\n old_state = copy.deepcopy(self._cookies)\n self._cookies = {}\n try:\n self.load(filename, ignore_discard, ignore_expires)\n except (LoadError, IOError):\n self._cookies = old_state\n raise\n\n finally:\n self._cookies_lock.release()\n\n\ndef lwp_cookie_str(cookie):\n \"\"\"Return string representation of Cookie in an the LWP cookie file 
format.\n\n Actually, the format is extended a bit -- see module docstring.\n\n \"\"\"\n h = [(cookie.name, cookie.value),\n (\"path\", cookie.path),\n (\"domain\", cookie.domain)]\n if cookie.port is not None: h.append((\"port\", cookie.port))\n if cookie.path_specified: h.append((\"path_spec\", None))\n if cookie.port_specified: h.append((\"port_spec\", None))\n if cookie.domain_initial_dot: h.append((\"domain_dot\", None))\n if cookie.secure: h.append((\"secure\", None))\n if cookie.expires: h.append((\"expires\",\n time2isoz(float(cookie.expires))))\n if cookie.discard: h.append((\"discard\", None))\n if cookie.comment: h.append((\"comment\", cookie.comment))\n if cookie.comment_url: h.append((\"commenturl\", cookie.comment_url))\n\n keys = sorted(cookie._rest.keys())\n for k in keys:\n h.append((k, str(cookie._rest[k])))\n\n h.append((\"version\", str(cookie.version)))\n\n return join_header_words([h])\n\nclass LWPCookieJar(FileCookieJar):\n \"\"\"\n The LWPCookieJar saves a sequence of \"Set-Cookie3\" lines.\n \"Set-Cookie3\" is the format used by the libwww-perl libary, not known\n to be compatible with any browser, but which is easy to read and\n doesn't lose information about RFC 2965 cookies.\n\n Additional methods\n\n as_lwp_str(ignore_discard=True, ignore_expired=True)\n\n \"\"\"\n\n def as_lwp_str(self, ignore_discard=True, ignore_expires=True):\n \"\"\"Return cookies as a string of \"\\\\n\"-separated \"Set-Cookie3\" headers.\n\n ignore_discard and ignore_expires: see docstring for FileCookieJar.save\n\n \"\"\"\n now = time.time()\n r = []\n for cookie in self:\n if not ignore_discard and cookie.discard:\n continue\n if not ignore_expires and cookie.is_expired(now):\n continue\n r.append(\"Set-Cookie3: %s\" % lwp_cookie_str(cookie))\n return \"\\n\".join(r+[\"\"])\n\n def save(self, filename=None, ignore_discard=False, ignore_expires=False):\n if filename is None:\n if self.filename is not None: filename = self.filename\n else: raise 
ValueError(MISSING_FILENAME_TEXT)\n\n f = open(filename, \"w\")\n try:\n # There really isn't an LWP Cookies 2.0 format, but this indicates\n # that there is extra information in here (domain_dot and\n # port_spec) while still being compatible with libwww-perl, I hope.\n f.write(\"#LWP-Cookies-2.0\\n\")\n f.write(self.as_lwp_str(ignore_discard, ignore_expires))\n finally:\n f.close()\n\n def _really_load(self, f, filename, ignore_discard, ignore_expires):\n magic = f.readline()\n if not self.magic_re.search(magic):\n msg = (\"%r does not look like a Set-Cookie3 (LWP) format \"\n \"file\" % filename)\n raise LoadError(msg)\n\n now = time.time()\n\n header = \"Set-Cookie3:\"\n boolean_attrs = (\"port_spec\", \"path_spec\", \"domain_dot\",\n \"secure\", \"discard\")\n value_attrs = (\"version\",\n \"port\", \"path\", \"domain\",\n \"expires\",\n \"comment\", \"commenturl\")\n\n try:\n while 1:\n line = f.readline()\n if line == \"\": break\n if not line.startswith(header):\n continue\n line = line[len(header):].strip()\n\n for data in split_header_words([line]):\n name, value = data[0]\n standard = {}\n rest = {}\n for k in boolean_attrs:\n standard[k] = False\n for k, v in data[1:]:\n if k is not None:\n lc = k.lower()\n else:\n lc = None\n # don't lose case distinction for unknown fields\n if (lc in value_attrs) or (lc in boolean_attrs):\n k = lc\n if k in boolean_attrs:\n if v is None: v = True\n standard[k] = v\n elif k in value_attrs:\n standard[k] = v\n else:\n rest[k] = v\n\n h = standard.get\n expires = h(\"expires\")\n discard = h(\"discard\")\n if expires is not None:\n expires = iso2time(expires)\n if expires is None:\n discard = True\n domain = h(\"domain\")\n domain_specified = domain.startswith(\".\")\n c = Cookie(h(\"version\"), name, value,\n h(\"port\"), h(\"port_spec\"),\n domain, domain_specified, h(\"domain_dot\"),\n h(\"path\"), h(\"path_spec\"),\n h(\"secure\"),\n expires,\n discard,\n h(\"comment\"),\n h(\"commenturl\"),\n rest)\n if not 
ignore_discard and c.discard:\n continue\n if not ignore_expires and c.is_expired(now):\n continue\n self.set_cookie(c)\n\n except IOError:\n raise\n except Exception:\n _warn_unhandled_exception()\n raise LoadError(\"invalid Set-Cookie3 format file %r: %r\" %\n (filename, line))\n\n\nclass MozillaCookieJar(FileCookieJar):\n \"\"\"\n\n WARNING: you may want to backup your browser's cookies file if you use\n this class to save cookies. I *think* it works, but there have been\n bugs in the past!\n\n This class differs from CookieJar only in the format it uses to save and\n load cookies to and from a file. This class uses the Mozilla/Netscape\n `cookies.txt' format. lynx uses this file format, too.\n\n Don't expect cookies saved while the browser is running to be noticed by\n the browser (in fact, Mozilla on unix will overwrite your saved cookies if\n you change them on disk while it's running; on Windows, you probably can't\n save at all while the browser is running).\n\n Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to\n Netscape cookies on saving.\n\n In particular, the cookie version and port number information is lost,\n together with information about whether or not Path, Port and Discard were\n specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the\n domain as set in the HTTP header started with a dot (yes, I'm aware some\n domains in Netscape files start with a dot and some don't -- trust me, you\n really don't want to know any more about this).\n\n Note that though Mozilla and Netscape use the same format, they use\n slightly different headers. The class saves cookies using the Netscape\n header by default (Mozilla can cope with that).\n\n \"\"\"\n magic_re = re.compile(\"#( Netscape)? HTTP Cookie File\")\n header = \"\"\"\\\n# Netscape HTTP Cookie File\n# http://curl.haxx.se/rfc/cookie_spec.html\n# This is a generated file! 
Do not edit.\n\n\"\"\"\n\n def _really_load(self, f, filename, ignore_discard, ignore_expires):\n now = time.time()\n\n magic = f.readline()\n if not self.magic_re.search(magic):\n f.close()\n raise LoadError(\n \"%r does not look like a Netscape format cookies file\" %\n filename)\n\n try:\n while 1:\n line = f.readline()\n if line == \"\": break\n\n # last field may be absent, so keep any trailing tab\n if line.endswith(\"\\n\"): line = line[:-1]\n\n # skip comments and blank lines XXX what is $ for?\n if (line.strip().startswith((\"#\", \"$\")) or\n line.strip() == \"\"):\n continue\n\n domain, domain_specified, path, secure, expires, name, value = \\\n line.split(\"\\t\")\n secure = (secure == \"TRUE\")\n domain_specified = (domain_specified == \"TRUE\")\n if name == \"\":\n # cookies.txt regards 'Set-Cookie: foo' as a cookie\n # with no name, whereas http.cookiejar regards it as a\n # cookie with no value.\n name = value\n value = None\n\n initial_dot = domain.startswith(\".\")\n assert domain_specified == initial_dot\n\n discard = False\n if expires == \"\":\n expires = None\n discard = True\n\n # assume path_specified is false\n c = Cookie(0, name, value,\n None, False,\n domain, domain_specified, initial_dot,\n path, False,\n secure,\n expires,\n discard,\n None,\n None,\n {})\n if not ignore_discard and c.discard:\n continue\n if not ignore_expires and c.is_expired(now):\n continue\n self.set_cookie(c)\n\n except IOError:\n raise\n except Exception:\n _warn_unhandled_exception()\n raise LoadError(\"invalid Netscape format cookies file %r: %r\" %\n (filename, line))\n\n def save(self, filename=None, ignore_discard=False, ignore_expires=False):\n if filename is None:\n if self.filename is not None: filename = self.filename\n else: raise ValueError(MISSING_FILENAME_TEXT)\n\n f = open(filename, \"w\")\n try:\n f.write(self.header)\n now = time.time()\n for cookie in self:\n if not ignore_discard and cookie.discard:\n continue\n if not ignore_expires and 
cookie.is_expired(now):\n continue\n if cookie.secure: secure = \"TRUE\"\n else: secure = \"FALSE\"\n if cookie.domain.startswith(\".\"): initial_dot = \"TRUE\"\n else: initial_dot = \"FALSE\"\n if cookie.expires is not None:\n expires = str(cookie.expires)\n else:\n expires = \"\"\n if cookie.value is None:\n # cookies.txt regards 'Set-Cookie: foo' as a cookie\n # with no name, whereas http.cookiejar regards it as a\n # cookie with no value.\n name = \"\"\n value = cookie.name\n else:\n name = cookie.name\n value = cookie.value\n f.write(\n \"\\t\".join([cookie.domain, initial_dot, cookie.path,\n secure, expires, name, value])+\n \"\\n\")\n finally:\n f.close()\n", "meta": {"content_hash": "559998a8add628988658911617f859c8", "timestamp": "", "source": "github", "line_count": 2091, "max_line_length": 83, "avg_line_length": 36.324725011956005, "alnum_prop": 0.5385425580936081, "repo_name": "timm/timmnix", "id": "9fcd4c6f462e47f869f7cf67d496da38d7faf527", "size": "75955", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "pypy3-v5.5.0-linux64/lib-python/3/http/cookiejar.py", "mode": "33261", "license": "mit", "language": [{"name": "Assembly", "bytes": "1641"}, {"name": "Batchfile", "bytes": "1234"}, {"name": "C", "bytes": "436685"}, {"name": "CSS", "bytes": "96"}, {"name": "Common Lisp", "bytes": "4"}, {"name": "Emacs Lisp", "bytes": "290698"}, {"name": "HTML", "bytes": "111577"}, {"name": "Makefile", "bytes": "1681"}, {"name": "PLSQL", "bytes": "22886"}, {"name": "PowerShell", "bytes": "1540"}, {"name": "Prolog", "bytes": "14301"}, {"name": "Python", "bytes": "21267592"}, {"name": "Roff", "bytes": "21080"}, {"name": "Shell", "bytes": "27687"}, {"name": "TeX", "bytes": "3052861"}, {"name": "VBScript", "bytes": "481"}]}} {"text": "package org.mule.tooling.esb.lang.mel.highlighter;\n\n\nimport com.intellij.openapi.fileTypes.SyntaxHighlighter;\nimport com.intellij.openapi.fileTypes.SyntaxHighlighterFactory;\nimport 
com.intellij.openapi.project.Project;\nimport com.intellij.openapi.vfs.VirtualFile;\nimport org.jetbrains.annotations.NotNull;\nimport org.jetbrains.annotations.Nullable;\n\npublic class MelSyntaxHighlighterFactory extends SyntaxHighlighterFactory {\n @NotNull\n @Override\n public SyntaxHighlighter getSyntaxHighlighter(@Nullable Project project, @Nullable VirtualFile virtualFile) {\n return MelSyntaxHighlighter.getInstance();\n }\n}\n", "meta": {"content_hash": "bb842a07b7fb793876f129aaa7c8d4a8", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 113, "avg_line_length": 36.8235294117647, "alnum_prop": 0.8210862619808307, "repo_name": "machaval/mule-intellij-plugins", "id": "9799708e729ba0ce58ab3830b4aa7889ea64ca45", "size": "626", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "mule-esb-plugin/src/main/java/org/mule/tooling/esb/lang/mel/highlighter/MelSyntaxHighlighterFactory.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "660750"}, {"name": "Lex", "bytes": "10081"}]}} {"text": "// Copyright (c) 2019-2020 The Bitcoin Core developers\n// Distributed under the MIT software license, see the accompanying\n// file COPYING or http://www.opensource.org/licenses/mit-license.php.\n\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\n\n

org.apache.river.api.io

\n\n\n\n", "meta": {"content_hash": "f0a8e712d1e3aff916a8575064506b2d", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 175, "avg_line_length": 66.65217391304348, "alnum_prop": 0.7292889758643183, "repo_name": "pfirmstone/JGDMS", "id": "5fcef349a9973a478ccb2f41c5b8ba4953cd0cac", "size": "3066", "binary": false, "copies": "2", "ref": "refs/heads/trunk", "path": "JGDMS/src/site/resources/old-static-site/doc/api/org/apache/river/api/io/package-frame.html", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "38260"}, {"name": "Groovy", "bytes": "30510"}, {"name": "HTML", "bytes": "107806458"}, {"name": "Java", "bytes": "24863323"}, {"name": "JavaScript", "bytes": "1702"}, {"name": "Makefile", "bytes": "3032"}, {"name": "Roff", "bytes": "863"}, {"name": "Shell", "bytes": "68247"}]}} {"text": "angular\n .module(\"avaliacao\", ['ui.router', 'ui.bootstrap'])\n .config([\"$stateProvider\", '$urlRouterProvider', \"$locationProvider\", routes]);\n\nfunction routes($stateProvider, $urlRouterProvider, $locationProvider) {\n $urlRouterProvider.otherwise('/');\n /*\n $locationProvider.html5Mode({\n enabled: true,\n requireBase: false\n \n });\n */\n $stateProvider\n .state('home', {\n url: '/',\n templateUrl: 'templates/home.html'\n })\n .state('Login', {\n url: '/Login',\n templateUrl: 'templates/Login.html'\n });\n}", "meta": {"content_hash": "9c4f7f3803eb4e78baab83f40b44cbce", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 83, "avg_line_length": 26.652173913043477, "alnum_prop": 0.5497553017944535, "repo_name": "MaikolSilva/desenvolvimento-web-ii", "id": "c9c10c93c6d0e4663b6ca952b7ac869693f2a4ee", "size": "613", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Avalia\u00e7\u00e3o/js/app.js", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "803"}, {"name": "HTML", "bytes": "4267"}]}} {"text": "import './index';\n\nimport sinon from 
'sinon';\nimport { expect } from 'chai';\nimport { $, $$ } from 'common-sk/modules/dom';\nimport fetchMock from 'fetch-mock';\n\nimport { GetTasksResponse } from '../json';\nimport {\n singleResultCanDelete, singleResultNoDelete, resultSetOneItem, resultSetTwoItems,\n} from './test_data';\nimport {\n eventPromise,\n setUpElementUnderTest,\n} from '../../../infra-sk/modules/test_util';\n\ndescribe('task-queue-sk', () => {\n const newInstance = setUpElementUnderTest('task-queue-sk');\n fetchMock.config.overwriteRoutes = false;\n\n const loadTable = async () => {\n const event = eventPromise('end-task');\n const taskTableSk = newInstance();\n await event;\n return taskTableSk;\n };\n const loadTableWithReplies = async (replies: GetTasksResponse[]) => {\n const kNumTaskQueries = 16;\n const replyCount = replies.length;\n expect(replyCount).to.be.most(kNumTaskQueries);\n for (let i = 0; i < replyCount; ++i) {\n fetchMock.postOnce('begin:/_/get_', replies[i]);\n }\n fetchMock.post('begin:/_/get_', 200, { repeat: kNumTaskQueries - replyCount });\n\n return loadTable();\n };\n\n afterEach(() => {\n // Check all mock fetches called at least once and reset.\n expect(fetchMock.done()).to.be.true;\n fetchMock.reset();\n sinon.restore();\n });\n\n it('shows table entries', async () => {\n // Return some results for 2 of the 16 task queries.\n const table = await loadTableWithReplies([resultSetOneItem, resultSetTwoItems]);\n\n // (3 items) * 6 columns\n expect($('td', table).length).to.equal(18);\n });\n\n it('delete option shown', async () => {\n const table = await loadTableWithReplies([singleResultCanDelete]);\n\n expect($$('delete-icon-sk', table)).to.have.property('hidden', false);\n });\n\n it('delete option hidden', async () => {\n const table = await loadTableWithReplies([singleResultNoDelete]);\n\n expect($$('delete-icon-sk', table)).to.have.property('hidden', true);\n });\n\n it('delete flow works', async () => {\n const table = await 
loadTableWithReplies([singleResultCanDelete]);\n\n sinon.stub(window, 'confirm').returns(true);\n sinon.stub(window, 'alert');\n fetchMock.postOnce((url, options) => url.startsWith('/_/delete_') && options.body === JSON.stringify({ id: 1 }), 200);\n ($$('delete-icon-sk', table) as HTMLElement).click();\n });\n\n it('task details works', async () => {\n const table = await loadTableWithReplies([resultSetOneItem]);\n\n expect($$('.dialog-background', table)!.classList.value).to.include('hidden');\n expect($$('.dialog-background', table)!.classList.value).to.include('hidden');\n ($$('.details', table) as HTMLElement).click();\n\n expect($$('.dialog-background', table)!.classList.value).to.not.include('hidden');\n });\n});\n", "meta": {"content_hash": "43fd414ca959907e65c5de1122d0626e", "timestamp": "", "source": "github", "line_count": 84, "max_line_length": 122, "avg_line_length": 33.04761904761905, "alnum_prop": 0.6534582132564841, "repo_name": "google/skia-buildbot", "id": "c942c4cc023ef256228c8bfd83640f957a988700", "size": "2776", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "ct/modules/task-queue-sk/task-queue-sk_test.ts", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Batchfile", "bytes": "736"}, {"name": "C", "bytes": "3114"}, {"name": "C++", "bytes": "18072"}, {"name": "CSS", "bytes": "13967"}, {"name": "Dockerfile", "bytes": "18546"}, {"name": "Go", "bytes": "8744467"}, {"name": "HTML", "bytes": "790880"}, {"name": "JavaScript", "bytes": "1186449"}, {"name": "Jupyter Notebook", "bytes": "9165"}, {"name": "Makefile", "bytes": "75823"}, {"name": "PowerShell", "bytes": "15305"}, {"name": "Python", "bytes": "126773"}, {"name": "SCSS", "bytes": "128048"}, {"name": "Shell", "bytes": "232449"}, {"name": "Starlark", "bytes": "234929"}, {"name": "TypeScript", "bytes": "1568540"}]}} {"text": "\n#pragma once\n#ifndef EL_READ_ASCII_HPP\n#define EL_READ_ASCII_HPP\n\nnamespace El {\nnamespace read {\n\ntemplate\ninline void\nAscii( 
Matrix& A, const string filename )\n{\n DEBUG_ONLY(CSE cse(\"read::Ascii\"))\n std::ifstream file( filename.c_str() );\n if( !file.is_open() )\n RuntimeError(\"Could not open \",filename);\n\n // Walk through the file once to both count the number of rows and\n // columns and to ensure that the number of columns is consistent\n Int height=0, width=0;\n string line;\n while( std::getline( file, line ) )\n {\n std::stringstream lineStream( line );\n Int numCols=0;\n T value;\n while( lineStream >> value ) ++numCols;\n if( numCols != 0 )\n {\n if( numCols != width && width != 0 )\n LogicError(\"Inconsistent number of columns\");\n else\n width = numCols;\n ++height;\n }\n }\n file.clear();\n file.seekg(0,file.beg);\n\n // Resize the matrix and then read it\n A.Resize( height, width );\n Int i=0;\n while( std::getline( file, line ) )\n {\n std::stringstream lineStream( line );\n Int j=0;\n T value;\n while( lineStream >> value )\n {\n A.Set( i, j, value );\n ++j;\n }\n ++i;\n }\n}\n\ntemplate\ninline void\nAscii( AbstractDistMatrix& A, const string filename )\n{\n DEBUG_ONLY(CSE cse(\"read::Ascii\"))\n std::ifstream file( filename.c_str() );\n if( !file.is_open() )\n RuntimeError(\"Could not open \",filename);\n\n // Walk through the file once to both count the number of rows and\n // columns and to ensure that the number of columns is consistent\n Int height=0, width=0;\n string line;\n while( std::getline( file, line ) )\n {\n std::stringstream lineStream( line );\n Int numCols=0;\n T value;\n while( lineStream >> value ) ++numCols;\n if( numCols != 0 )\n {\n if( numCols != width && width != 0 )\n LogicError(\"Inconsistent number of columns\");\n else\n width = numCols;\n ++height;\n }\n }\n file.clear();\n file.seekg(0,file.beg);\n\n // Resize the matrix and then read in our local portion\n A.Resize( height, width );\n Int i=0;\n while( std::getline( file, line ) )\n {\n std::stringstream lineStream( line );\n Int j=0;\n T value;\n while( lineStream >> value )\n {\n 
A.Set( i, j, value );\n ++j;\n }\n ++i;\n }\n}\n\ntemplate\ninline void\nAscii( AbstractBlockDistMatrix& A, const string filename )\n{\n DEBUG_ONLY(CSE cse(\"read::Ascii\"))\n std::ifstream file( filename.c_str() );\n if( !file.is_open() )\n RuntimeError(\"Could not open \",filename);\n\n // Walk through the file once to both count the number of rows and\n // columns and to ensure that the number of columns is consistent\n Int height=0, width=0;\n string line;\n while( std::getline( file, line ) )\n {\n std::stringstream lineStream( line );\n Int numCols=0;\n T value;\n while( lineStream >> value ) ++numCols;\n if( numCols != 0 )\n {\n if( numCols != width && width != 0 )\n LogicError(\"Inconsistent number of columns\");\n else\n width = numCols;\n ++height;\n }\n }\n file.clear();\n file.seekg(0,file.beg);\n\n // Resize the matrix and then read in our local portion\n A.Resize( height, width );\n Int i=0;\n while( std::getline( file, line ) )\n {\n std::stringstream lineStream( line );\n Int j=0;\n T value;\n while( lineStream >> value )\n {\n A.Set( i, j, value );\n ++j;\n }\n ++i;\n }\n}\n\n} // namespace read\n} // namespace El\n\n#endif // ifndef EL_READ_ASCII_HPP\n", "meta": {"content_hash": "be37942c1d5e3326e29d1fc221ad11c0", "timestamp": "", "source": "github", "line_count": 156, "max_line_length": 70, "avg_line_length": 25.185897435897434, "alnum_prop": 0.5357597353016035, "repo_name": "justusc/Elemental", "id": "20defeca2c500ce2defc4edba25439911e0df067", "size": "4193", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/io/Read/Ascii.hpp", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "C", "bytes": "760573"}, {"name": "C++", "bytes": "7177017"}, {"name": "CMake", "bytes": "186926"}, {"name": "Makefile", "bytes": "333"}, {"name": "Matlab", "bytes": "13306"}, {"name": "Python", "bytes": "942707"}, {"name": "Ruby", "bytes": "1393"}, {"name": "Shell", "bytes": "1335"}, {"name": "TeX", "bytes": "23728"}]}} {"text": 
"\n\n \n \n \n opam-website: Not compatible\n \n \n \n \n \n \n \n \n \n \n
\n \n
\n
\n
\n \u00ab Up\n

\n opam-website\n \n 1.2.1\n Not compatible\n \n

\n

(2020-06-25 03:38:52 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed # Synopsis\nbase-bigarray       base\nbase-num            base        Num library distributed with the OCaml compiler\nbase-threads        base\nbase-unix           base\ncamlp5              7.12        Preprocessor-pretty-printer of OCaml\nconf-findutils      1           Virtual package relying on findutils\ncoq                 8.5.0       Formal proof management system.\nnum                 0           The Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.05.0      The OCaml compiler (virtual package)\nocaml-base-compiler 4.05.0      Official 4.05.0 release\nocaml-config        1           OCaml Switch Configuration\n# opam file:\nopam-version: "2.0"\nmaintainer: "dev@clarus.me"\nhomepage: "https://github.com/clarus/coq-opam-website"\ndev-repo: "git+https://github.com/clarus/coq-opam-website.git"\nbug-reports: "https://github.com/clarus/coq-opam-website/issues"\nauthors: ["Guillaume Claret"]\nlicense: "MIT"\nbuild: [\n  ["./configure.sh"]\n  [make "-j%{jobs}%"]\n  ["sh" "-c" "cd extraction && make"]\n]\ndepends: [\n  "ocaml"\n  "coq" {>= "8.4pl4"}\n  "coq-io" {>= "3.1.0"}\n  "coq-io-exception" {>= "1.0.0"}\n  "coq-io-system" {>= "2.3.0"}\n  "coq-list-string" {>= "2.1.0"}\n]\nsynopsis: "Generation of a Coq website for OPAM: http://coq.io/opam/ "\nurl {\n  src: "https://github.com/coq-io/opam-website/archive/1.2.1.tar.gz"\n  checksum: "md5=8456933fe4380095c46240546fb2bad1"\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-opam-website.1.2.1 coq.8.5.0
\n
Return code
\n
5120
\n
Output
\n
[NOTE] Package coq is already installed (current version is 8.5.0).\nThe following dependencies couldn't be met:\n  - coq-opam-website -> coq-io-exception -> coq < 8.5~ -> ocaml < 4.03.0\n      base of this switch (use `--unlock-base' to force)\nNo solution found, exiting\n
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
opam remove -y coq; opam install -y --show-action --unlock-base coq-opam-website.1.2.1
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Install

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Installation size

\n

No files were installed.

\n

Uninstall

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub. \u00a9 Guillaume Claret.\n

\n
\n
\n \n \n \n\n", "meta": {"content_hash": "4ab1b2b0574a85e0ef395ea1715d806f", "timestamp": "", "source": "github", "line_count": 164, "max_line_length": 157, "avg_line_length": 40.707317073170735, "alnum_prop": 0.5286099460754943, "repo_name": "coq-bench/coq-bench.github.io", "id": "5e3ef27e413adbe420b32ced99dade95ddd562ac", "size": "6678", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.05.0-2.0.6/released/8.5.0/opam-website/1.2.1.html", "mode": "33188", "license": "mit", "language": []}} {"text": "\n\n#include \"unix/guts.h\"\n#include \"Application.h\"\n#include \"Clipboard.h\"\n#include \"Icon.h\"\n\n#define WIN PComponent(application)-> handle\n\n#define CF_NAME(x) (guts. clipboard_formats[(x)*3])\n#define CF_TYPE(x) (guts. clipboard_formats[(x)*3+1])\n#define CF_FORMAT(x) (guts. clipboard_formats[(x)*3+2])\n#define CF_ASSIGN(i,a,b,c) CF_NAME(i)=(a);CF_TYPE(i)=(b);CF_FORMAT(i)=((Atom)c)\n#define CF_32 (sizeof(long)*8) /* 32-bit properties are hacky */\n\nBool\nprima_init_clipboard_subsystem(char * error_buf)\n{\n guts. clipboards = hash_create();\n \n if ( !(guts. clipboard_formats = malloc( cfCOUNT * 3 * sizeof(Atom)))) {\n sprintf( error_buf, \"No memory\");\n return false;\n }\n guts. clipboard_formats_count = cfCOUNT;\n#if (cfText != 0) || (cfBitmap != 1) || (cfUTF8 != 2)\n#error broken clipboard type formats\n#endif \n\n CF_ASSIGN(cfText, XA_STRING, XA_STRING, 8);\n CF_ASSIGN(cfUTF8, UTF8_STRING, UTF8_STRING, 8);\n CF_ASSIGN(cfBitmap, XA_PIXMAP, XA_PIXMAP, CF_32);\n CF_ASSIGN(cfTargets, CF_TARGETS, XA_ATOM, CF_32);\n\n /* XXX - bitmaps and indexed pixmaps may have the associated colormap or pixel values \n CF_ASSIGN(cfPalette, XA_COLORMAP, XA_ATOM, CF_32);\n CF_ASSIGN(cfForeground, CF_FOREGROUND, CF_PIXEL, CF_32);\n CF_ASSIGN(cfBackground, CF_BACKGROUND, CF_PIXEL, CF_32);\n */\n \n guts. 
clipboard_event_timeout = 2000;\n return true;\n}\n\nPList\napc_get_standard_clipboards( void)\n{\n PList l = plist_create( 3, 1);\n if (!l) return nil;\n list_add( l, (Handle)duplicate_string( \"Primary\"));\n list_add( l, (Handle)duplicate_string( \"Secondary\"));\n list_add( l, (Handle)duplicate_string( \"Clipboard\"));\n return l;\n}\n\nBool\napc_clipboard_create( Handle self)\n{\n PClipboard c = (PClipboard)self;\n char *name, *x;\n DEFCC;\n\n XX-> selection = None;\n \n name = x = duplicate_string( c-> name);\n while (*x) {\n *x = toupper(*x);\n x++;\n }\n XX-> selection = XInternAtom( DISP, name, false);\n free( name);\n\n if ( hash_fetch( guts.clipboards, &XX->selection, sizeof(XX->selection))) {\n warn(\"This clipboard is already present\");\n return false;\n }\n\n if ( !( XX-> internal = malloc( sizeof( ClipboardDataItem) * cfCOUNT))) {\n warn(\"Not enough memory\");\n return false;\n }\n if ( !( XX-> external = malloc( sizeof( ClipboardDataItem) * cfCOUNT))) {\n free( XX-> internal);\n warn(\"Not enough memory\");\n return false;\n }\n bzero( XX-> internal, sizeof( ClipboardDataItem) * cfCOUNT);\n bzero( XX-> external, sizeof( ClipboardDataItem) * cfCOUNT);\n\n hash_store( guts.clipboards, &XX->selection, sizeof(XX->selection), (void*)self);\n\n return true;\n}\n\nstatic void\nclipboard_free_data( void * data, int size, Handle id)\n{\n if ( size <= 0) {\n if ( size == 0 && data != nil) free( data);\n return;\n }\n if ( id == cfBitmap) {\n int i;\n Pixmap * p = (Pixmap*) data;\n for ( i = 0; i < size/sizeof(Pixmap); i++, p++)\n if ( *p)\n XFreePixmap( DISP, *p);\n }\n free( data);\n}\n\n/*\n each clipboard type can be represented by a set of \n X properties pairs, where each is X name and X type.\n get_typename() returns such pairs by the index.\n */\nstatic Atom\nget_typename( Handle id, int index, Atom * type)\n{\n if ( type) *type = None;\n switch ( id) {\n case cfUTF8:\n if ( index > 1) return None;\n if ( index == 0) {\n if ( type) *type = 
CF_TYPE(id);\n return CF_NAME(id);\n } else {\n if ( type) *type = UTF8_MIME;\n return UTF8_MIME;\n }\n case cfBitmap:\n if ( index > 1) return None;\n if ( index == 0) {\n if ( type) *type = CF_TYPE(id);\n\t return CF_NAME(id);\n } else {\n if ( type) *type = XA_BITMAP;\n\t return XA_BITMAP;\n }\n case cfTargets:\n if ( index > 1) return None;\n if ( index == 0) {\n if ( type) *type = CF_TYPE(id);\n return CF_NAME(id);\n } else {\n if ( type) *type = CF_TARGETS;\n return CF_NAME(id);\n }\n }\n if ( index > 0) return None;\n if ( type) *type = CF_TYPE(id);\n return CF_NAME(id);\n}\n\nstatic void\nclipboard_kill_item( PClipboardDataItem item, Handle id)\n{\n item += id;\n clipboard_free_data( item-> data, item-> size, id);\n item-> data = nil;\n item-> size = 0;\n item-> name = get_typename( id, 0, nil);\n}\n\n/*\n Deletes a transfer record from pending xfer chain.\n */\nstatic void\ndelete_xfer( PClipboardSysData cc, ClipboardXfer * xfer)\n{\n ClipboardXferKey key;\n CLIPBOARD_XFER_KEY( key, xfer-> requestor, xfer-> property);\n if ( guts. clipboard_xfers) {\n IV refcnt;\n hash_delete( guts. clipboard_xfers, key, sizeof( key), false);\n refcnt = PTR2IV( hash_fetch( guts. clipboard_xfers, &xfer-> requestor, sizeof(XWindow)));\n if ( --refcnt == 0) {\n XSelectInput( DISP, xfer-> requestor, 0);\n hash_delete( guts. clipboard_xfers, &xfer-> requestor, sizeof(XWindow), false);\n } else {\n if ( refcnt < 0) refcnt = 0;\n hash_store( guts. 
clipboard_xfers, &xfer-> requestor, sizeof(XWindow), INT2PTR(void*, refcnt));\n }\n }\n if ( cc-> xfers) \n list_delete( cc-> xfers, ( Handle) xfer);\n if ( xfer-> data_detached && xfer-> data_master) \n clipboard_free_data( xfer-> data, xfer-> size, xfer-> id);\n free( xfer);\n}\n\nBool\napc_clipboard_destroy( Handle self)\n{\n DEFCC;\n int i;\n\n if (XX-> selection == None) return true;\n\n if ( XX-> xfers) {\n for ( i = 0; i < XX-> xfers-> count; i++) \n delete_xfer( XX, ( ClipboardXfer*) XX-> xfers-> items[i]);\n plist_destroy( XX-> xfers);\n }\n\n for ( i = 0; i < guts. clipboard_formats_count; i++) {\n if ( XX-> external) clipboard_kill_item( XX-> external, i);\n if ( XX-> internal) clipboard_kill_item( XX-> internal, i);\n }\n\n free( XX-> external);\n free( XX-> internal);\n hash_delete( guts.clipboards, &XX->selection, sizeof(XX->selection), false);\n\n XX-> selection = None;\n return true;\n}\n\nBool\napc_clipboard_open( Handle self)\n{\n DEFCC;\n if ( XX-> opened) return false;\n XX-> opened = true;\n \n if ( !XX-> inside_event) XX-> need_write = false;\n\n return true;\n}\n\nBool\napc_clipboard_close( Handle self)\n{\n DEFCC;\n if ( !XX-> opened) return false;\n XX-> opened = false;\n\n /* check if UTF8 is present and Text is not, and downgrade */\n if ( XX-> need_write &&\n\tXX-> internal[cfUTF8]. size > 0 &&\n\tXX-> internal[cfText]. size == 0) {\n Byte * src = XX-> internal[cfUTF8]. data;\n int len = utf8_length( src, src + XX-> internal[cfUTF8]. size);\n if (( XX-> internal[cfText]. data = malloc( len))) {\n\t STRLEN charlen;\n\t U8 *dst;\n\t dst = XX-> internal[cfText]. data;\n XX-> internal[cfText]. size = len;\n\t while ( len--) {\n register UV u = \n#if PERL_PATCHLEVEL >= 16\n\t utf8_to_uvchr_buf( src, src + XX-> internal[cfUTF8]. size, &charlen);\n#else\n\t utf8_to_uvchr( src, &charlen)\n#endif\n\t ;\n\t *(dst++) = ( u < 0x7f) ? 
u : '?'; /* XXX employ $LANG and iconv() */\n\t src += charlen;\n\t }\n } \n }\n \n\n if ( !XX-> inside_event) {\n int i; \n for ( i = 0; i < guts. clipboard_formats_count; i++) \n clipboard_kill_item( XX-> external, i);\n if ( XX-> need_write) \n if ( XGetSelectionOwner( DISP, XX-> selection) != WIN) \n XSetSelectionOwner( DISP, XX-> selection, WIN, CurrentTime);\n }\n \n return true;\n}\n\n/*\n Detaches data for pending transfers from XX, so eventual changes \n to XX->internal would not affect them. detach_xfers() should be\n called before clipboard_kill_item(XX-> internal), otherwise\n there's a chance of coredump.\n */\nstatic void\ndetach_xfers( PClipboardSysData XX, Handle id, Bool clear_original_data)\n{\n int i, got_master = 0, got_anything = 0;\n if ( !XX-> xfers) return;\n for ( i = 0; i < XX-> xfers-> count; i++) {\n ClipboardXfer * x = ( ClipboardXfer *) XX-> xfers-> items[i];\n if ( x-> data_detached || x-> id != id) continue;\n got_anything = 1;\n if ( !got_master) {\n x-> data_master = true;\n got_master = 1;\n }\n x-> data_detached = true;\n } \n if ( got_anything && clear_original_data) {\n XX-> internal[id]. data = nil;\n XX-> internal[id]. size = 0;\n XX-> internal[id]. name = get_typename( id, 0, nil);\n }\n}\n\nBool\napc_clipboard_clear( Handle self)\n{\n DEFCC;\n int i;\n\n for ( i = 0; i < guts. 
clipboard_formats_count; i++) {\n detach_xfers( XX, i, true);\n clipboard_kill_item( XX-> internal, i);\n clipboard_kill_item( XX-> external, i);\n }\n \n if ( XX-> inside_event) { \n XX-> need_write = true; \n } else {\n XWindow owner = XGetSelectionOwner( DISP, XX-> selection);\n XX-> need_write = false;\n if ( owner != None && owner != WIN)\n XSetSelectionOwner( DISP, XX-> selection, None, CurrentTime);\n }\n\n return true;\n}\n\ntypedef struct {\n Atom selection;\n long mask;\n} SelectionProcData;\n\n#define SELECTION_NOTIFY_MASK 1\n#define PROPERTY_NOTIFY_MASK 2\n\nstatic int\nselection_filter( Display * disp, XEvent * ev, SelectionProcData * data)\n{\n switch ( ev-> type) {\n case PropertyNotify:\n return (data-> mask & PROPERTY_NOTIFY_MASK) && (data-> selection == ev-> xproperty. atom);\n case SelectionRequest:\n case SelectionClear:\n case MappingNotify:\n return true;\n case SelectionNotify:\n return (data-> mask & SELECTION_NOTIFY_MASK) && (data-> selection == ev-> xselection. selection);\n case ClientMessage:\n if ( ev-> xclient. window == WIN ||\n ev-> xclient. window == guts. root ||\n ev-> xclient. window == None) return true;\n if ( hash_fetch( guts.windows, (void*)&ev-> xclient. window, \n sizeof(ev-> xclient. window))) return false;\n return true;\n }\n return false;\n}\n\n#define CFDATA_NONE 0\n#define CFDATA_NOT_ACQUIRED (-1)\n#define CFDATA_ERROR (-2)\n\n#define RPS_OK 0\n#define RPS_PARTIAL 1\n#define RPS_NODATA 2\n#define RPS_ERROR 3\n\nstatic int\nread_property( Atom property, Atom * type, int * format, \n unsigned long * size, unsigned char ** data)\n{\n int ret = ( *size > 0) ? RPS_PARTIAL : RPS_ERROR;\n unsigned char * prop, *a1;\n unsigned long n, left, offs = 0, new_size, big_offs = *size;\n\n XCHECKPOINT;\n Cdebug(\"clipboard: read_property: %s\\n\", XGetAtomName(DISP, property));\n while ( 1) {\n if ( XGetWindowProperty( DISP, WIN, property,\n offs, guts. limits. 
request_length - 4, false, \n AnyPropertyType, \n type, format, &n, &left, &prop) != Success) {\n XDeleteProperty( DISP, WIN, property);\n\t Cdebug(\"clipboard:fail\\n\");\n return ret;\n }\n XCHECKPOINT;\n Cdebug(\"clipboard: type=0x%x(%s) fmt=%d n=%d left=%d\\n\", \n\t *type, XGetAtomName(DISP,*type), *format, n, left);\n \n if ( *format == 32) *format = CF_32;\n\n if ( *type == 0 ) return RPS_NODATA;\n\n new_size = n * *format / 8;\n\n if ( new_size > 0) {\n if ( !( a1 = realloc( *data, big_offs + offs * 4 + new_size))) {\n warn(\"Not enough memory: %ld bytes\\n\", offs * 4 + new_size);\n XDeleteProperty( DISP, WIN, property);\n XFree( prop);\n return ret;\n }\n *data = a1;\n memcpy( *data + big_offs + offs * 4, prop, new_size);\n *size = big_offs + (offs * 4) + new_size;\n if ( *size > INT_MAX) *size = INT_MAX;\n offs += new_size / 4;\n ret = RPS_PARTIAL;\n }\n XFree( prop);\n if ( left <= 0 || *size == INT_MAX || n * *format == 0) break;\n }\n\n XDeleteProperty( DISP, WIN, property);\n XCHECKPOINT;\n\n return RPS_OK;\n}\n\nstatic Bool\nquery_datum( Handle self, Handle id, Atom query_target, Atom query_type)\n{\n DEFCC;\n XEvent ev;\n Atom type;\n int format, rps;\n SelectionProcData spd;\n unsigned long size = 0, incr = 0, old_size, delay;\n unsigned char * data;\n struct timeval start_time, timeout;\n \n /* init */\n if ( query_target == None) return false;\n data = malloc(0);\n XX-> external[id]. size = CFDATA_ERROR;\n gettimeofday( &start_time, nil);\n XCHECKPOINT;\n Cdebug(\"clipboard:convert %s from %08x\\n\", XGetAtomName( DISP, query_target), WIN);\n XDeleteProperty( DISP, WIN, XX-> selection);\n XConvertSelection( DISP, XX-> selection, query_target, XX-> selection, WIN, guts. last_time);\n XFlush( DISP);\n XCHECKPOINT;\n\n /* wait for SelectionNotify */\n spd. selection = XX-> selection;\n spd. mask = SELECTION_NOTIFY_MASK;\n while ( 1) {\n XIfEvent( DISP, &ev, (XIfEventProcType)selection_filter, (char*)&spd);\n if ( ev. 
type != SelectionNotify) {\n prima_handle_event( &ev, nil);\n continue;\n }\n if ( ev. xselection. property == None) goto FAIL;\n Cdebug(\"clipboard:read SelectionNotify %s %s\\n\",\n XGetAtomName(DISP, ev. xselection. property),\n XGetAtomName(DISP, ev. xselection. target));\n gettimeofday( &timeout, nil);\n delay = 2 * (( timeout. tv_sec - start_time. tv_sec) * 1000 + \n ( timeout. tv_usec - start_time. tv_usec) / 1000) + guts. clipboard_event_timeout;\n start_time = timeout;\n if ( read_property( ev. xselection. property, &type, &format, &size, &data) > RPS_PARTIAL) \n goto FAIL;\n XFlush( DISP);\n break;\n }\n XCHECKPOINT;\n\n if ( type != XA_INCR) { /* ordinary, single-property selection */\n if ( format != CF_FORMAT(id) || type != query_type) {\n\t if ( format != CF_FORMAT(id)) \n\t Cdebug(\"clipboard: id=%d: formats mismatch: got %d, want %d\\n\", id, format, CF_FORMAT(id));\n\t if ( type != query_type) \n\t Cdebug(\"clipboard: id=%d: types mismatch: got %s, want %s\\n\", id,\n\t\t XGetAtomName(DISP,type), XGetAtomName(DISP,query_type));\n\t return false;\n }\n XX-> external[id]. size = size;\n XX-> external[id]. data = data;\n XX-> external[id]. name = query_target;\n return true;\n }\n\n /* setup INCR */\n if ( format != CF_32 || size < 4) goto FAIL;\n incr = (unsigned long) *(( Atom*) data);\n if ( incr == 0) goto FAIL;\n size = 0;\n spd. mask = PROPERTY_NOTIFY_MASK;\n\n while ( 1) {\n /* wait for PropertyNotify */ \n while ( XCheckIfEvent( DISP, &ev, (XIfEventProcType)selection_filter, (char*)&spd) == False) {\n gettimeofday( &timeout, nil);\n if ((( timeout. tv_sec - start_time. tv_sec) * 1000 + \n ( timeout. tv_usec - start_time. tv_usec) / 1000) > delay) \n goto END_LOOP;\n }\n if ( ev. type != PropertyNotify) {\n prima_handle_event( &ev, nil);\n continue;\n }\n if ( ev. xproperty. state != PropertyNewValue) continue;\n start_time = timeout;\n old_size = size;\n\n rps = read_property( ev. xproperty. 
atom, &type, &format, &size, &data);\n XFlush( DISP);\n if ( rps == RPS_NODATA) continue;\n if ( rps == RPS_ERROR) goto FAIL; \n if ( format != CF_FORMAT(id) || type != CF_TYPE(id)) return false;\n if ( size > incr || /* read all in INCR */\n rps == RPS_PARTIAL || /* failed somewhere */\n ( size == incr && old_size == size) /* wait for empty PropertyNotify otherwise */\n ) break;\n }\nEND_LOOP:\n XCHECKPOINT;\n\n XX-> external[id]. size = size;\n XX-> external[id]. data = data;\n XX-> external[id]. name = query_target;\n return true;\n \nFAIL:\n XCHECKPOINT;\n free( data);\n return false;\n}\n\n\nstatic Bool\nquery_data( Handle self, Handle id)\n{\n Atom name, type;\n int index = 0;\n while (( name = get_typename( id, index++, &type)) != None) {\n if ( query_datum( self, id, name, type)) return true;\n }\n return false;\n}\n\nstatic Atom\nfind_atoms( Atom * data, int length, int id)\n{\n int i, index = 0;\n Atom name;\n \n while (( name = get_typename( id, index++, nil)) != None) {\n for ( i = 0; i < length / sizeof(Atom); i++) {\n if ( data[i] == name) \n return name;\n }\n }\n return None;\n}\n\n\nBool\napc_clipboard_has_format( Handle self, Handle id)\n{\n DEFCC;\n if ( id < 0 || id >= guts. clipboard_formats_count) return false;\n\n if ( XX-> inside_event) {\n return XX-> internal[id]. size > 0 || XX-> external[id]. size > 0;\n } else {\n if ( XX-> internal[id]. size > 0) return true;\n\n if ( XX-> external[cfTargets]. size == 0) {\n /* read TARGETS, which as array of ATOMs */\n query_data( self, cfTargets);\n\n if ( XX-> external[cfTargets].size > 0) {\n int i, size = XX-> external[cfTargets].size;\n Atom * data = ( Atom*)(XX-> external[cfTargets]. data);\n Atom ret;\n\n \n Cdebug(\"clipboard targets:\");\n for ( i = 0; i < size/4; i++) \n Cdebug(\"%s\\n\", XGetAtomName( DISP, data[i]));\n\n /* find our index for TARGETS[i], assign CFDATA_NOT_ACQUIRED to it */\n for ( i = 0; i < guts. 
clipboard_formats_count; i++) {\n if ( i == cfTargets) continue;\n ret = find_atoms( data, size, i);\n if ( ret != None && (\n XX-> external[i]. size == 0 ||\n XX-> external[i]. size == CFDATA_ERROR\n )\n ) { \n XX-> external[i]. size = CFDATA_NOT_ACQUIRED;\n XX-> external[i]. name = ret;\n }\n }\n\n if ( XX-> external[id]. size == 0 || \n XX-> external[id]. size == CFDATA_ERROR)\n return false;\n }\n }\n \n if ( XX-> external[id]. size > 0 || \n XX-> external[id]. size == CFDATA_NOT_ACQUIRED)\n return true;\n\n if ( XX-> external[id]. size == CFDATA_ERROR) \n return false;\n\n /* selection owner does not support TARGETS, so peek */\n if ( XX-> external[cfTargets]. size == 0 && XX-> internal[id]. size == 0)\n return query_data( self, id);\n }\n return false;\n}\n\nBool\napc_clipboard_get_data( Handle self, Handle id, PClipboardDataRec c)\n{\n DEFCC;\n STRLEN size;\n unsigned char * data;\n Atom name;\n\n if ( id < 0 || id >= guts. clipboard_formats_count) return false;\n\n if ( !XX-> inside_event) {\n if ( XX-> internal[id]. size == 0) {\n if ( XX-> external[id]. size == CFDATA_NOT_ACQUIRED) {\n if ( !query_data( self, id)) return false;\n }\n if ( XX-> external[id]. size == CFDATA_ERROR) return false;\n }\n }\n if ( XX-> internal[id]. size == CFDATA_ERROR) return false;\n\n if ( XX-> internal[id]. size > 0) {\n size = XX-> internal[id]. size;\n data = XX-> internal[id]. data;\n name = XX-> internal[id]. name;\n } else {\n size = XX-> external[id]. size;\n data = XX-> external[id]. data;\n name = XX-> external[id]. name;\n }\n if ( size == 0 || data == nil) return false;\n\n switch ( id) {\n case cfBitmap: {\n Handle img = c-> image; \n XWindow foo;\n Pixmap px = *(( Pixmap*)( data));\n unsigned int dummy, x, y, d;\n int bar;\n \n if ( !XGetGeometry( DISP, px, &foo, &bar, &bar, &x, &y, &dummy, &d))\n return false;\n CImage( img)-> create_empty( img, x, y, ( d == 1) ? imBW : guts. 
qdepth);\n if ( !prima_std_query_image( img, px)) return false;\n break;}\n case cfText:\n case cfUTF8: {\n void * ret = malloc( size);\n if ( !ret) {\n warn(\"Not enough memory: %d bytes\\n\", (int)size);\n return false;\n }\n memcpy( ret, data, size);\n c-> data = ret;\n c-> length = size;\n break;}\n default: {\n void * ret = malloc( size);\n if ( !ret) {\n warn(\"Not enough memory: %d bytes\\n\", (int)size);\n return false;\n }\n memcpy( ret, data, size);\n c-> data = ( Byte * ) ret;\n c-> length = size;\n break;}\n }\n return true;\n}\n\nBool\napc_clipboard_set_data( Handle self, Handle id, PClipboardDataRec c)\n{\n DEFCC;\n if ( id < 0 || id >= guts. clipboard_formats_count) return false;\n\n if ( id >= cfTargets && id < cfCOUNT ) return false;\n detach_xfers( XX, id, true);\n clipboard_kill_item( XX-> internal, id);\n\n switch ( id) {\n case cfBitmap: { \n Pixmap px = prima_std_pixmap( c-> image, CACHE_LOW_RES);\n if ( px) {\n if ( !( XX-> internal[cfBitmap]. data = malloc( sizeof( px)))) {\n XFreePixmap( DISP, px);\n return false;\n }\n XX-> internal[cfBitmap]. size = sizeof(px);\n memcpy( XX-> internal[cfBitmap]. data, &px, sizeof(px));\n } else\n return false;\n break;}\n default:\n if ( !( XX-> internal[id]. data = malloc( c-> length))) \n return false;\n XX-> internal[id]. size = c-> length;\n memcpy( XX-> internal[id]. data, c-> data, c-> length);\n break;\n }\n XX-> need_write = true; \n return true;\n}\n\nstatic Bool\nexpand_clipboards( Handle self, int keyLen, void * key, void * dummy)\n{\n DEFCC;\n PClipboardDataItem f;\n\n if ( !( f = realloc( XX-> internal, \n sizeof( ClipboardDataItem) * guts. clipboard_formats_count))) {\n guts. clipboard_formats_count--;\n return true;\n }\n f[ guts. clipboard_formats_count-1].size = 0;\n f[ guts. clipboard_formats_count-1].data = nil;\n f[ guts. clipboard_formats_count-1].name = CF_NAME(guts. 
clipboard_formats_count-1);\n XX-> internal = f;\n if ( !( f = realloc( XX-> external, \n sizeof( ClipboardDataItem) * guts. clipboard_formats_count))) {\n guts. clipboard_formats_count--;\n return true;\n }\n f[ guts. clipboard_formats_count-1].size = 0;\n f[ guts. clipboard_formats_count-1].data = nil;\n f[ guts. clipboard_formats_count-1].name = CF_NAME(guts. clipboard_formats_count-1);\n XX-> external = f;\n return false;\n}\n\nHandle\napc_clipboard_register_format( Handle self, const char* format)\n{\n int i;\n Atom x = XInternAtom( DISP, format, false);\n Atom *f;\n\n for ( i = 0; i < guts. clipboard_formats_count; i++) {\n if ( x == CF_NAME(i)) \n return i;\n }\n\n if ( !( f = realloc( guts. clipboard_formats, \n sizeof( Atom) * 3 * ( guts. clipboard_formats_count + 1)))) \n return false;\n \n guts. clipboard_formats = f;\n CF_ASSIGN( guts. clipboard_formats_count, x, x, 8); \n guts. clipboard_formats_count++;\n\n if ( hash_first_that( guts. clipboards, (void*)expand_clipboards, nil, nil, nil))\n return -1;\n\n return guts. clipboard_formats_count - 1;\n}\n\nBool\napc_clipboard_deregister_format( Handle self, Handle id)\n{\n return true;\n}\n\nApiHandle\napc_clipboard_get_handle( Handle self)\n{\n return C(self)-> selection;\n}\n\nstatic Bool\ndelete_xfers( Handle self, int keyLen, void * key, XWindow * window)\n{\n DEFCC;\n if ( XX-> xfers) {\n int i;\n for ( i = 0; i < XX-> xfers-> count; i++) \n delete_xfer( XX, ( ClipboardXfer*) XX-> xfers-> items[i]); \n }\n hash_delete( guts. clipboard_xfers, window, sizeof( XWindow), false);\n return false; \n}\n\nvoid\nprima_handle_selection_event( XEvent *ev, XWindow win, Handle self)\n{\n XCHECKPOINT;\n switch ( ev-> type) {\n case SelectionRequest: {\n XEvent xe;\n int i, id = -1;\n Atom prop = ev-> xselectionrequest. property,\n target = ev-> xselectionrequest. target;\n self = ( Handle) hash_fetch( guts. clipboards, &ev-> xselectionrequest. selection, sizeof( Atom)); \n\n guts. 
last_time = ev-> xselectionrequest. time;\n xe. type = SelectionNotify;\n xe. xselection. send_event = true;\n xe. xselection. serial = ev-> xselectionrequest. serial;\n xe. xselection. display = ev-> xselectionrequest. display;\n xe. xselection. requestor = ev-> xselectionrequest. requestor;\n xe. xselection. selection = ev-> xselectionrequest. selection;\n xe. xselection. target = target;\n xe. xselection. property = None;\n xe. xselection. time = ev-> xselectionrequest. time;\n \n Cdebug(\"from %08x %s at %s\\n\", ev-> xselectionrequest. requestor, \n XGetAtomName( DISP, ev-> xselectionrequest. target),\n XGetAtomName( DISP, ev-> xselectionrequest. property)\n );\n\n if ( self) { \n PClipboardSysData CC = C(self);\n Bool event = CC-> inside_event;\n int format, utf8_mime = 0;\n\n for ( i = 0; i < guts. clipboard_formats_count; i++) {\n if ( xe. xselection. target == CC-> internal[i]. name) {\n id = i;\n break;\n } else if ( i == cfUTF8 && xe. xselection. target == UTF8_MIME) {\n id = i;\n utf8_mime = 1;\n break;\n }\n }\n if ( id < 0) goto SEND_EMPTY;\n for ( i = 0; i < guts. clipboard_formats_count; i++)\n clipboard_kill_item( CC-> external, i);\n \n CC-> target = xe. xselection. target;\n CC-> need_write = false;\n \n CC-> inside_event = true;\n /* XXX cmSelection */\n CC-> inside_event = event;\n\n format = CF_FORMAT(id);\n target = CF_TYPE( id);\n if ( utf8_mime) target = UTF8_MIME;\n\n if ( id == cfTargets) { \n int count = 0, have_utf8 = 0;\n Atom * ci;\n for ( i = 0; i < guts. clipboard_formats_count; i++) {\n if ( i != cfTargets && CC-> internal[i]. size > 0) {\n count++;\n\t\t if ( i == cfUTF8) {\n\t\t count++;\n\t\t have_utf8 = 1;\n\t\t }\n\t }\n\t }\n detach_xfers( CC, cfTargets, true);\n clipboard_kill_item( CC-> internal, cfTargets);\n if (( CC-> internal[cfTargets]. data = malloc( count * sizeof( Atom)))) {\n CC-> internal[cfTargets]. size = count * sizeof( Atom);\n ci = (Atom*)CC-> internal[cfTargets]. data;\n for ( i = 0; i < guts. 
clipboard_formats_count; i++) \n if ( i != cfTargets && CC-> internal[i]. size > 0) \n *(ci++) = CF_NAME(i);\n if ( have_utf8) \n\t\t *(ci++) = UTF8_MIME;\n }\n }\n \n if ( CC-> internal[id]. size > 0) {\n Atom incr;\n int mode = PropModeReplace;\n unsigned char * data = CC-> internal[id]. data;\n unsigned long size = CC-> internal[id]. size * 8 / format;\n if ( CC-> internal[id]. size > guts. limits. request_length - 4) {\n int ok = 0;\n int reqlen = guts. limits. request_length - 4;\n /* INCR */\n if ( !guts. clipboard_xfers)\n guts. clipboard_xfers = hash_create();\n if ( !CC-> xfers) \n CC-> xfers = plist_create( 1, 1);\n if ( CC-> xfers && guts. clipboard_xfers) {\n ClipboardXfer * x = malloc( sizeof( ClipboardXfer));\n if ( x) {\n IV refcnt;\n ClipboardXferKey key;\n \n bzero( x, sizeof( ClipboardXfer));\n list_add( CC-> xfers, ( Handle) x);\n x-> size = CC-> internal[id]. size;\n x-> data = CC-> internal[id]. data;\n x-> blocks = ( x-> size / reqlen ) + ( x-> size % reqlen) ? 1 : 0;\n x-> requestor = xe. xselection. requestor;\n x-> property = prop;\n x-> target = xe. xselection. target;\n x-> self = self;\n x-> format = format;\n x-> id = id;\n gettimeofday( &x-> time, nil);\n\n CLIPBOARD_XFER_KEY( key, x-> requestor, x-> property);\n hash_store( guts. clipboard_xfers, key, sizeof(key), (void*) x);\n refcnt = PTR2IV( hash_fetch( guts. clipboard_xfers, &x-> requestor, sizeof( XWindow)));\n if ( refcnt++ == 0)\n XSelectInput( DISP, x-> requestor, PropertyChangeMask|StructureNotifyMask); \n hash_store( guts. clipboard_xfers, &x-> requestor, sizeof(XWindow), INT2PTR( void*, refcnt));\n\n format = CF_32;\n size = 1;\n incr = ( Atom) CC-> internal[id]. size;\n data = ( unsigned char*) &incr; \n ok = 1;\n target = XA_INCR;\n Cdebug(\"clpboard: init INCR for %08x %d\\n\", x-> requestor, x-> property);\n }\n }\n if ( !ok) size = reqlen;\n }\n\n if ( format == CF_32) format = 32;\n XChangeProperty( \n xe. xselection. display,\n xe. xselection. 
requestor,\n prop, target, format, mode, data, size);\n Cdebug(\"clipboard: store prop %s\\n\", XGetAtomName( DISP, prop));\n xe. xselection. property = prop;\n }\n\n /* content of PIXMAP or BITMAP is seemingly gets invalidated\n after the selection transfer, unlike the string data format */\n if ( id == cfBitmap) {\n bzero( CC-> internal[id].data, CC-> internal[id].size);\n bzero( CC-> external[id].data, CC-> external[id].size);\n clipboard_kill_item( CC-> internal, id);\n clipboard_kill_item( CC-> external, id);\n }\n }\nSEND_EMPTY:\n XSendEvent( xe.xselection.display, xe.xselection.requestor, false, 0, &xe);\n XFlush( DISP);\n Cdebug(\"clipboard:id %d, SelectionNotify to %08x , %s %s\\n\", id, xe.xselection.requestor, \n XGetAtomName( DISP, xe. xselection. property),\n XGetAtomName( DISP, xe. xselection. target)); \n } break;\n case SelectionClear: \n guts. last_time = ev-> xselectionclear. time;\n if ( XGetSelectionOwner( DISP, ev-> xselectionclear. selection) != WIN) {\n Handle c = ( Handle) hash_fetch( guts. clipboards, \n &ev-> xselectionclear. selection, sizeof( Atom)); \n guts. last_time = ev-> xselectionclear. time;\n if (c) {\n int i;\n C(c)-> selection_owner = nilHandle; \n for ( i = 0; i < guts. clipboard_formats_count; i++) {\n detach_xfers( C(c), i, true);\n clipboard_kill_item( C(c)-> external, i);\n clipboard_kill_item( C(c)-> internal, i);\n }\n }\n } \n break;\n case PropertyNotify:\n if ( ev-> xproperty. state == PropertyDelete) {\n unsigned long offs, size, reqlen = guts. limits. 
request_length - 4, format;\n ClipboardXfer * x = ( ClipboardXfer *) self;\n PClipboardSysData CC = C(x-> self);\n offs = x-> offset * reqlen;\n if ( offs >= x-> size) { /* clear termination */\n size = 0; \n offs = 0;\n } else {\n size = x-> size - offs;\n if ( size > reqlen) size = reqlen;\n }\n Cdebug(\"clipboard: put %d %d in %08x %d\\n\", x-> offset, size, x-> requestor, x-> property); \n if ( x-> format > 8) size /= 2;\n if ( x-> format > 16) size /= 2;\n\t format = ( x-> format == CF_32) ? 32 : x-> format;\n XChangeProperty( DISP, x-> requestor, x-> property, x-> target,\n format, PropModeReplace, \n x-> data + offs, size);\n XFlush( DISP);\n x-> offset++;\n if ( size == 0) delete_xfer( CC, x);\n }\n break;\n case DestroyNotify:\n Cdebug(\"clipboard: destroy xfers at %08x\\n\", ev-> xdestroywindow. window);\n hash_first_that( guts. clipboards, (void*)delete_xfers, (void*) &ev-> xdestroywindow. window, nil, nil);\n XFlush( DISP);\n break;\n }\n XCHECKPOINT;\n}\n\n", "meta": {"content_hash": "b524844abd8f17aad1bb619d75502ef3", "timestamp": "", "source": "github", "line_count": 1008, "max_line_length": 114, "avg_line_length": 31.44047619047619, "alnum_prop": 0.541713997223274, "repo_name": "run4flat/Primo", "id": "2131edac613fcff0a4faf7d06534c35e2f2a0cc8", "size": "33097", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "unix/apc_clipboard.c", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "Apex", "bytes": "56518"}, {"name": "C", "bytes": "1660328"}, {"name": "Perl", "bytes": "268749"}]}} {"text": "\n\npackage scaleio\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\t\"path/filepath\"\n\t\"strconv\"\n\t\"strings\"\n\n\t\"k8s.io/klog/v2\"\n\t\"k8s.io/mount-utils\"\n\tutilstrings \"k8s.io/utils/strings\"\n\n\tapi \"k8s.io/api/core/v1\"\n\t\"k8s.io/apimachinery/pkg/api/resource\"\n\tmeta \"k8s.io/apimachinery/pkg/apis/meta/v1\"\n\t\"k8s.io/apimachinery/pkg/types\"\n\t\"k8s.io/apimachinery/pkg/util/uuid\"\n\tvolumehelpers 
\"k8s.io/cloud-provider/volume/helpers\"\n\t\"k8s.io/kubernetes/pkg/volume\"\n\t\"k8s.io/kubernetes/pkg/volume/util\"\n)\n\ntype sioVolume struct {\n\tsioMgr *sioMgr\n\tplugin *sioPlugin\n\tpod *api.Pod\n\tpodUID types.UID\n\tspec *volume.Spec\n\tsecretName string\n\tsecretNamespace string\n\tvolSpecName string\n\tvolName string\n\treadOnly bool\n\tfsType string\n\toptions volume.VolumeOptions\n\tconfigData map[string]string\n\n\tvolume.MetricsNil\n}\n\nconst (\n\tminimumVolumeSizeGiB = 8\n)\n\n// *******************\n// volume.Volume Impl\nvar _ volume.Volume = &sioVolume{}\n\n// GetPath returns the path where the volume will be mounted.\nfunc (v *sioVolume) GetPath() string {\n\treturn v.plugin.host.GetPodVolumeDir(\n\t\tv.podUID,\n\t\tutilstrings.EscapeQualifiedName(sioPluginName),\n\t\tv.volSpecName)\n}\n\n// *************\n// Mounter Impl\n// *************\nvar _ volume.Mounter = &sioVolume{}\n\n// CanMount checks to verify that the volume can be mounted prior to Setup.\n// A nil error indicates that the volume is ready for mounitnig.\nfunc (v *sioVolume) CanMount() error {\n\treturn nil\n}\n\nfunc (v *sioVolume) SetUp(mounterArgs volume.MounterArgs) error {\n\treturn v.SetUpAt(v.GetPath(), mounterArgs)\n}\n\n// SetUp bind mounts the disk global mount to the volume path.\nfunc (v *sioVolume) SetUpAt(dir string, mounterArgs volume.MounterArgs) error {\n\tv.plugin.volumeMtx.LockKey(v.volSpecName)\n\tdefer v.plugin.volumeMtx.UnlockKey(v.volSpecName)\n\n\tklog.V(4).Info(log(\"setting up volume for PV.spec %s\", v.volSpecName))\n\tif err := v.setSioMgr(); err != nil {\n\t\tklog.Error(log(\"setup failed to create scalio manager: %v\", err))\n\t\treturn err\n\t}\n\n\tmounter := v.plugin.host.GetMounter(v.plugin.GetPluginName())\n\tnotDevMnt, err := mounter.IsLikelyNotMountPoint(dir)\n\tif err != nil && !os.IsNotExist(err) {\n\t\tklog.Error(log(\"IsLikelyNotMountPoint test failed for dir %v\", dir))\n\t\treturn err\n\t}\n\tif !notDevMnt 
{\n\t\tklog.V(4).Info(log(\"skipping setup, dir %s already a mount point\", v.volName))\n\t\treturn nil\n\t}\n\n\t// should multiple-mapping be enabled\n\tenableMultiMaps := false\n\tisROM := false\n\tif v.spec.PersistentVolume != nil {\n\t\tams := v.spec.PersistentVolume.Spec.AccessModes\n\t\tfor _, am := range ams {\n\t\t\tif am == api.ReadOnlyMany {\n\t\t\t\tenableMultiMaps = true\n\t\t\t\tisROM = true\n\t\t\t}\n\t\t}\n\t}\n\tklog.V(4).Info(log(\"multiple mapping enabled = %v\", enableMultiMaps))\n\n\tvolName := v.volName\n\tdevicePath, err := v.sioMgr.AttachVolume(volName, enableMultiMaps)\n\tif err != nil {\n\t\tklog.Error(log(\"setup of volume %v: %v\", v.volSpecName, err))\n\t\treturn err\n\t}\n\toptions := []string{}\n\tswitch {\n\tdefault:\n\t\toptions = append(options, \"rw\")\n\tcase isROM && !v.readOnly:\n\t\toptions = append(options, \"rw\")\n\tcase isROM:\n\t\toptions = append(options, \"ro\")\n\tcase v.readOnly:\n\t\toptions = append(options, \"ro\")\n\t}\n\n\tklog.V(4).Info(log(\"mounting device %s -> %s\", devicePath, dir))\n\tif err := os.MkdirAll(dir, 0750); err != nil {\n\t\tklog.Error(log(\"failed to create dir %#v: %v\", dir, err))\n\t\treturn err\n\t}\n\tklog.V(4).Info(log(\"setup created mount point directory %s\", dir))\n\n\tdiskMounter := util.NewSafeFormatAndMountFromHost(v.plugin.GetPluginName(), v.plugin.host)\n\terr = diskMounter.FormatAndMount(devicePath, dir, v.fsType, options)\n\n\tif err != nil {\n\t\tklog.Error(log(\"mount operation failed during setup: %v\", err))\n\t\tif err := os.Remove(dir); err != nil && !os.IsNotExist(err) {\n\t\t\tklog.Error(log(\"failed to remove dir %s during a failed mount at setup: %v\", dir, err))\n\t\t\treturn err\n\t\t}\n\t\treturn err\n\t}\n\n\tif !v.readOnly && mounterArgs.FsGroup != nil {\n\t\tklog.V(4).Info(log(\"applying value FSGroup ownership\"))\n\t\tvolume.SetVolumeOwnership(v, mounterArgs.FsGroup, mounterArgs.FSGroupChangePolicy, 
util.FSGroupCompleteHook(v.plugin.GetPluginName()))\n\t}\n\n\tklog.V(4).Info(log(\"successfully setup PV %s: volume %s mapped as %s mounted at %s\", v.volSpecName, v.volName, devicePath, dir))\n\treturn nil\n}\n\nfunc (v *sioVolume) GetAttributes() volume.Attributes {\n\treturn volume.Attributes{\n\t\tReadOnly: v.readOnly,\n\t\tManaged: !v.readOnly,\n\t\tSupportsSELinux: true,\n\t}\n}\n\n// **********************\n// volume.Unmounter Impl\n// *********************\nvar _ volume.Unmounter = &sioVolume{}\n\n// TearDownAt unmounts the bind mount\nfunc (v *sioVolume) TearDown() error {\n\treturn v.TearDownAt(v.GetPath())\n}\n\n// TearDown unmounts and remove the volume\nfunc (v *sioVolume) TearDownAt(dir string) error {\n\tv.plugin.volumeMtx.LockKey(v.volSpecName)\n\tdefer v.plugin.volumeMtx.UnlockKey(v.volSpecName)\n\n\tmounter := v.plugin.host.GetMounter(v.plugin.GetPluginName())\n\tdev, _, err := mount.GetDeviceNameFromMount(mounter, dir)\n\tif err != nil {\n\t\tklog.Errorf(log(\"failed to get reference count for volume: %s\", dir))\n\t\treturn err\n\t}\n\n\tklog.V(4).Info(log(\"attempting to unmount %s\", dir))\n\tif err := mount.CleanupMountPoint(dir, mounter, false); err != nil {\n\t\tklog.Error(log(\"teardown failed while unmounting dir %s: %v \", dir, err))\n\t\treturn err\n\t}\n\tklog.V(4).Info(log(\"dir %s unmounted successfully\", dir))\n\n\t// detach/unmap\n\tkvh, ok := v.plugin.host.(volume.KubeletVolumeHost)\n\tif !ok {\n\t\treturn fmt.Errorf(\"plugin volume host does not implement KubeletVolumeHost interface\")\n\t}\n\thu := kvh.GetHostUtil()\n\tdeviceBusy, err := hu.DeviceOpened(dev)\n\tif err != nil {\n\t\tklog.Error(log(\"teardown unable to get status for device %s: %v\", dev, err))\n\t\treturn err\n\t}\n\n\t// Detach volume from node:\n\t// use \"last attempt wins\" strategy to detach volume from node\n\t// only allow volume to detach when it is not busy (not being used by other pods)\n\tif !deviceBusy {\n\t\tklog.V(4).Info(log(\"teardown is 
attempting to detach/unmap volume for PV %s\", v.volSpecName))\n\t\tif err := v.resetSioMgr(); err != nil {\n\t\t\tklog.Error(log(\"teardown failed, unable to reset scalio mgr: %v\", err))\n\t\t}\n\t\tvolName := v.volName\n\t\tif err := v.sioMgr.DetachVolume(volName); err != nil {\n\t\t\tklog.Warning(log(\"warning: detaching failed for volume %s: %v\", volName, err))\n\t\t\treturn nil\n\t\t}\n\t\tklog.V(4).Infof(log(\"teardown of volume %v detached successfully\", volName))\n\t}\n\treturn nil\n}\n\n// ********************\n// volume.Deleter Impl\n// ********************\nvar _ volume.Deleter = &sioVolume{}\n\nfunc (v *sioVolume) Delete() error {\n\tklog.V(4).Info(log(\"deleting pvc %s\", v.volSpecName))\n\n\tif err := v.setSioMgrFromSpec(); err != nil {\n\t\tklog.Error(log(\"delete failed while setting sio manager: %v\", err))\n\t\treturn err\n\t}\n\n\terr := v.sioMgr.DeleteVolume(v.volName)\n\tif err != nil {\n\t\tklog.Error(log(\"failed to delete volume %s: %v\", v.volName, err))\n\t\treturn err\n\t}\n\n\tklog.V(4).Info(log(\"successfully deleted PV %s with volume %s\", v.volSpecName, v.volName))\n\treturn nil\n}\n\n// ************************\n// volume.Provisioner Impl\n// ************************\nvar _ volume.Provisioner = &sioVolume{}\n\nfunc (v *sioVolume) Provision(selectedNode *api.Node, allowedTopologies []api.TopologySelectorTerm) (*api.PersistentVolume, error) {\n\tklog.V(4).Info(log(\"attempting to dynamically provision pvc %v\", v.options.PVC.Name))\n\n\tif !util.AccessModesContainedInAll(v.plugin.GetAccessModes(), v.options.PVC.Spec.AccessModes) {\n\t\treturn nil, fmt.Errorf(\"invalid AccessModes %v: only AccessModes %v are supported\", v.options.PVC.Spec.AccessModes, v.plugin.GetAccessModes())\n\t}\n\n\tif util.CheckPersistentVolumeClaimModeBlock(v.options.PVC) {\n\t\treturn nil, fmt.Errorf(\"%s does not support block volume provisioning\", v.plugin.GetPluginName())\n\t}\n\n\t// setup volume attrributes\n\tgenName := v.generateName(\"k8svol\", 
11)\n\n\tcapacity := v.options.PVC.Spec.Resources.Requests[api.ResourceName(api.ResourceStorage)]\n\n\tvolSizeGiB, err := volumehelpers.RoundUpToGiB(capacity)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif volSizeGiB < minimumVolumeSizeGiB {\n\t\tvolSizeGiB = minimumVolumeSizeGiB\n\t\tklog.V(4).Info(log(\"capacity less than 8Gi found, adjusted to %dGi\", volSizeGiB))\n\n\t}\n\n\t// create sio manager\n\tif err := v.setSioMgrFromConfig(); err != nil {\n\t\tklog.Error(log(\"provision failed while setting up sio mgr: %v\", err))\n\t\treturn nil, err\n\t}\n\n\t// create volume\n\tvolName := genName\n\tvol, err := v.sioMgr.CreateVolume(volName, volSizeGiB)\n\tif err != nil {\n\t\tklog.Error(log(\"provision failed while creating volume: %v\", err))\n\t\treturn nil, err\n\t}\n\n\t// prepare data for pv\n\tv.configData[confKey.volumeName] = volName\n\tsslEnabled, err := strconv.ParseBool(v.configData[confKey.sslEnabled])\n\tif err != nil {\n\t\tklog.Warning(log(\"failed to parse parameter sslEnabled, setting to false\"))\n\t\tsslEnabled = false\n\t}\n\treadOnly, err := strconv.ParseBool(v.configData[confKey.readOnly])\n\tif err != nil {\n\t\tklog.Warning(log(\"failed to parse parameter readOnly, setting it to false\"))\n\t\treadOnly = false\n\t}\n\n\t// describe created pv\n\tpvName := genName\n\tpv := &api.PersistentVolume{\n\t\tObjectMeta: meta.ObjectMeta{\n\t\t\tName: pvName,\n\t\t\tNamespace: v.options.PVC.Namespace,\n\t\t\tLabels: map[string]string{},\n\t\t\tAnnotations: map[string]string{\n\t\t\t\tutil.VolumeDynamicallyCreatedByKey: \"scaleio-dynamic-provisioner\",\n\t\t\t},\n\t\t},\n\t\tSpec: api.PersistentVolumeSpec{\n\t\t\tPersistentVolumeReclaimPolicy: v.options.PersistentVolumeReclaimPolicy,\n\t\t\tAccessModes: v.options.PVC.Spec.AccessModes,\n\t\t\tCapacity: api.ResourceList{\n\t\t\t\tapi.ResourceName(api.ResourceStorage): resource.MustParse(\n\t\t\t\t\tfmt.Sprintf(\"%dGi\", volSizeGiB),\n\t\t\t\t),\n\t\t\t},\n\t\t\tPersistentVolumeSource: 
api.PersistentVolumeSource{\n\t\t\t\tScaleIO: &api.ScaleIOPersistentVolumeSource{\n\t\t\t\t\tGateway: v.configData[confKey.gateway],\n\t\t\t\t\tSSLEnabled: sslEnabled,\n\t\t\t\t\tSecretRef: &api.SecretReference{Name: v.secretName, Namespace: v.secretNamespace},\n\t\t\t\t\tSystem: v.configData[confKey.system],\n\t\t\t\t\tProtectionDomain: v.configData[confKey.protectionDomain],\n\t\t\t\t\tStoragePool: v.configData[confKey.storagePool],\n\t\t\t\t\tStorageMode: v.configData[confKey.storageMode],\n\t\t\t\t\tVolumeName: volName,\n\t\t\t\t\tFSType: v.configData[confKey.fsType],\n\t\t\t\t\tReadOnly: readOnly,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\tif len(v.options.PVC.Spec.AccessModes) == 0 {\n\t\tpv.Spec.AccessModes = v.plugin.GetAccessModes()\n\t}\n\n\tklog.V(4).Info(log(\"provisioner created pv %v and volume %s successfully\", pvName, vol.Name))\n\treturn pv, nil\n}\n\n// setSioMgr creates scaleio mgr from cached config data if found\n// otherwise, setups new config data and create mgr\nfunc (v *sioVolume) setSioMgr() error {\n\tklog.V(4).Info(log(\"setting up sio mgr for spec %s\", v.volSpecName))\n\tpodDir := v.plugin.host.GetPodPluginDir(v.podUID, sioPluginName)\n\tconfigName := filepath.Join(podDir, sioConfigFileName)\n\tif v.sioMgr == nil {\n\t\tconfigData, err := loadConfig(configName) // try to load config if exist\n\t\tif err != nil {\n\t\t\tif !os.IsNotExist(err) {\n\t\t\t\tklog.Error(log(\"failed to load config %s : %v\", configName, err))\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tklog.V(4).Info(log(\"previous config file not found, creating new one\"))\n\t\t\t// prepare config data\n\t\t\tconfigData = make(map[string]string)\n\t\t\tmapVolumeSpec(configData, v.spec)\n\n\t\t\t// additional config data\n\t\t\tconfigData[confKey.secretNamespace] = v.secretNamespace\n\t\t\tconfigData[confKey.secretName] = v.secretName\n\t\t\tconfigData[confKey.volSpecName] = v.volSpecName\n\n\t\t\tif err := validateConfigs(configData); err != nil {\n\t\t\t\tklog.Error(log(\"config setup 
failed: %s\", err))\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t// persist config\n\t\t\tif err := saveConfig(configName, configData); err != nil {\n\t\t\t\tklog.Error(log(\"failed to save config data: %v\", err))\n\t\t\t\treturn err\n\t\t\t}\n\t\t}\n\t\t// merge in secret\n\t\tif err := attachSecret(v.plugin, v.secretNamespace, configData); err != nil {\n\t\t\tklog.Error(log(\"failed to load secret: %v\", err))\n\t\t\treturn err\n\t\t}\n\n\t\t// merge in Sdc Guid label value\n\t\tif err := attachSdcGUID(v.plugin, configData); err != nil {\n\t\t\tklog.Error(log(\"failed to retrieve sdc guid: %v\", err))\n\t\t\treturn err\n\t\t}\n\t\tmgr, err := newSioMgr(configData, v.plugin.host, v.plugin.host.GetExec(v.plugin.GetPluginName()))\n\n\t\tif err != nil {\n\t\t\tklog.Error(log(\"failed to reset sio manager: %v\", err))\n\t\t\treturn err\n\t\t}\n\n\t\tv.sioMgr = mgr\n\t}\n\treturn nil\n}\n\n// resetSioMgr creates scaleio manager from existing (cached) config data\nfunc (v *sioVolume) resetSioMgr() error {\n\tpodDir := v.plugin.host.GetPodPluginDir(v.podUID, sioPluginName)\n\tconfigName := filepath.Join(podDir, sioConfigFileName)\n\tif v.sioMgr == nil {\n\t\t// load config data from disk\n\t\tconfigData, err := loadConfig(configName)\n\t\tif err != nil {\n\t\t\tklog.Error(log(\"failed to load config data: %v\", err))\n\t\t\treturn err\n\t\t}\n\t\tv.secretName = configData[confKey.secretName]\n\t\tv.secretNamespace = configData[confKey.secretNamespace]\n\t\tv.volName = configData[confKey.volumeName]\n\t\tv.volSpecName = configData[confKey.volSpecName]\n\n\t\t// attach secret\n\t\tif err := attachSecret(v.plugin, v.secretNamespace, configData); err != nil {\n\t\t\tklog.Error(log(\"failed to load secret: %v\", err))\n\t\t\treturn err\n\t\t}\n\n\t\t// merge in Sdc Guid label value\n\t\tif err := attachSdcGUID(v.plugin, configData); err != nil {\n\t\t\tklog.Error(log(\"failed to retrieve sdc guid: %v\", err))\n\t\t\treturn err\n\t\t}\n\t\tmgr, err := newSioMgr(configData, 
v.plugin.host, v.plugin.host.GetExec(v.plugin.GetPluginName()))\n\n\t\tif err != nil {\n\t\t\tklog.Error(log(\"failed to reset scaleio mgr: %v\", err))\n\t\t\treturn err\n\t\t}\n\t\tv.sioMgr = mgr\n\t}\n\treturn nil\n}\n\n// setSioFromConfig sets up scaleio mgr from an available config data map\n// designed to be called from dynamic provisioner\nfunc (v *sioVolume) setSioMgrFromConfig() error {\n\tklog.V(4).Info(log(\"setting scaleio mgr from available config\"))\n\tif v.sioMgr == nil {\n\t\tapplyConfigDefaults(v.configData)\n\n\t\tv.configData[confKey.volSpecName] = v.volSpecName\n\n\t\tif err := validateConfigs(v.configData); err != nil {\n\t\t\tklog.Error(log(\"config data setup failed: %s\", err))\n\t\t\treturn err\n\t\t}\n\n\t\t// copy config and attach secret\n\t\tdata := map[string]string{}\n\t\tfor k, v := range v.configData {\n\t\t\tdata[k] = v\n\t\t}\n\n\t\tif err := attachSecret(v.plugin, v.secretNamespace, data); err != nil {\n\t\t\tklog.Error(log(\"failed to load secret: %v\", err))\n\t\t\treturn err\n\t\t}\n\t\tmgr, err := newSioMgr(data, v.plugin.host, v.plugin.host.GetExec(v.plugin.GetPluginName()))\n\n\t\tif err != nil {\n\t\t\tklog.Error(log(\"failed while setting scaleio mgr from config: %v\", err))\n\t\t\treturn err\n\t\t}\n\t\tv.sioMgr = mgr\n\t}\n\treturn nil\n}\n\n// setSioMgrFromSpec sets the scaleio manager from a spec object.\n// The spec may be complete or incomplete depending on lifecycle phase.\nfunc (v *sioVolume) setSioMgrFromSpec() error {\n\tklog.V(4).Info(log(\"setting sio manager from spec\"))\n\tif v.sioMgr == nil {\n\t\t// get config data form spec volume source\n\t\tconfigData := map[string]string{}\n\t\tmapVolumeSpec(configData, v.spec)\n\n\t\t// additional config\n\t\tconfigData[confKey.secretNamespace] = v.secretNamespace\n\t\tconfigData[confKey.secretName] = v.secretName\n\t\tconfigData[confKey.volSpecName] = v.volSpecName\n\n\t\tif err := validateConfigs(configData); err != nil {\n\t\t\tklog.Error(log(\"config setup 
failed: %s\", err))\n\t\t\treturn err\n\t\t}\n\n\t\t// attach secret object to config data\n\t\tif err := attachSecret(v.plugin, v.secretNamespace, configData); err != nil {\n\t\t\tklog.Error(log(\"failed to load secret: %v\", err))\n\t\t\treturn err\n\t\t}\n\t\tmgr, err := newSioMgr(configData, v.plugin.host, v.plugin.host.GetExec(v.plugin.GetPluginName()))\n\n\t\tif err != nil {\n\t\t\tklog.Error(log(\"failed to reset sio manager: %v\", err))\n\t\t\treturn err\n\t\t}\n\t\tv.sioMgr = mgr\n\t}\n\treturn nil\n}\n\nfunc (v *sioVolume) generateName(prefix string, size int) string {\n\treturn fmt.Sprintf(\"%s-%s\", prefix, strings.Replace(string(uuid.NewUUID()), \"-\", \"\", -1)[0:size])\n}\n", "meta": {"content_hash": "4c549c5effcc93c5f43f8883f30e5f82", "timestamp": "", "source": "github", "line_count": 516, "max_line_length": 144, "avg_line_length": 30.606589147286822, "alnum_prop": 0.6855568922940544, "repo_name": "kevensen/kubernetes", "id": "23328428ca084e5ca2ccc2b31ed032a818c6f6bc", "size": "16362", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pkg/volume/scaleio/sio_volume.go", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "998"}, {"name": "Go", "bytes": "27145101"}, {"name": "HTML", "bytes": "1193990"}, {"name": "Makefile", "bytes": "62023"}, {"name": "Nginx", "bytes": "1013"}, {"name": "Protocol Buffer", "bytes": "242445"}, {"name": "Python", "bytes": "34630"}, {"name": "SaltStack", "bytes": "55886"}, {"name": "Shell", "bytes": "1412739"}]}} {"text": "extern \"Java\"\n{\n namespace javax\n {\n namespace security\n {\n namespace auth\n {\n namespace login\n {\n class FailedLoginException;\n }\n }\n }\n }\n}\n\nclass javax::security::auth::login::FailedLoginException : public ::javax::security::auth::login::LoginException\n{\n\npublic:\n FailedLoginException();\n FailedLoginException(::java::lang::String *);\nprivate:\n static const jlong serialVersionUID = 802556922354616286LL;\npublic:\n static 
::java::lang::Class class$;\n};\n\n#endif // __javax_security_auth_login_FailedLoginException__\n", "meta": {"content_hash": "6eb54b5a6e9da1ede7f8561a7b9d5db9", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 112, "avg_line_length": 19.333333333333332, "alnum_prop": 0.6517241379310345, "repo_name": "the-linix-project/linix-kernel-source", "id": "014a472c6d9e21351ef149f970f53ab5a268e814", "size": "837", "binary": false, "copies": "160", "ref": "refs/heads/master", "path": "gccsrc/gcc-4.7.2/libjava/javax/security/auth/login/FailedLoginException.h", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "Ada", "bytes": "38139979"}, {"name": "Assembly", "bytes": "3723477"}, {"name": "Awk", "bytes": "83739"}, {"name": "C", "bytes": "103607293"}, {"name": "C#", "bytes": "55726"}, {"name": "C++", "bytes": "38577421"}, {"name": "CLIPS", "bytes": "6933"}, {"name": "CSS", "bytes": "32588"}, {"name": "Emacs Lisp", "bytes": "13451"}, {"name": "FORTRAN", "bytes": "4294984"}, {"name": "GAP", "bytes": "13089"}, {"name": "Go", "bytes": "11277335"}, {"name": "Haskell", "bytes": "2415"}, {"name": "Java", "bytes": "45298678"}, {"name": "JavaScript", "bytes": "6265"}, {"name": "Matlab", "bytes": "56"}, {"name": "OCaml", "bytes": "148372"}, {"name": "Objective-C", "bytes": "995127"}, {"name": "Objective-C++", "bytes": "436045"}, {"name": "PHP", "bytes": "12361"}, {"name": "Pascal", "bytes": "40318"}, {"name": "Perl", "bytes": "358808"}, {"name": "Python", "bytes": "60178"}, {"name": "SAS", "bytes": "1711"}, {"name": "Scilab", "bytes": "258457"}, {"name": "Shell", "bytes": "2610907"}, {"name": "Tcl", "bytes": "17983"}, {"name": "TeX", "bytes": "1455571"}, {"name": "XSLT", "bytes": "156419"}]}} {"text": "\npackage uk.co.real_logic.aeron.common;\n\nimport java.net.InterfaceAddress;\nimport java.net.NetworkInterface;\nimport java.net.SocketException;\nimport java.util.Enumeration;\nimport java.util.List;\n\ninterface NetworkInterfaceShim\n{\n 
Enumeration getNetworkInterfaces() throws SocketException;\n List getInterfaceAddresses(NetworkInterface ifc);\n boolean isLoopback(NetworkInterface ifc) throws SocketException;\n\n NetworkInterfaceShim DEFAULT = new NetworkInterfaceShim()\n {\n public Enumeration getNetworkInterfaces() throws SocketException\n {\n return NetworkInterface.getNetworkInterfaces();\n }\n\n public List getInterfaceAddresses(final NetworkInterface ifc)\n {\n return ifc.getInterfaceAddresses();\n }\n\n public boolean isLoopback(final NetworkInterface ifc) throws SocketException\n {\n return ifc.isLoopback();\n }\n };\n}\n", "meta": {"content_hash": "572858c544ddc9dd253a4790b6e0f3ec", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 90, "avg_line_length": 30.303030303030305, "alnum_prop": 0.728, "repo_name": "jessefugitt/Aeron", "id": "11a4ad5d626ba9d0d7afb8b4b986eb31982cedab", "size": "1601", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "aeron-common/src/main/java/uk/co/real_logic/aeron/common/NetworkInterfaceShim.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C++", "bytes": "313662"}, {"name": "CMake", "bytes": "7737"}, {"name": "Java", "bytes": "990516"}, {"name": "Shell", "bytes": "155"}]}} {"text": "// Copyright (c) 2013 The Chromium Authors. 
All rights reserved.\n// Use of this source code is governed by a BSD-style license that can be\n// found in the LICENSE file.\n\n#include \"content/browser/indexed_db/leveldb/leveldb_database.h\"\n\n#include \n\n#include \"base/basictypes.h\"\n#include \"base/files/file.h\"\n#include \"base/logging.h\"\n#include \"base/memory/scoped_ptr.h\"\n#include \"base/metrics/histogram.h\"\n#include \"base/strings/string16.h\"\n#include \"base/strings/string_piece.h\"\n#include \"base/strings/stringprintf.h\"\n#include \"base/strings/utf_string_conversions.h\"\n#include \"base/sys_info.h\"\n#include \"content/browser/indexed_db/indexed_db_class_factory.h\"\n#include \"content/browser/indexed_db/leveldb/leveldb_comparator.h\"\n#include \"content/browser/indexed_db/leveldb/leveldb_iterator_impl.h\"\n#include \"content/browser/indexed_db/leveldb/leveldb_write_batch.h\"\n#include \"third_party/leveldatabase/env_chromium.h\"\n#include \"third_party/leveldatabase/env_idb.h\"\n#include \"third_party/leveldatabase/src/helpers/memenv/memenv.h\"\n#include \"third_party/leveldatabase/src/include/leveldb/db.h\"\n#include \"third_party/leveldatabase/src/include/leveldb/env.h\"\n#include \"third_party/leveldatabase/src/include/leveldb/filter_policy.h\"\n#include \"third_party/leveldatabase/src/include/leveldb/slice.h\"\n\nusing base::StringPiece;\n\nnamespace content {\n\n// Forcing flushes to disk at the end of a transaction guarantees that the\n// data hit disk, but drastically impacts throughput when the filesystem is\n// busy with background compactions. Not syncing trades off reliability for\n// performance. 
Note that background compactions which move data from the\n// log to SSTs are always done with reliable writes.\n//\n// Sync writes are necessary on Windows for quota calculations; POSIX\n// calculates file sizes correctly even when not synced to disk.\n#if defined(OS_WIN)\nstatic const bool kSyncWrites = true;\n#else\n// TODO(dgrogan): Either remove the #if block or change this back to false.\n// See http://crbug.com/338385.\nstatic const bool kSyncWrites = true;\n#endif\n\nstatic leveldb::Slice MakeSlice(const StringPiece& s) {\n return leveldb::Slice(s.begin(), s.size());\n}\n\nstatic StringPiece MakeStringPiece(const leveldb::Slice& s) {\n return StringPiece(s.data(), s.size());\n}\n\nLevelDBDatabase::ComparatorAdapter::ComparatorAdapter(\n const LevelDBComparator* comparator)\n : comparator_(comparator) {}\n\nint LevelDBDatabase::ComparatorAdapter::Compare(const leveldb::Slice& a,\n const leveldb::Slice& b) const {\n return comparator_->Compare(MakeStringPiece(a), MakeStringPiece(b));\n}\n\nconst char* LevelDBDatabase::ComparatorAdapter::Name() const {\n return comparator_->Name();\n}\n\n// TODO(jsbell): Support the methods below in the future.\nvoid LevelDBDatabase::ComparatorAdapter::FindShortestSeparator(\n std::string* start,\n const leveldb::Slice& limit) const {}\n\nvoid LevelDBDatabase::ComparatorAdapter::FindShortSuccessor(\n std::string* key) const {}\n\nLevelDBSnapshot::LevelDBSnapshot(LevelDBDatabase* db)\n : db_(db->db_.get()), snapshot_(db_->GetSnapshot()) {}\n\nLevelDBSnapshot::~LevelDBSnapshot() { db_->ReleaseSnapshot(snapshot_); }\n\nLevelDBDatabase::LevelDBDatabase() {}\n\nLevelDBDatabase::~LevelDBDatabase() {\n // db_'s destructor uses comparator_adapter_; order of deletion is important.\n db_.reset();\n comparator_adapter_.reset();\n env_.reset();\n}\n\nstatic leveldb::Status OpenDB(\n leveldb::Comparator* comparator,\n leveldb::Env* env,\n const base::FilePath& path,\n leveldb::DB** db,\n scoped_ptr* filter_policy) {\n 
filter_policy->reset(leveldb::NewBloomFilterPolicy(10));\n leveldb::Options options;\n options.comparator = comparator;\n options.create_if_missing = true;\n options.paranoid_checks = true;\n options.filter_policy = filter_policy->get();\n options.reuse_logs = true;\n options.compression = leveldb::kSnappyCompression;\n\n // For info about the troubles we've run into with this parameter, see:\n // https://code.google.com/p/chromium/issues/detail?id=227313#c11\n options.max_open_files = 80;\n options.env = env;\n\n // ChromiumEnv assumes UTF8, converts back to FilePath before using.\n leveldb::Status s = leveldb::DB::Open(options, path.AsUTF8Unsafe(), db);\n\n return s;\n}\n\nleveldb::Status LevelDBDatabase::Destroy(const base::FilePath& file_name) {\n leveldb::Options options;\n options.env = leveldb::IDBEnv();\n // ChromiumEnv assumes UTF8, converts back to FilePath before using.\n return leveldb::DestroyDB(file_name.AsUTF8Unsafe(), options);\n}\n\nnamespace {\nclass LockImpl : public LevelDBLock {\n public:\n explicit LockImpl(leveldb::Env* env, leveldb::FileLock* lock)\n : env_(env), lock_(lock) {}\n ~LockImpl() override { env_->UnlockFile(lock_); }\n\n private:\n leveldb::Env* env_;\n leveldb::FileLock* lock_;\n\n DISALLOW_COPY_AND_ASSIGN(LockImpl);\n};\n} // namespace\n\nscoped_ptr LevelDBDatabase::LockForTesting(\n const base::FilePath& file_name) {\n leveldb::Env* env = leveldb::IDBEnv();\n base::FilePath lock_path = file_name.AppendASCII(\"LOCK\");\n leveldb::FileLock* lock = NULL;\n leveldb::Status status = env->LockFile(lock_path.AsUTF8Unsafe(), &lock);\n if (!status.ok())\n return scoped_ptr();\n DCHECK(lock);\n return scoped_ptr(new LockImpl(env, lock));\n}\n\nstatic int CheckFreeSpace(const char* const type,\n const base::FilePath& file_name) {\n std::string name =\n std::string(\"WebCore.IndexedDB.LevelDB.Open\") + type + \"FreeDiskSpace\";\n int64 free_disk_space_in_k_bytes =\n base::SysInfo::AmountOfFreeDiskSpace(file_name) / 1024;\n if 
(free_disk_space_in_k_bytes < 0) {\n base::Histogram::FactoryGet(\n \"WebCore.IndexedDB.LevelDB.FreeDiskSpaceFailure\",\n 1,\n 2 /*boundary*/,\n 2 /*boundary*/ + 1,\n base::HistogramBase::kUmaTargetedHistogramFlag)->Add(1 /*sample*/);\n return -1;\n }\n int clamped_disk_space_k_bytes = free_disk_space_in_k_bytes > INT_MAX\n ? INT_MAX\n : free_disk_space_in_k_bytes;\n const uint64 histogram_max = static_cast(1e9);\n static_assert(histogram_max <= INT_MAX, \"histogram_max too big\");\n base::Histogram::FactoryGet(name,\n 1,\n histogram_max,\n 11 /*buckets*/,\n base::HistogramBase::kUmaTargetedHistogramFlag)\n ->Add(clamped_disk_space_k_bytes);\n return clamped_disk_space_k_bytes;\n}\n\nstatic void ParseAndHistogramIOErrorDetails(const std::string& histogram_name,\n const leveldb::Status& s) {\n leveldb_env::MethodID method;\n base::File::Error error = base::File::FILE_OK;\n leveldb_env::ErrorParsingResult result =\n leveldb_env::ParseMethodAndError(s, &method, &error);\n if (result == leveldb_env::NONE)\n return;\n std::string method_histogram_name(histogram_name);\n method_histogram_name.append(\".EnvMethod\");\n base::LinearHistogram::FactoryGet(\n method_histogram_name,\n 1,\n leveldb_env::kNumEntries,\n leveldb_env::kNumEntries + 1,\n base::HistogramBase::kUmaTargetedHistogramFlag)->Add(method);\n\n std::string error_histogram_name(histogram_name);\n\n if (result == leveldb_env::METHOD_AND_PFE) {\n DCHECK_LT(error, 0);\n error_histogram_name.append(std::string(\".PFE.\") +\n leveldb_env::MethodIDToString(method));\n base::LinearHistogram::FactoryGet(\n error_histogram_name,\n 1,\n -base::File::FILE_ERROR_MAX,\n -base::File::FILE_ERROR_MAX + 1,\n base::HistogramBase::kUmaTargetedHistogramFlag)->Add(-error);\n }\n}\n\nstatic void ParseAndHistogramCorruptionDetails(\n const std::string& histogram_name,\n const leveldb::Status& status) {\n int error = leveldb_env::GetCorruptionCode(status);\n DCHECK_GE(error, 0);\n std::string 
corruption_histogram_name(histogram_name);\n corruption_histogram_name.append(\".Corruption\");\n const int kNumPatterns = leveldb_env::GetNumCorruptionCodes();\n base::LinearHistogram::FactoryGet(\n corruption_histogram_name,\n 1,\n kNumPatterns,\n kNumPatterns + 1,\n base::HistogramBase::kUmaTargetedHistogramFlag)->Add(error);\n}\n\nstatic void HistogramLevelDBError(const std::string& histogram_name,\n const leveldb::Status& s) {\n if (s.ok()) {\n NOTREACHED();\n return;\n }\n enum {\n LEVEL_DB_NOT_FOUND,\n LEVEL_DB_CORRUPTION,\n LEVEL_DB_IO_ERROR,\n LEVEL_DB_OTHER,\n LEVEL_DB_MAX_ERROR\n };\n int leveldb_error = LEVEL_DB_OTHER;\n if (s.IsNotFound())\n leveldb_error = LEVEL_DB_NOT_FOUND;\n else if (s.IsCorruption())\n leveldb_error = LEVEL_DB_CORRUPTION;\n else if (s.IsIOError())\n leveldb_error = LEVEL_DB_IO_ERROR;\n base::Histogram::FactoryGet(histogram_name,\n 1,\n LEVEL_DB_MAX_ERROR,\n LEVEL_DB_MAX_ERROR + 1,\n base::HistogramBase::kUmaTargetedHistogramFlag)\n ->Add(leveldb_error);\n if (s.IsIOError())\n ParseAndHistogramIOErrorDetails(histogram_name, s);\n else\n ParseAndHistogramCorruptionDetails(histogram_name, s);\n}\n\nleveldb::Status LevelDBDatabase::Open(const base::FilePath& file_name,\n const LevelDBComparator* comparator,\n scoped_ptr* result,\n bool* is_disk_full) {\n base::TimeTicks begin_time = base::TimeTicks::Now();\n\n scoped_ptr comparator_adapter(\n new ComparatorAdapter(comparator));\n\n leveldb::DB* db;\n scoped_ptr filter_policy;\n const leveldb::Status s = OpenDB(comparator_adapter.get(),\n leveldb::IDBEnv(),\n file_name,\n &db,\n &filter_policy);\n\n if (!s.ok()) {\n HistogramLevelDBError(\"WebCore.IndexedDB.LevelDBOpenErrors\", s);\n int free_space_k_bytes = CheckFreeSpace(\"Failure\", file_name);\n // Disks with <100k of free space almost never succeed in opening a\n // leveldb database.\n if (is_disk_full)\n *is_disk_full = free_space_k_bytes >= 0 && free_space_k_bytes < 100;\n\n LOG(ERROR) << \"Failed to open LevelDB database from 
\"\n << file_name.AsUTF8Unsafe() << \",\" << s.ToString();\n return s;\n }\n\n UMA_HISTOGRAM_MEDIUM_TIMES(\"WebCore.IndexedDB.LevelDB.OpenTime\",\n base::TimeTicks::Now() - begin_time);\n\n CheckFreeSpace(\"Success\", file_name);\n\n (*result).reset(new LevelDBDatabase);\n (*result)->db_ = make_scoped_ptr(db);\n (*result)->comparator_adapter_ = comparator_adapter.Pass();\n (*result)->comparator_ = comparator;\n (*result)->filter_policy_ = filter_policy.Pass();\n\n return s;\n}\n\nscoped_ptr LevelDBDatabase::OpenInMemory(\n const LevelDBComparator* comparator) {\n scoped_ptr comparator_adapter(\n new ComparatorAdapter(comparator));\n scoped_ptr in_memory_env(leveldb::NewMemEnv(leveldb::IDBEnv()));\n\n leveldb::DB* db;\n scoped_ptr filter_policy;\n const leveldb::Status s = OpenDB(comparator_adapter.get(),\n in_memory_env.get(),\n base::FilePath(),\n &db,\n &filter_policy);\n\n if (!s.ok()) {\n LOG(ERROR) << \"Failed to open in-memory LevelDB database: \" << s.ToString();\n return scoped_ptr();\n }\n\n scoped_ptr result(new LevelDBDatabase);\n result->env_ = in_memory_env.Pass();\n result->db_ = make_scoped_ptr(db);\n result->comparator_adapter_ = comparator_adapter.Pass();\n result->comparator_ = comparator;\n result->filter_policy_ = filter_policy.Pass();\n\n return result.Pass();\n}\n\nleveldb::Status LevelDBDatabase::Put(const StringPiece& key,\n std::string* value) {\n base::TimeTicks begin_time = base::TimeTicks::Now();\n\n leveldb::WriteOptions write_options;\n write_options.sync = kSyncWrites;\n\n const leveldb::Status s =\n db_->Put(write_options, MakeSlice(key), MakeSlice(*value));\n if (!s.ok())\n LOG(ERROR) << \"LevelDB put failed: \" << s.ToString();\n else\n UMA_HISTOGRAM_TIMES(\"WebCore.IndexedDB.LevelDB.PutTime\",\n base::TimeTicks::Now() - begin_time);\n return s;\n}\n\nleveldb::Status LevelDBDatabase::Remove(const StringPiece& key) {\n leveldb::WriteOptions write_options;\n write_options.sync = kSyncWrites;\n\n const leveldb::Status s = 
db_->Delete(write_options, MakeSlice(key));\n if (!s.IsNotFound())\n LOG(ERROR) << \"LevelDB remove failed: \" << s.ToString();\n return s;\n}\n\nleveldb::Status LevelDBDatabase::Get(const StringPiece& key,\n std::string* value,\n bool* found,\n const LevelDBSnapshot* snapshot) {\n *found = false;\n leveldb::ReadOptions read_options;\n read_options.verify_checksums = true; // TODO(jsbell): Disable this if the\n // performance impact is too great.\n read_options.snapshot = snapshot ? snapshot->snapshot_ : 0;\n\n const leveldb::Status s = db_->Get(read_options, MakeSlice(key), value);\n if (s.ok()) {\n *found = true;\n return s;\n }\n if (s.IsNotFound())\n return leveldb::Status::OK();\n HistogramLevelDBError(\"WebCore.IndexedDB.LevelDBReadErrors\", s);\n LOG(ERROR) << \"LevelDB get failed: \" << s.ToString();\n return s;\n}\n\nleveldb::Status LevelDBDatabase::Write(const LevelDBWriteBatch& write_batch) {\n base::TimeTicks begin_time = base::TimeTicks::Now();\n leveldb::WriteOptions write_options;\n write_options.sync = kSyncWrites;\n\n const leveldb::Status s =\n db_->Write(write_options, write_batch.write_batch_.get());\n if (!s.ok()) {\n HistogramLevelDBError(\"WebCore.IndexedDB.LevelDBWriteErrors\", s);\n LOG(ERROR) << \"LevelDB write failed: \" << s.ToString();\n } else {\n UMA_HISTOGRAM_TIMES(\"WebCore.IndexedDB.LevelDB.WriteTime\",\n base::TimeTicks::Now() - begin_time);\n }\n return s;\n}\n\nscoped_ptr LevelDBDatabase::CreateIterator(\n const LevelDBSnapshot* snapshot) {\n leveldb::ReadOptions read_options;\n read_options.verify_checksums = true; // TODO(jsbell): Disable this if the\n // performance impact is too great.\n read_options.snapshot = snapshot ? 
snapshot->snapshot_ : 0;\n\n scoped_ptr i(db_->NewIterator(read_options));\n return scoped_ptr(\n IndexedDBClassFactory::Get()->CreateIteratorImpl(i.Pass()));\n}\n\nconst LevelDBComparator* LevelDBDatabase::Comparator() const {\n return comparator_;\n}\n\nvoid LevelDBDatabase::Compact(const base::StringPiece& start,\n const base::StringPiece& stop) {\n const leveldb::Slice start_slice = MakeSlice(start);\n const leveldb::Slice stop_slice = MakeSlice(stop);\n // NULL batch means just wait for earlier writes to be done\n db_->Write(leveldb::WriteOptions(), NULL);\n db_->CompactRange(&start_slice, &stop_slice);\n}\n\nvoid LevelDBDatabase::CompactAll() { db_->CompactRange(NULL, NULL); }\n\n} // namespace content\n", "meta": {"content_hash": "4be9c7d9ff7a51482f588ca06294a5cc", "timestamp": "", "source": "github", "line_count": 429, "max_line_length": 80, "avg_line_length": 36.074592074592076, "alnum_prop": 0.6566942362367537, "repo_name": "hefen1/chromium", "id": "aab1255d145a6e46d3f64e9a602365c9156d5d15", "size": "15476", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "content/browser/indexed_db/leveldb/leveldb_database.cc", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "AppleScript", "bytes": "6973"}, {"name": "Arduino", "bytes": "464"}, {"name": "Assembly", "bytes": "23829"}, {"name": "C", "bytes": "4050888"}, {"name": "C++", "bytes": "227355953"}, {"name": "CSS", "bytes": "970407"}, {"name": "HTML", "bytes": "28896884"}, {"name": "Java", "bytes": "8494381"}, {"name": "JavaScript", "bytes": "19110753"}, {"name": "Makefile", "bytes": "37978"}, {"name": "Objective-C", "bytes": "1276474"}, {"name": "Objective-C++", "bytes": "7755220"}, {"name": "PHP", "bytes": "97817"}, {"name": "PLpgSQL", "bytes": "264470"}, {"name": "Perl", "bytes": "63937"}, {"name": "Protocol Buffer", "bytes": "423501"}, {"name": "Python", "bytes": "7622149"}, {"name": "Shell", "bytes": "478642"}, {"name": "Standard ML", "bytes": "4965"}, {"name": "XSLT", 
"bytes": "418"}, {"name": "nesC", "bytes": "18347"}]}} {"text": "\n\n/**\n * \\file\n * Protects against replay attacks by comparing with the last\n * unicast or broadcast frame counter of the sender.\n * \\author\n * Konrad Krentz \n */\n\n/**\n * \\addtogroup llsec802154\n * @{\n */\n\n#include \n\n#include \"contiki/llsec/anti-replay.h\"\n#include \"contiki/packetbuf.h\"\n\n/* This node's current frame counter value */\nstatic uint32_t counter;\n\n/*---------------------------------------------------------------------------*/\nvoid\nanti_replay_set_counter(struct net_buf *buf)\n{\n frame802154_frame_counter_t reordered_counter;\n \n reordered_counter.u32 = LLSEC802154_HTONL(++counter);\n \n packetbuf_set_attr(buf, PACKETBUF_ATTR_FRAME_COUNTER_BYTES_0_1, reordered_counter.u16[0]);\n packetbuf_set_attr(buf, PACKETBUF_ATTR_FRAME_COUNTER_BYTES_2_3, reordered_counter.u16[1]);\n}\n/*---------------------------------------------------------------------------*/\nuint32_t\nanti_replay_get_counter(struct net_buf *buf)\n{\n frame802154_frame_counter_t disordered_counter;\n \n disordered_counter.u16[0] = packetbuf_attr(buf, PACKETBUF_ATTR_FRAME_COUNTER_BYTES_0_1);\n disordered_counter.u16[1] = packetbuf_attr(buf, PACKETBUF_ATTR_FRAME_COUNTER_BYTES_2_3);\n \n return LLSEC802154_HTONL(disordered_counter.u32); \n}\n/*---------------------------------------------------------------------------*/\nvoid\nanti_replay_init_info(struct net_buf *buf, struct anti_replay_info *info)\n{\n info->last_broadcast_counter\n = info->last_unicast_counter\n = anti_replay_get_counter(buf);\n}\n/*---------------------------------------------------------------------------*/\nint\nanti_replay_was_replayed(struct net_buf *buf, struct anti_replay_info *info)\n{\n uint32_t received_counter;\n \n received_counter = anti_replay_get_counter(buf);\n \n if(packetbuf_holds_broadcast(buf)) {\n /* broadcast */\n if(received_counter <= info->last_broadcast_counter) {\n return 1;\n } else {\n 
info->last_broadcast_counter = received_counter;\n return 0;\n }\n } else {\n /* unicast */\n if(received_counter <= info->last_unicast_counter) {\n return 1;\n } else {\n info->last_unicast_counter = received_counter;\n return 0;\n }\n }\n}\n/*---------------------------------------------------------------------------*/\n\n/** @} */\n", "meta": {"content_hash": "1777034a9c991d7db100787e4e62d48b", "timestamp": "", "source": "github", "line_count": 82, "max_line_length": 92, "avg_line_length": 28.170731707317074, "alnum_prop": 0.561038961038961, "repo_name": "32bitmicro/zephyr", "id": "0db4d6b72335e7714a52d4ba29fe88b3ac52c49f", "size": "3940", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "net/ip/contiki/llsec/anti-replay.c", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Assembly", "bytes": "160164"}, {"name": "Batchfile", "bytes": "28019"}, {"name": "C", "bytes": "6173727"}, {"name": "C++", "bytes": "222025"}, {"name": "Lex", "bytes": "11196"}, {"name": "Makefile", "bytes": "132295"}, {"name": "Objective-C", "bytes": "1912"}, {"name": "Perl", "bytes": "213268"}, {"name": "Python", "bytes": "109645"}, {"name": "Shell", "bytes": "44817"}, {"name": "Yacc", "bytes": "15396"}]}} {"text": "package com.acework.js.components.bootstrap\n\nimport com.acework.js.utils.{Mappable, Mergeable}\nimport japgolly.scalajs.react._\nimport japgolly.scalajs.react.vdom.prefix_<^._\n\nimport scala.scalajs.js.{UndefOr, undefined}\n\n/**\n * Created by weiyin on 10/03/15.\n */\nobject Well extends BootstrapComponent {\n override type P = Well\n override type S = Unit\n override type B = Unit\n override type N = TopNode\n\n override def defaultProps = Well()\n\n case class Well(bsClass: UndefOr[Classes.Value] = Classes.well,\n bsStyle: UndefOr[Styles.Value] = undefined,\n bsSize: UndefOr[Sizes.Value] = undefined,\n addClasses: String = \"\")\n extends BsProps with MergeableProps[Well] {\n\n def merge(t: Map[String, Any]): Well = 
implicitly[Mergeable[Well]].merge(this, t)\n\n def asMap: Map[String, Any] = implicitly[Mappable[Well]].toMap(this)\n\n def apply(children: ReactNode*) = component(this, children)\n\n def apply() = component(this)\n }\n\n override val component = ReactComponentB[Well](\"Well\")\n .render { (P, C) =>\n\n // TODO spread props\n <.div(^.classSet1M(P.addClasses, P.bsClassSet))(C)\n\n }.build\n\n}\n", "meta": {"content_hash": "2c0f045ddc884f768217779e99f20d1a", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 85, "avg_line_length": 27.13953488372093, "alnum_prop": 0.6709511568123393, "repo_name": "weiyinteo/scalajs-react-bootstrap", "id": "6db3f50e028d4f9b8030d93ff00d1a9a724bfc3b", "size": "1167", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "core/src/main/scala/com/acework/js/components/bootstrap/Well.scala", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "6518"}, {"name": "HTML", "bytes": "1421"}, {"name": "Scala", "bytes": "334544"}]}} {"text": "\n 4.0.0\n \n edu.clemson\n resolve-master\n 0.0.1-SNAPSHOT\n ../pom.xml\n \n resolve-runtime\n RESOLVE Runtime\n The RESOLVE Runtime\n\n \n src\n \n\n \n \n", "meta": {"content_hash": "9db3f2bd5c8c66d2148862f9b7c04f00", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 201, "avg_line_length": 36.578947368421055, "alnum_prop": 0.6589928057553956, "repo_name": "Welchd1/resolve-lite", "id": "39f4ca5c089d56d93766a7e02d51bc9da14cd42e", "size": "695", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "runtime/pom.xml", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "ANTLR", "bytes": "15010"}, {"name": "Java", "bytes": "717515"}]}} {"text": "package adf.launcher.option;\n\nimport adf.launcher.ConfigKey;\nimport rescuecore2.config.Config;\n\npublic class OptionPoliceOffice extends Option\n{\n\n\t@Override\n\tpublic String getKey()\n\t{\n\t\treturn \"-po\";\n\t}\n\n\t@Override\n\tpublic void 
setValue(Config config, String[] datas)\n\t{\n\t\tif(datas.length == 2)\n\t\t{\n\t\t\tconfig.setValue(ConfigKey.KEY_POLICE_OFFICE_COUNT, datas[1]);\n\t\t}\n\t}\n}", "meta": {"content_hash": "24f95fa27e0fccff88aa1574153e833d", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 64, "avg_line_length": 16.17391304347826, "alnum_prop": 0.717741935483871, "repo_name": "tkmnet/RCRS-ADF", "id": "cc8bf5d4ac94dcea0fe5311e0208eaa98c61bf98", "size": "372", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "modules/core/src/main/java/adf/launcher/option/OptionPoliceOffice.java", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "Assembly", "bytes": "277"}, {"name": "Batchfile", "bytes": "2394"}, {"name": "C", "bytes": "115742"}, {"name": "C++", "bytes": "4876"}, {"name": "CSS", "bytes": "104604"}, {"name": "GAP", "bytes": "176"}, {"name": "Groovy", "bytes": "1142351"}, {"name": "HTML", "bytes": "28618885"}, {"name": "Java", "bytes": "10250957"}, {"name": "JavaScript", "bytes": "190908"}, {"name": "Objective-C", "bytes": "698"}, {"name": "Objective-C++", "bytes": "442"}, {"name": "Scala", "bytes": "3073"}, {"name": "Shell", "bytes": "16330"}]}} {"text": "\n#ifndef _SET_RADAR_LOD_ACTION_H_\n#define _SET_RADAR_LOD_ACTION_H_\n\n#ifndef _ACTION_H_\n#include \"Action.h\"\n#endif// _ACTION_H_\n\nnamespace Training\n{\n //------------------------------------------------------------------------------\n // class definitions\n //------------------------------------------------------------------------------\n class SetRadarLODAction : public Action\n {\n public:\n /* void */ SetRadarLODAction (RadarImage::RadarLOD radarLOD);\n virtual /* void */ ~SetRadarLODAction (void);\n virtual void Execute (void);\n\n protected:\n RadarImage::RadarLOD m_radarLOD;\n };\n\n //------------------------------------------------------------------------------\n}\n\n#endif //_SET_RADAR_LOD_ACTION_H_\n", "meta": {"content_hash": 
"5d58036fec80b4bd6c5b57769ea7360e", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 90, "avg_line_length": 29.678571428571427, "alnum_prop": 0.3850782190132371, "repo_name": "AllegianceZone/Allegiance", "id": "4431344ef9dd4614b2cf5ba28fb5240e002ccb7b", "size": "1065", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/training/SetRadarLODAction.h", "mode": "33188", "license": "mit", "language": [{"name": "Assembly", "bytes": "26927"}, {"name": "Batchfile", "bytes": "20387"}, {"name": "C", "bytes": "3213698"}, {"name": "C++", "bytes": "11383849"}, {"name": "CSS", "bytes": "1905"}, {"name": "HTML", "bytes": "369500"}, {"name": "JavaScript", "bytes": "125561"}, {"name": "Makefile", "bytes": "9519"}, {"name": "Objective-C", "bytes": "41562"}, {"name": "Perl", "bytes": "3074"}, {"name": "PigLatin", "bytes": "250645"}, {"name": "Roff", "bytes": "5275"}, {"name": "Visual Basic", "bytes": "7253"}, {"name": "XSLT", "bytes": "19495"}]}} {"text": "\n\npackage azure\n\nimport (\n\t\"crypto/rsa\"\n\t\"crypto/x509\"\n\t\"fmt\"\n\t\"io\"\n\t\"io/ioutil\"\n\t\"time\"\n\n\t\"k8s.io/client-go/util/flowcontrol\"\n\t\"k8s.io/kubernetes/pkg/cloudprovider\"\n\t\"k8s.io/kubernetes/pkg/controller\"\n\t\"k8s.io/kubernetes/pkg/version\"\n\n\t\"github.com/Azure/azure-sdk-for-go/arm/compute\"\n\t\"github.com/Azure/azure-sdk-for-go/arm/disk\"\n\t\"github.com/Azure/azure-sdk-for-go/arm/network\"\n\t\"github.com/Azure/azure-sdk-for-go/arm/storage\"\n\t\"github.com/Azure/go-autorest/autorest\"\n\t\"github.com/Azure/go-autorest/autorest/adal\"\n\t\"github.com/Azure/go-autorest/autorest/azure\"\n\t\"github.com/ghodss/yaml\"\n\t\"github.com/golang/glog\"\n\t\"golang.org/x/crypto/pkcs12\"\n\t\"k8s.io/apimachinery/pkg/util/wait\"\n)\n\nconst (\n\t// CloudProviderName is the value used for the --cloud-provider flag\n\tCloudProviderName = \"azure\"\n\trateLimitQPSDefault = 1.0\n\trateLimitBucketDefault = 5\n\tbackoffRetriesDefault = 
6\n\tbackoffExponentDefault = 1.5\n\tbackoffDurationDefault = 5 // in seconds\n\tbackoffJitterDefault = 1.0\n)\n\n// Config holds the configuration parsed from the --cloud-config flag\n// All fields are required unless otherwise specified\ntype Config struct {\n\t// The cloud environment identifier. Takes values from https://github.com/Azure/go-autorest/blob/ec5f4903f77ed9927ac95b19ab8e44ada64c1356/autorest/azure/environments.go#L13\n\tCloud string `json:\"cloud\" yaml:\"cloud\"`\n\t// The AAD Tenant ID for the Subscription that the cluster is deployed in\n\tTenantID string `json:\"tenantId\" yaml:\"tenantId\"`\n\t// The ID of the Azure Subscription that the cluster is deployed in\n\tSubscriptionID string `json:\"subscriptionId\" yaml:\"subscriptionId\"`\n\t// The name of the resource group that the cluster is deployed in\n\tResourceGroup string `json:\"resourceGroup\" yaml:\"resourceGroup\"`\n\t// The location of the resource group that the cluster is deployed in\n\tLocation string `json:\"location\" yaml:\"location\"`\n\t// The name of the VNet that the cluster is deployed in\n\tVnetName string `json:\"vnetName\" yaml:\"vnetName\"`\n\t// The name of the resource group that the Vnet is deployed in\n\tVnetResourceGroup string `json:\"vnetResourceGroup\" yaml:\"vnetResourceGroup\"`\n\t// The name of the subnet that the cluster is deployed in\n\tSubnetName string `json:\"subnetName\" yaml:\"subnetName\"`\n\t// The name of the security group attached to the cluster's subnet\n\tSecurityGroupName string `json:\"securityGroupName\" yaml:\"securityGroupName\"`\n\t// (Optional in 1.6) The name of the route table attached to the subnet that the cluster is deployed in\n\tRouteTableName string `json:\"routeTableName\" yaml:\"routeTableName\"`\n\t// (Optional) The name of the availability set that should be used as the load balancer backend\n\t// If this is set, the Azure cloudprovider will only add nodes from that availability set to the load\n\t// balancer backend pool. 
If this is not set, and multiple agent pools (availability sets) are used, then\n\t// the cloudprovider will try to add all nodes to a single backend pool which is forbidden.\n\t// In other words, if you use multiple agent pools (availability sets), you MUST set this field.\n\tPrimaryAvailabilitySetName string `json:\"primaryAvailabilitySetName\" yaml:\"primaryAvailabilitySetName\"`\n\n\t// The ClientID for an AAD application with RBAC access to talk to Azure RM APIs\n\tAADClientID string `json:\"aadClientId\" yaml:\"aadClientId\"`\n\t// The ClientSecret for an AAD application with RBAC access to talk to Azure RM APIs\n\tAADClientSecret string `json:\"aadClientSecret\" yaml:\"aadClientSecret\"`\n\t// The path of a client certificate for an AAD application with RBAC access to talk to Azure RM APIs\n\tAADClientCertPath string `json:\"aadClientCertPath\" yaml:\"aadClientCertPath\"`\n\t// The password of the client certificate for an AAD application with RBAC access to talk to Azure RM APIs\n\tAADClientCertPassword string `json:\"aadClientCertPassword\" yaml:\"aadClientCertPassword\"`\n\t// Enable exponential backoff to manage resource request retries\n\tCloudProviderBackoff bool `json:\"cloudProviderBackoff\" yaml:\"cloudProviderBackoff\"`\n\t// Backoff retry limit\n\tCloudProviderBackoffRetries int `json:\"cloudProviderBackoffRetries\" yaml:\"cloudProviderBackoffRetries\"`\n\t// Backoff exponent\n\tCloudProviderBackoffExponent float64 `json:\"cloudProviderBackoffExponent\" yaml:\"cloudProviderBackoffExponent\"`\n\t// Backoff duration\n\tCloudProviderBackoffDuration int `json:\"cloudProviderBackoffDuration\" yaml:\"cloudProviderBackoffDuration\"`\n\t// Backoff jitter\n\tCloudProviderBackoffJitter float64 `json:\"cloudProviderBackoffJitter\" yaml:\"cloudProviderBackoffJitter\"`\n\t// Enable rate limiting\n\tCloudProviderRateLimit bool `json:\"cloudProviderRateLimit\" yaml:\"cloudProviderRateLimit\"`\n\t// Rate limit QPS\n\tCloudProviderRateLimitQPS float32 
`json:\"cloudProviderRateLimitQPS\" yaml:\"cloudProviderRateLimitQPS\"`\n\t// Rate limit Bucket Size\n\tCloudProviderRateLimitBucket int `json:\"cloudProviderRateLimitBucket\" yaml:\"cloudProviderRateLimitBucket\"`\n\n\t// Use instance metadata service where possible\n\tUseInstanceMetadata bool `json:\"useInstanceMetadata\" yaml:\"useInstanceMetadata\"`\n\n\t// Use managed service identity for the virtual machine to access Azure ARM APIs\n\tUseManagedIdentityExtension bool `json:\"useManagedIdentityExtension\"`\n}\n\n// Cloud holds the config and clients\ntype Cloud struct {\n\tConfig\n\tEnvironment azure.Environment\n\tRoutesClient network.RoutesClient\n\tSubnetsClient network.SubnetsClient\n\tInterfacesClient network.InterfacesClient\n\tRouteTablesClient network.RouteTablesClient\n\tLoadBalancerClient network.LoadBalancersClient\n\tPublicIPAddressesClient network.PublicIPAddressesClient\n\tSecurityGroupsClient network.SecurityGroupsClient\n\tVirtualMachinesClient compute.VirtualMachinesClient\n\tStorageAccountClient storage.AccountsClient\n\tDisksClient disk.DisksClient\n\toperationPollRateLimiter flowcontrol.RateLimiter\n\tresourceRequestBackoff wait.Backoff\n\tmetadata *InstanceMetadata\n\n\t*BlobDiskController\n\t*ManagedDiskController\n\t*controllerCommon\n}\n\nfunc init() {\n\tcloudprovider.RegisterCloudProvider(CloudProviderName, NewCloud)\n}\n\n// decodePkcs12 decodes a PKCS#12 client certificate by extracting the public certificate and\n// the private RSA key\nfunc decodePkcs12(pkcs []byte, password string) (*x509.Certificate, *rsa.PrivateKey, error) {\n\tprivateKey, certificate, err := pkcs12.Decode(pkcs, password)\n\tif err != nil {\n\t\treturn nil, nil, fmt.Errorf(\"decoding the PKCS#12 client certificate: %v\", err)\n\t}\n\trsaPrivateKey, isRsaKey := privateKey.(*rsa.PrivateKey)\n\tif !isRsaKey {\n\t\treturn nil, nil, fmt.Errorf(\"PKCS#12 certificate must contain a RSA private key\")\n\t}\n\n\treturn certificate, rsaPrivateKey, nil\n}\n\n// 
GetServicePrincipalToken creates a new service principal token based on the configuration\nfunc GetServicePrincipalToken(config *Config, env *azure.Environment) (*adal.ServicePrincipalToken, error) {\n\toauthConfig, err := adal.NewOAuthConfig(env.ActiveDirectoryEndpoint, config.TenantID)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"creating the OAuth config: %v\", err)\n\t}\n\n\tif config.UseManagedIdentityExtension {\n\t\tglog.V(2).Infoln(\"azure: using managed identity extension to retrieve access token\")\n\t\tmsiEndpoint, err := adal.GetMSIVMEndpoint()\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"Getting the managed service identity endpoint: %v\", err)\n\t\t}\n\t\treturn adal.NewServicePrincipalTokenFromMSI(\n\t\t\tmsiEndpoint,\n\t\t\tenv.ServiceManagementEndpoint)\n\t}\n\n\tif len(config.AADClientSecret) > 0 {\n\t\tglog.V(2).Infoln(\"azure: using client_id+client_secret to retrieve access token\")\n\t\treturn adal.NewServicePrincipalToken(\n\t\t\t*oauthConfig,\n\t\t\tconfig.AADClientID,\n\t\t\tconfig.AADClientSecret,\n\t\t\tenv.ServiceManagementEndpoint)\n\t}\n\n\tif len(config.AADClientCertPath) > 0 && len(config.AADClientCertPassword) > 0 {\n\t\tglog.V(2).Infoln(\"azure: using jwt client_assertion (client_cert+client_private_key) to retrieve access token\")\n\t\tcertData, err := ioutil.ReadFile(config.AADClientCertPath)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"reading the client certificate from file %s: %v\", config.AADClientCertPath, err)\n\t\t}\n\t\tcertificate, privateKey, err := decodePkcs12(certData, config.AADClientCertPassword)\n\t\tif err != nil {\n\t\t\treturn nil, fmt.Errorf(\"decoding the client certificate: %v\", err)\n\t\t}\n\t\treturn adal.NewServicePrincipalTokenFromCertificate(\n\t\t\t*oauthConfig,\n\t\t\tconfig.AADClientID,\n\t\t\tcertificate,\n\t\t\tprivateKey,\n\t\t\tenv.ServiceManagementEndpoint)\n\t}\n\n\treturn nil, fmt.Errorf(\"No credentials provided for AAD application %s\", config.AADClientID)\n}\n\n// 
NewCloud returns a Cloud with initialized clients\nfunc NewCloud(configReader io.Reader) (cloudprovider.Interface, error) {\n\tconfig, env, err := ParseConfig(configReader)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\taz := Cloud{\n\t\tConfig: *config,\n\t\tEnvironment: *env,\n\t}\n\n\tservicePrincipalToken, err := GetServicePrincipalToken(config, env)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\taz.SubnetsClient = network.NewSubnetsClient(az.SubscriptionID)\n\taz.SubnetsClient.BaseURI = az.Environment.ResourceManagerEndpoint\n\taz.SubnetsClient.Authorizer = autorest.NewBearerAuthorizer(servicePrincipalToken)\n\taz.SubnetsClient.PollingDelay = 5 * time.Second\n\tconfigureUserAgent(&az.SubnetsClient.Client)\n\n\taz.RouteTablesClient = network.NewRouteTablesClient(az.SubscriptionID)\n\taz.RouteTablesClient.BaseURI = az.Environment.ResourceManagerEndpoint\n\taz.RouteTablesClient.Authorizer = autorest.NewBearerAuthorizer(servicePrincipalToken)\n\taz.RouteTablesClient.PollingDelay = 5 * time.Second\n\tconfigureUserAgent(&az.RouteTablesClient.Client)\n\n\taz.RoutesClient = network.NewRoutesClient(az.SubscriptionID)\n\taz.RoutesClient.BaseURI = az.Environment.ResourceManagerEndpoint\n\taz.RoutesClient.Authorizer = autorest.NewBearerAuthorizer(servicePrincipalToken)\n\taz.RoutesClient.PollingDelay = 5 * time.Second\n\tconfigureUserAgent(&az.RoutesClient.Client)\n\n\taz.InterfacesClient = network.NewInterfacesClient(az.SubscriptionID)\n\taz.InterfacesClient.BaseURI = az.Environment.ResourceManagerEndpoint\n\taz.InterfacesClient.Authorizer = autorest.NewBearerAuthorizer(servicePrincipalToken)\n\taz.InterfacesClient.PollingDelay = 5 * time.Second\n\tconfigureUserAgent(&az.InterfacesClient.Client)\n\n\taz.LoadBalancerClient = network.NewLoadBalancersClient(az.SubscriptionID)\n\taz.LoadBalancerClient.BaseURI = az.Environment.ResourceManagerEndpoint\n\taz.LoadBalancerClient.Authorizer = 
autorest.NewBearerAuthorizer(servicePrincipalToken)\n\taz.LoadBalancerClient.PollingDelay = 5 * time.Second\n\tconfigureUserAgent(&az.LoadBalancerClient.Client)\n\n\taz.VirtualMachinesClient = compute.NewVirtualMachinesClient(az.SubscriptionID)\n\taz.VirtualMachinesClient.BaseURI = az.Environment.ResourceManagerEndpoint\n\taz.VirtualMachinesClient.Authorizer = autorest.NewBearerAuthorizer(servicePrincipalToken)\n\taz.VirtualMachinesClient.PollingDelay = 5 * time.Second\n\tconfigureUserAgent(&az.VirtualMachinesClient.Client)\n\n\taz.PublicIPAddressesClient = network.NewPublicIPAddressesClient(az.SubscriptionID)\n\taz.PublicIPAddressesClient.BaseURI = az.Environment.ResourceManagerEndpoint\n\taz.PublicIPAddressesClient.Authorizer = autorest.NewBearerAuthorizer(servicePrincipalToken)\n\taz.PublicIPAddressesClient.PollingDelay = 5 * time.Second\n\tconfigureUserAgent(&az.PublicIPAddressesClient.Client)\n\n\taz.SecurityGroupsClient = network.NewSecurityGroupsClient(az.SubscriptionID)\n\taz.SecurityGroupsClient.BaseURI = az.Environment.ResourceManagerEndpoint\n\taz.SecurityGroupsClient.Authorizer = autorest.NewBearerAuthorizer(servicePrincipalToken)\n\taz.SecurityGroupsClient.PollingDelay = 5 * time.Second\n\tconfigureUserAgent(&az.SecurityGroupsClient.Client)\n\n\taz.StorageAccountClient = storage.NewAccountsClientWithBaseURI(az.Environment.ResourceManagerEndpoint, az.SubscriptionID)\n\taz.StorageAccountClient.Authorizer = autorest.NewBearerAuthorizer(servicePrincipalToken)\n\tconfigureUserAgent(&az.StorageAccountClient.Client)\n\n\taz.DisksClient = disk.NewDisksClientWithBaseURI(az.Environment.ResourceManagerEndpoint, az.SubscriptionID)\n\taz.DisksClient.Authorizer = autorest.NewBearerAuthorizer(servicePrincipalToken)\n\tconfigureUserAgent(&az.DisksClient.Client)\n\n\t// Conditionally configure rate limits\n\tif az.CloudProviderRateLimit {\n\t\t// Assign rate limit defaults if no configuration was passed in\n\t\tif az.CloudProviderRateLimitQPS == 0 
{\n\t\t\taz.CloudProviderRateLimitQPS = rateLimitQPSDefault\n\t\t}\n\t\tif az.CloudProviderRateLimitBucket == 0 {\n\t\t\taz.CloudProviderRateLimitBucket = rateLimitBucketDefault\n\t\t}\n\t\taz.operationPollRateLimiter = flowcontrol.NewTokenBucketRateLimiter(\n\t\t\taz.CloudProviderRateLimitQPS,\n\t\t\taz.CloudProviderRateLimitBucket)\n\t\tglog.V(2).Infof(\"Azure cloudprovider using rate limit config: QPS=%g, bucket=%d\",\n\t\t\taz.CloudProviderRateLimitQPS,\n\t\t\taz.CloudProviderRateLimitBucket)\n\t} else {\n\t\t// if rate limits are configured off, az.operationPollRateLimiter.Accept() is a no-op\n\t\taz.operationPollRateLimiter = flowcontrol.NewFakeAlwaysRateLimiter()\n\t}\n\n\t// Conditionally configure resource request backoff\n\tif az.CloudProviderBackoff {\n\t\t// Assign backoff defaults if no configuration was passed in\n\t\tif az.CloudProviderBackoffRetries == 0 {\n\t\t\taz.CloudProviderBackoffRetries = backoffRetriesDefault\n\t\t}\n\t\tif az.CloudProviderBackoffExponent == 0 {\n\t\t\taz.CloudProviderBackoffExponent = backoffExponentDefault\n\t\t}\n\t\tif az.CloudProviderBackoffDuration == 0 {\n\t\t\taz.CloudProviderBackoffDuration = backoffDurationDefault\n\t\t}\n\t\tif az.CloudProviderBackoffJitter == 0 {\n\t\t\taz.CloudProviderBackoffJitter = backoffJitterDefault\n\t\t}\n\t\taz.resourceRequestBackoff = wait.Backoff{\n\t\t\tSteps: az.CloudProviderBackoffRetries,\n\t\t\tFactor: az.CloudProviderBackoffExponent,\n\t\t\tDuration: time.Duration(az.CloudProviderBackoffDuration) * time.Second,\n\t\t\tJitter: az.CloudProviderBackoffJitter,\n\t\t}\n\t\tglog.V(2).Infof(\"Azure cloudprovider using retry backoff: retries=%d, exponent=%f, duration=%d, jitter=%f\",\n\t\t\taz.CloudProviderBackoffRetries,\n\t\t\taz.CloudProviderBackoffExponent,\n\t\t\taz.CloudProviderBackoffDuration,\n\t\t\taz.CloudProviderBackoffJitter)\n\t}\n\n\taz.metadata = NewInstanceMetadata()\n\n\tif err := initDiskControllers(&az); err != nil {\n\t\treturn nil, err\n\t}\n\treturn &az, 
nil\n}\n\n// ParseConfig returns a parsed configuration and azure.Environment for an Azure cloudprovider config file\nfunc ParseConfig(configReader io.Reader) (*Config, *azure.Environment, error) {\n\tvar config Config\n\tvar env azure.Environment\n\n\tif configReader == nil {\n\t\treturn &config, &env, nil\n\t}\n\n\tconfigContents, err := ioutil.ReadAll(configReader)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\terr = yaml.Unmarshal(configContents, &config)\n\tif err != nil {\n\t\treturn nil, nil, err\n\t}\n\n\tif config.Cloud == \"\" {\n\t\tenv = azure.PublicCloud\n\t} else {\n\t\tenv, err = azure.EnvironmentFromName(config.Cloud)\n\t\tif err != nil {\n\t\t\treturn nil, nil, err\n\t\t}\n\t}\n\treturn &config, &env, nil\n}\n\n// Initialize passes a Kubernetes clientBuilder interface to the cloud provider\nfunc (az *Cloud) Initialize(clientBuilder controller.ControllerClientBuilder) {}\n\n// LoadBalancer returns a balancer interface. Also returns true if the interface is supported, false otherwise.\nfunc (az *Cloud) LoadBalancer() (cloudprovider.LoadBalancer, bool) {\n\treturn az, true\n}\n\n// Instances returns an instances interface. Also returns true if the interface is supported, false otherwise.\nfunc (az *Cloud) Instances() (cloudprovider.Instances, bool) {\n\treturn az, true\n}\n\n// Zones returns a zones interface. Also returns true if the interface is supported, false otherwise.\nfunc (az *Cloud) Zones() (cloudprovider.Zones, bool) {\n\treturn az, true\n}\n\n// Clusters returns a clusters interface. 
Also returns true if the interface is supported, false otherwise.\nfunc (az *Cloud) Clusters() (cloudprovider.Clusters, bool) {\n\treturn nil, false\n}\n\n// Routes returns a routes interface along with whether the interface is supported.\nfunc (az *Cloud) Routes() (cloudprovider.Routes, bool) {\n\treturn az, true\n}\n\n// ScrubDNS provides an opportunity for cloud-provider-specific code to process DNS settings for pods.\nfunc (az *Cloud) ScrubDNS(nameservers, searches []string) (nsOut, srchOut []string) {\n\treturn nameservers, searches\n}\n\n// HasClusterID returns true if the cluster has a clusterID\nfunc (az *Cloud) HasClusterID() bool {\n\treturn true\n}\n\n// ProviderName returns the cloud provider ID.\nfunc (az *Cloud) ProviderName() string {\n\treturn CloudProviderName\n}\n\n// configureUserAgent configures the autorest client with a user agent that\n// includes \"kubernetes\" and the full kubernetes git version string\n// example:\n// Azure-SDK-for-Go/7.0.1-beta arm-network/2016-09-01; kubernetes-cloudprovider/v1.7.0-alpha.2.711+a2fadef8170bb0-dirty;\nfunc configureUserAgent(client *autorest.Client) {\n\tk8sVersion := version.Get().GitVersion\n\tclient.UserAgent = fmt.Sprintf(\"%s; kubernetes-cloudprovider/%s\", client.UserAgent, k8sVersion)\n}\n\nfunc initDiskControllers(az *Cloud) error {\n\t// Common controller contains the function\n\t// needed by both blob disk and managed disk controllers\n\n\tcommon := &controllerCommon{\n\t\taadResourceEndPoint: az.Environment.ServiceManagementEndpoint,\n\t\tclientID: az.AADClientID,\n\t\tclientSecret: az.AADClientSecret,\n\t\tlocation: az.Location,\n\t\tstorageEndpointSuffix: az.Environment.StorageEndpointSuffix,\n\t\tmanagementEndpoint: az.Environment.ResourceManagerEndpoint,\n\t\tresourceGroup: az.ResourceGroup,\n\t\ttenantID: az.TenantID,\n\t\ttokenEndPoint: az.Environment.ActiveDirectoryEndpoint,\n\t\tsubscriptionID: az.SubscriptionID,\n\t\tcloud: az,\n\t}\n\n\t// BlobDiskController: contains the function 
needed to\n\t// create/attach/detach/delete blob based (unmanaged disks)\n\tblobController, err := newBlobDiskController(common)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"AzureDisk - failed to init Blob Disk Controller with error (%s)\", err.Error())\n\t}\n\n\t// ManagedDiskController: contains the functions needed to\n\t// create/attach/detach/delete managed disks\n\tmanagedController, err := newManagedDiskController(common)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"AzureDisk - failed to init Managed Disk Controller with error (%s)\", err.Error())\n\t}\n\n\taz.BlobDiskController = blobController\n\taz.ManagedDiskController = managedController\n\taz.controllerCommon = common\n\n\treturn nil\n}\n", "meta": {"content_hash": "ddceafad59fd4554816645c7d8d72191", "timestamp": "", "source": "github", "line_count": 440, "max_line_length": 173, "avg_line_length": 41.402272727272724, "alnum_prop": 0.7819070099357742, "repo_name": "guangxuli/kubernetes", "id": "ee9ebf352fae9752d3970f87599c2284327e7c59", "size": "18786", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "pkg/cloudprovider/providers/azure/azure.go", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "2525"}, {"name": "Go", "bytes": "37484418"}, {"name": "HTML", "bytes": "1199467"}, {"name": "Makefile", "bytes": "73377"}, {"name": "PowerShell", "bytes": "4261"}, {"name": "Python", "bytes": "2546728"}, {"name": "Ruby", "bytes": "1733"}, {"name": "SaltStack", "bytes": "52094"}, {"name": "Shell", "bytes": "1642811"}]}} {"text": "module API\nend\n", "meta": {"content_hash": "2c708cc6e49eed2c0bf9a5a6704eb0b9", "timestamp": "", "source": "github", "line_count": 2, "max_line_length": 10, "avg_line_length": 7.5, "alnum_prop": 0.8, "repo_name": "metova/metova-rails", "id": "6e6f9edc6c2e514e6541b35d6b4cff91c29bb9e5", "size": "15", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "lib/metova/api.rb", "mode": "33188", "license": "mit", "language": [{"name": 
"CSS", "bytes": "1500"}, {"name": "HTML", "bytes": "5647"}, {"name": "JavaScript", "bytes": "1351"}, {"name": "Ruby", "bytes": "87018"}]}} {"text": ";\n\n BluetoothServiceAttributeValueBlueZ();\n BluetoothServiceAttributeValueBlueZ(Type type,\n size_t size,\n std::unique_ptr value);\n explicit BluetoothServiceAttributeValueBlueZ(\n std::unique_ptr sequence);\n BluetoothServiceAttributeValueBlueZ(\n const BluetoothServiceAttributeValueBlueZ& attribute);\n BluetoothServiceAttributeValueBlueZ& operator=(\n const BluetoothServiceAttributeValueBlueZ& attribute);\n ~BluetoothServiceAttributeValueBlueZ();\n\n Type type() const { return type_; }\n size_t size() const { return size_; }\n const Sequence& sequence() const { return *sequence_.get(); }\n const base::Value& value() const { return *value_.get(); }\n\n private:\n Type type_;\n size_t size_;\n std::unique_ptr value_;\n std::unique_ptr sequence_;\n};\n\n} // namespace bluez\n\n#endif // DEVICE_BLUETOOTH_BLUEZ_BLUETOOTH_SERVICE_ATTRIBUTE_VALUE_BLUEZ_H_\n", "meta": {"content_hash": "6ddee5f2d41f0e4baba9525148d757df", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 80, "avg_line_length": 42.266666666666666, "alnum_prop": 0.7018927444794952, "repo_name": "ssaroha/node-webrtc", "id": "fdd291a5f100e4cdced1af2460235afdcf30b4ec", "size": "2346", "binary": false, "copies": "14", "ref": "refs/heads/develop", "path": "third_party/webrtc/include/chromium/src/device/bluetooth/bluez/bluetooth_service_attribute_value_bluez.h", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "Batchfile", "bytes": "6179"}, {"name": "C", "bytes": "2679"}, {"name": "C++", "bytes": "54327"}, {"name": "HTML", "bytes": "434"}, {"name": "JavaScript", "bytes": "42707"}, {"name": "Python", "bytes": "3835"}]}} {"text": "use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};\nuse ::RegType::*;\nuse ::instruction_def::*;\nuse ::Operand::*;\nuse ::Reg::*;\nuse 
::RegScale::*;\nuse ::test::run_test;\n\n#[test]\nfn vmovupd_1() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM1)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 16, 241], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_2() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(XMM0)), operand2: Some(IndirectScaledIndexed(ECX, EDX, Four, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 16, 4, 145], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_3() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(XMM3)), operand2: Some(Direct(XMM5)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 16, 221], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_4() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(XMM3)), operand2: Some(Indirect(RAX, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 16, 24], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_5() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(YMM0)), operand2: Some(Direct(YMM6)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 253, 16, 198], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_6() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(YMM5)), operand2: Some(Indirect(ESI, Some(OperandSize::Ymmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: 
None, broadcast: None }, &[197, 253, 16, 46], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_7() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(YMM5)), operand2: Some(Direct(YMM2)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 253, 16, 234], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_8() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(YMM2)), operand2: Some(Indirect(RSI, Some(OperandSize::Ymmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 253, 16, 22], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_9() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 241, 253, 140, 16, 247], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_10() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(XMM1)), operand2: Some(IndirectScaledIndexed(EDX, ECX, Four, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 241, 253, 137, 16, 12, 138], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_11() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(XMM20)), operand2: Some(Direct(XMM4)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 225, 253, 142, 16, 228], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_12() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(XMM17)), operand2: 
Some(IndirectDisplaced(RCX, 1305176396, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 225, 253, 137, 16, 137, 76, 105, 203, 77], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_13() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(YMM7)), operand2: Some(Direct(YMM4)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 241, 253, 175, 16, 252], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_14() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(YMM5)), operand2: Some(Indirect(EAX, Some(OperandSize::Ymmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 253, 170, 16, 40], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_15() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(YMM28)), operand2: Some(Direct(YMM30)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 1, 253, 173, 16, 230], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_16() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(YMM30)), operand2: Some(IndirectDisplaced(RDX, 408359552, Some(OperandSize::Ymmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 97, 253, 172, 16, 178, 128, 18, 87, 24], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_17() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(ZMM0)), operand2: Some(Direct(ZMM1)), operand3: None, 
operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 241, 253, 207, 16, 193], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_18() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(ZMM0)), operand2: Some(IndirectScaledIndexed(ESI, ECX, Eight, Some(OperandSize::Zmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 241, 253, 204, 16, 4, 206], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_19() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(ZMM1)), operand2: Some(Direct(ZMM4)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 241, 253, 206, 16, 204], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_20() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(ZMM11)), operand2: Some(IndirectScaledDisplaced(RBX, Eight, 1416962940, Some(OperandSize::Zmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 113, 253, 201, 16, 28, 221, 124, 35, 117, 84], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_21() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM5)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 16, 205], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_22() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(IndirectScaledIndexedDisplaced(EBX, ESI, Four, 270910934, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(XMM0)), operand3: None, operand4: None, lock: 
false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 17, 132, 179, 214, 197, 37, 16], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_23() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM4)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 16, 252], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_24() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(IndirectScaledIndexedDisplaced(RDI, RSI, Four, 1129257858, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(XMM4)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 17, 164, 183, 130, 27, 79, 67], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_25() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 253, 16, 223], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_26() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Indirect(EDI, Some(OperandSize::Ymmword), None)), operand2: Some(Direct(YMM6)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 253, 17, 55], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_27() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(YMM5)), operand2: Some(Direct(YMM0)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 253, 16, 232], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_28() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: 
Some(IndirectDisplaced(RSI, 1989076454, Some(OperandSize::Ymmword), None)), operand2: Some(Direct(YMM5)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 253, 17, 174, 230, 229, 142, 118], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_29() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM4)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 253, 138, 16, 244], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_30() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(IndirectScaledIndexedDisplaced(EBX, ESI, Eight, 1711242735, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(XMM2)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 17, 148, 243, 239, 125, 255, 101], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_31() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(XMM16)), operand2: Some(Direct(XMM2)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 225, 253, 139, 16, 194], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_32() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(IndirectDisplaced(RBX, 2001347007, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(XMM24)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 97, 253, 8, 17, 131, 191, 33, 74, 119], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_33() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(YMM2)), operand2: Some(Direct(YMM2)), operand3: None, 
operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 241, 253, 173, 16, 210], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_34() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Indirect(EDX, Some(OperandSize::Ymmword), None)), operand2: Some(Direct(YMM1)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 253, 17, 10], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_35() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(YMM1)), operand2: Some(Direct(YMM6)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 253, 171, 16, 206], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_36() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(IndirectDisplaced(RDI, 23682063, Some(OperandSize::Ymmword), None)), operand2: Some(Direct(YMM30)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 97, 253, 40, 17, 183, 15, 92, 105, 1], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_37() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(ZMM0)), operand2: Some(Direct(ZMM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 241, 253, 205, 16, 199], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_38() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(IndirectScaledDisplaced(EAX, Eight, 2015011794, Some(OperandSize::Zmmword), None)), operand2: Some(Direct(ZMM3)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 
241, 253, 72, 17, 28, 197, 210, 163, 26, 120], OperandSize::Dword)\n}\n\n#[test]\nfn vmovupd_39() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(Direct(ZMM3)), operand2: Some(Direct(ZMM21)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 177, 253, 205, 16, 221], OperandSize::Qword)\n}\n\n#[test]\nfn vmovupd_40() {\n run_test(&Instruction { mnemonic: Mnemonic::VMOVUPD, operand1: Some(IndirectDisplaced(RDX, 1033804406, Some(OperandSize::Zmmword), None)), operand2: Some(Direct(ZMM10)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 113, 253, 72, 17, 146, 118, 154, 158, 61], OperandSize::Qword)\n}\n\n", "meta": {"content_hash": "7623f27e6c7f5b0023deeedfa91e31ac", "timestamp": "", "source": "github", "line_count": 208, "max_line_length": 416, "avg_line_length": 71.5, "alnum_prop": 0.6976196880043034, "repo_name": "GregoryComer/rust-x86asm", "id": "3d8ab93fa9770b9b96827f31fd2ecdf71a7467a7", "size": "14872", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/test/instruction_tests/instr_vmovupd.rs", "mode": "33188", "license": "mit", "language": [{"name": "Rust", "bytes": "18282823"}, {"name": "Shell", "bytes": "3038"}]}} {"text": "\ufeffusing UnityEngine;\nusing System.Collections;\n\npublic class Arrow : MonoBehaviour {\n\n\tAnimator _anim;\n\tVector3 facing;\n\tpublic float rayDist;\n\tbool fall;\n\tVector3 yPos;\n\tpublic float fallTime;\n\tTransform _myTransform;\n\tVector3 boxScale;\n\tBoxCollider boxColl;\n\t[HideInInspector]public float damage;\n\tPlayerRanger pr;\n\tGameObject ranger;\n\n\tvoid Awake()\n\t{\n\t\t//_anim.GetComponent();\n\t\tboxColl = GetComponent();\n\t\tboxScale = new Vector3(boxColl.size.x, boxColl.size.y, boxColl.size.z+.2f);\n\t\t_myTransform = transform;\n\t\tyPos = new Vector3(0, 1, 
0);\n\t\ttransform.up = yPos;\n\t\tranger = GameObject.FindWithTag(\"Ranger\");\n\t}\n\t\n\tvoid OnCollisionEnter(Collision other)\n\t{\n\t\trigidbody.velocity = Vector3.zero;\n\t\tfall = true;\n\t}\n\n\tvoid OnTriggerEnter (Collider other)\n\t{\n\t\tif(other.tag == \"Ranger\")\n\t\t{\n\t\t\tother.GetComponent().arrowsCurr++;\n\t\t\tDestroy(gameObject);\n\t\t}\n\t}\n\n\tvoid Update()\n\t{\n\t\tDebug.DrawRay (transform.position, transform.right * rayDist, Color.magenta);\n\t\tRaycastHit hit;\n\t\tif(Physics.Raycast(transform.position, transform.right, out hit, rayDist))\n\t\t{\n\t\t\tif(hit.collider.tag == \"Prop\")\n\t\t\t{\n\t\t\t\trayDist = 0.015f;\n\t\t\t\tCollection();\n\t\t\t}\n\t\t\tif(hit.collider.tag == \"Warrior\")\n\t\t\t{\n\t\t\t\trayDist = 0.015f;\n\t\t\t\tif(!hit.collider.GetComponent().lockedOn)\n\t\t\t\t{\n\t\t\t\t\thit.collider.BroadcastMessage(\"ApplyDamage\", damage);\n\t\t\t\t\thit.collider.BroadcastMessage(\"StopHealthCharge\");\n\t\t\t\t\tranger.GetComponent().arrowsCurr++;\n\t\t\t\t\tDestroy(gameObject);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tif(fall)\n\t\t{\n\t\t\trigidbody.useGravity = true;\n\t\t\tif(_myTransform.position.y <= -5.94f)\n\t\t\t{\n\t\t\t\trigidbody.velocity = Vector3.zero;\n\t\t\t\trigidbody.useGravity = false;\n\t\t\t\tCollection();\n\t\t\t\tGetComponent().size = boxScale;\n\t\t\t\tfall = false;\n\t\t\t}\n\t\t}\n\t}\n\n\tvoid Collection()\n\t{\n\t\trigidbody.isKinematic = true;\n\t\tGetComponent().isTrigger = true;\n\t\tgameObject.layer = LayerMask.NameToLayer(\"Default\");\n\t}\n}\n", "meta": {"content_hash": "dc3707eb0f56ef03a83c4821ec55f78b", "timestamp": "", "source": "github", "line_count": 89, "max_line_length": 79, "avg_line_length": 21.876404494382022, "alnum_prop": 0.6825885978428351, "repo_name": "Daeltaja/FinalProj", "id": "8aa416f1cd2b8a73df2b5a78c8e5b3442ce06ebd", "size": "1949", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Assets/Scripts/Arrow.cs", "mode": "33188", "license": "mit", 
"language": [{"name": "C", "bytes": "1725"}, {"name": "C#", "bytes": "306121"}, {"name": "JavaScript", "bytes": "216"}]}} {"text": "/**\n * CommonError.java\n *\n * This file was auto-generated from WSDL\n * by the Apache Axis 1.4 Apr 22, 2006 (06:55:48 PDT) WSDL2Java emitter.\n */\n\npackage com.google.api.ads.dfp.v201302;\n\n\n/**\n * A place for common errors that can be used across services.\n */\npublic class CommonError extends com.google.api.ads.dfp.v201302.ApiError implements java.io.Serializable {\n private com.google.api.ads.dfp.v201302.CommonErrorReason reason;\n\n public CommonError() {\n }\n\n public CommonError(\n java.lang.String fieldPath,\n java.lang.String trigger,\n java.lang.String errorString,\n java.lang.String apiErrorType,\n com.google.api.ads.dfp.v201302.CommonErrorReason reason) {\n super(\n fieldPath,\n trigger,\n errorString,\n apiErrorType);\n this.reason = reason;\n }\n\n\n /**\n * Gets the reason value for this CommonError.\n * \n * @return reason\n */\n public com.google.api.ads.dfp.v201302.CommonErrorReason getReason() {\n return reason;\n }\n\n\n /**\n * Sets the reason value for this CommonError.\n * \n * @param reason\n */\n public void setReason(com.google.api.ads.dfp.v201302.CommonErrorReason reason) {\n this.reason = reason;\n }\n\n private java.lang.Object __equalsCalc = null;\n public synchronized boolean equals(java.lang.Object obj) {\n if (!(obj instanceof CommonError)) return false;\n CommonError other = (CommonError) obj;\n if (obj == null) return false;\n if (this == obj) return true;\n if (__equalsCalc != null) {\n return (__equalsCalc == obj);\n }\n __equalsCalc = obj;\n boolean _equals;\n _equals = super.equals(obj) && \n ((this.reason==null && other.getReason()==null) || \n (this.reason!=null &&\n this.reason.equals(other.getReason())));\n __equalsCalc = null;\n return _equals;\n }\n\n private boolean __hashCodeCalc = false;\n public synchronized int hashCode() {\n if (__hashCodeCalc) {\n return 0;\n }\n 
__hashCodeCalc = true;\n int _hashCode = super.hashCode();\n if (getReason() != null) {\n _hashCode += getReason().hashCode();\n }\n __hashCodeCalc = false;\n return _hashCode;\n }\n\n // Type metadata\n private static org.apache.axis.description.TypeDesc typeDesc =\n new org.apache.axis.description.TypeDesc(CommonError.class, true);\n\n static {\n typeDesc.setXmlType(new javax.xml.namespace.QName(\"https://www.google.com/apis/ads/publisher/v201302\", \"CommonError\"));\n org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();\n elemField.setFieldName(\"reason\");\n elemField.setXmlName(new javax.xml.namespace.QName(\"https://www.google.com/apis/ads/publisher/v201302\", \"reason\"));\n elemField.setXmlType(new javax.xml.namespace.QName(\"https://www.google.com/apis/ads/publisher/v201302\", \"CommonError.Reason\"));\n elemField.setMinOccurs(0);\n elemField.setNillable(false);\n typeDesc.addFieldDesc(elemField);\n }\n\n /**\n * Return type metadata object\n */\n public static org.apache.axis.description.TypeDesc getTypeDesc() {\n return typeDesc;\n }\n\n /**\n * Get Custom Serializer\n */\n public static org.apache.axis.encoding.Serializer getSerializer(\n java.lang.String mechType, \n java.lang.Class _javaType, \n javax.xml.namespace.QName _xmlType) {\n return \n new org.apache.axis.encoding.ser.BeanSerializer(\n _javaType, _xmlType, typeDesc);\n }\n\n /**\n * Get Custom Deserializer\n */\n public static org.apache.axis.encoding.Deserializer getDeserializer(\n java.lang.String mechType, \n java.lang.Class _javaType, \n javax.xml.namespace.QName _xmlType) {\n return \n new org.apache.axis.encoding.ser.BeanDeserializer(\n _javaType, _xmlType, typeDesc);\n }\n\n}\n", "meta": {"content_hash": "1f209feaa3c66ee2a4a2c7d6b18158de", "timestamp": "", "source": "github", "line_count": 133, "max_line_length": 135, "avg_line_length": 30.834586466165412, "alnum_prop": 0.6100950987564009, "repo_name": 
"google-code-export/google-api-dfp-java", "id": "0c2a22760e7b7c87e715a51d332992ee25d88fcf", "size": "4101", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/com/google/api/ads/dfp/v201302/CommonError.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "39950935"}]}} {"text": "package net.sargue.mailgun;\n\nimport javax.ws.rs.client.*;\nimport java.util.List;\n\n/**\n * Representation of a Mailgun's mail request.\n *

\n * It must be built using a {@link MailBuilder}.\n */\npublic abstract class Mail {\n private final Configuration configuration;\n\n Mail(Configuration configuration) {\n this.configuration = configuration;\n }\n\n /**\n * Convenience shortcut to {@code MailBuilder.using(configuration)}\n *\n * @param configuration the configuration to use\n * @return a new {@link MailBuilder} which uses the specified configuration\n */\n public static MailBuilder using(Configuration configuration) {\n return new MailBuilder(configuration);\n }\n\n /**\n * Retrieves the value of a given mail parameter. If there are multiple\n * values the first one is returned. If the parameter hasn't been set\n * null is returned.\n *\n * Can only be used on simple parameters (String). So don't use it on\n * attachment for example. Doing so will throw a\n * {@link IllegalStateException}.\n *\n * @param param the name of the parameter\n * @return the first value of the parameter, if any, null otherwise\n * @throws IllegalStateException if the parameter is not a simple (basic text) one\n */\n public abstract String getFirstValue(String param);\n\n /**\n * Retrieves the values of a given mail parameter. If the parameter hasn't\n * been set an empty list is returned.\n *\n * Can only be used on simple parameters (String). So don't use it on\n * attachment for example. Doing so will throw a\n * {@link IllegalStateException}.\n *\n * @param param the name of the parameter\n * @return the list of values for the parameter or an empty list\n * @throws IllegalStateException if the parameter is not a simple (basic text) one\n */\n public abstract List getValues(String param);\n\n /**\n * Sends the email.\n *

\n * This method send the request to the Mailgun service. It is a\n * blocking method so it will return upon request\n * completion.\n *\n * @return the response from the Mailgun service or null if the message\n * is not sent (filtered by {@link MailSendFilter}\n */\n public Response send() {\n if (!configuration.mailSendFilter().filter(this)) return null;\n prepareSend();\n return new Response(request().post(entity()));\n }\n\n /**\n * Sends the email asynchronously.\n *

\n * This method returns immediately, sending the request to the Mailgun\n * service in the background. It is a non-blocking\n * method.\n *\n * @param callback the callback to be invoked upon completion or failure\n */\n public void sendAsync(final MailRequestCallback callback) {\n if (!configuration.mailSendFilter().filter(this)) return;\n prepareSend();\n request()\n .async()\n .post(entity(),\n new InvocationCallback() {\n @Override\n public void completed(javax.ws.rs.core.Response o) {\n callback.completed(new Response(o));\n }\n\n @Override\n public void failed(Throwable throwable) {\n callback.failed(throwable);\n }\n });\n }\n\n /**\n * Sends the email asynchronously. It uses the configuration provided\n * default callback if available, ignoring the outcome otherwise.\n *\n * If you want to use a specific callback for this call use\n * {@link #sendAsync(MailRequestCallback)} instead.\n */\n public void sendAsync() {\n if (!configuration.mailSendFilter().filter(this)) return;\n MailRequestCallbackFactory factory = configuration.mailRequestCallbackFactory();\n if (factory == null) {\n prepareSend();\n request().async().post(entity());\n } else\n sendAsync(factory.create(this));\n }\n\n /**\n * Retrieves the configuration associated with this Mail.\n *\n * @return the underlying configuration\n */\n public Configuration configuration() {\n return configuration;\n }\n\n abstract Entity entity(); //NOSONAR\n\n abstract void prepareSend();\n\n void configureTarget(WebTarget target) {\n //defaults to no-op\n }\n\n private Invocation.Builder request() {\n WebTarget target = configuration.getTarget();\n configureTarget(target);\n return target.path(configuration.domain()).path(\"messages\").request();\n }\n}\n", "meta": {"content_hash": "37c2b2a7556228c212197762bba63646", "timestamp": "", "source": "github", "line_count": 140, "max_line_length": 88, "avg_line_length": 33.93571428571428, "alnum_prop": 0.6162913070932435, "repo_name": "sargue/mailgun", "id": 
"a40874e02801c190fb9de545fc637f6d2c250388", "size": "4751", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/net/sargue/mailgun/Mail.java", "mode": "33188", "license": "mit", "language": [{"name": "Java", "bytes": "117011"}]}} {"text": "\n\n \n\n \n \n \n \n \n \n\n", "meta": {"content_hash": "8f579652f42173c3e02ab7db9ae0706d", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 72, "avg_line_length": 30.75862068965517, "alnum_prop": 0.5964125560538116, "repo_name": "yb1415185602/collweather", "id": "658826ca8d5a0494bfc6b4e5b8c7fdf3b969b075", "size": "892", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "res/layout/choose_area.xml", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "27177"}]}} {"text": "\n\n

\n\n \n\n field($model, 'nombre')->textInput() ?>\n\n field($model, 'direccion')->textInput() ?>\n\n field($model, 'contacto')->textInput() ?>\n\n field($model, 'telefono')->textInput() ?>\n\n field($model, 'correo')->textInput() ?>\n\n\n\n isNewRecord ? 'Guardar' : 'Guardar cambios', ['class' => $model->isNewRecord ? 'btn btn-success' : 'btn btn-primary']) ?>\n
\n\n \n\n\n", "meta": {"content_hash": "eae12c72c212f0a39e8bc9b915f06a96", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 160, "avg_line_length": 23.558823529411764, "alnum_prop": 0.5930087390761548, "repo_name": "rzamarripa/shabel", "id": "2baa8926b064eb2a287b05a5e1cdd226cbe8c53f", "size": "801", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "views/proveedor/_form.php", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "ApacheConf", "bytes": "199"}, {"name": "Batchfile", "bytes": "1030"}, {"name": "CSS", "bytes": "1533204"}, {"name": "HTML", "bytes": "179629"}, {"name": "JavaScript", "bytes": "3469599"}, {"name": "PHP", "bytes": "295872"}]}} {"text": "\n\n\n \n\n", "meta": {"content_hash": "ef328dcd460b0dc0dd7a198ee8c2140c", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 74, "avg_line_length": 43.6875, "alnum_prop": 0.7267525035765379, "repo_name": "gr4yscale/havit", "id": "ee547d115bf31a18531fca906258dac19bc0fae1", "size": "699", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "android/Havit/app/src/main/res/layout/activity_main.xml", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "6911"}, {"name": "Java", "bytes": "6698"}, {"name": "JavaScript", "bytes": "131667"}, {"name": "Objective-C", "bytes": "236235"}, {"name": "Shell", "bytes": "50"}]}} {"text": "// -*- mode: C++; c-indent-level: 4; c-basic-offset: 4; indent-tabs-mode: nil; -*-\n//\n// rcast.h: Rcpp R/C++ interface class library -- cast from one SEXP type to another\n//\n// Copyright (C) 2010 - 2013 Dirk Eddelbuettel and Romain Francois\n//\n// This file is part of Rcpp.\n//\n// Rcpp is free software: you can redistribute it and/or modify it\n// under the terms of the GNU General Public License as published by\n// the Free Software Foundation, either version 2 of the License, or\n// (at your option) any later version.\n//\n// Rcpp is distributed in the hope that it will 
be useful, but\n// WITHOUT ANY WARRANTY; without even the implied warranty of\n// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n// GNU General Public License for more details.\n//\n// You should have received a copy of the GNU General Public License\n// along with Rcpp. If not, see .\n\n#ifndef Rcpp_rcast_h\n#define Rcpp_rcast_h\n\n#include \n\nnamespace Rcpp{\n namespace internal {\n\n inline SEXP convert_using_rfunction(SEXP x, const char* const fun){\n Armor res ;\n try{\n SEXP funSym = Rf_install(fun);\n res = Rcpp_eval( Rf_lang2( funSym, x ) ) ;\n } catch( eval_error& e){\n throw not_compatible( std::string(\"could not convert using R function : \") + fun ) ;\n }\n return res;\n }\n\n // r_true_cast is only meant to be used when the target SEXP type\n // is different from the SEXP type of x\n template \n SEXP r_true_cast( SEXP x) {\n throw not_compatible( \"not compatible\" ) ;\n return x ; // makes solaris happy\n }\n\n template \n SEXP basic_cast( SEXP x){\n if( TYPEOF(x) == RTYPE ) return x ;\n switch( TYPEOF(x) ){\n case REALSXP:\n case RAWSXP:\n case LGLSXP:\n case CPLXSXP:\n case INTSXP:\n return Rf_coerceVector( x, RTYPE) ;\n default:\n throw ::Rcpp::not_compatible( \"not compatible with requested type\" ) ;\n }\n return R_NilValue ; /* -Wall */\n }\n\n template<>\n inline SEXP r_true_cast(SEXP x){\n return basic_cast(x) ;\n }\n template<>\n inline SEXP r_true_cast(SEXP x){\n return basic_cast(x) ;\n }\n template<>\n inline SEXP r_true_cast(SEXP x){\n return basic_cast(x) ;\n }\n template<>\n inline SEXP r_true_cast(SEXP x){\n return basic_cast(x) ;\n }\n template<>\n inline SEXP r_true_cast(SEXP x){\n return basic_cast(x) ;\n }\n\n template <>\n inline SEXP r_true_cast(SEXP x){\n switch( TYPEOF( x ) ){\n case CPLXSXP:\n case RAWSXP:\n case LGLSXP:\n case REALSXP:\n case INTSXP:\n {\n // return Rf_coerceVector( x, STRSXP );\n // coerceVector does not work for some reason\n Shield call( Rf_lang2( Rf_install( \"as.character\" ), x ) ) ;\n 
Shield res( Rcpp_eval( call, R_GlobalEnv ) ) ;\n return res ;\n }\n case CHARSXP:\n return Rf_ScalarString( x ) ;\n case SYMSXP:\n return Rf_ScalarString( PRINTNAME( x ) ) ;\n default:\n throw ::Rcpp::not_compatible( \"not compatible with STRSXP\" ) ;\n }\n return R_NilValue ; /* -Wall */\n }\n template<>\n inline SEXP r_true_cast(SEXP x) {\n return convert_using_rfunction(x, \"as.list\" ) ;\n }\n template<>\n inline SEXP r_true_cast(SEXP x) {\n return convert_using_rfunction(x, \"as.expression\" ) ;\n }\n template<>\n inline SEXP r_true_cast(SEXP x) {\n switch( TYPEOF(x) ){\n case LANGSXP:\n {\n Shield y( Rf_duplicate( x ));\n SET_TYPEOF(y,LISTSXP) ;\n return y ;\n }\n default:\n return convert_using_rfunction(x, \"as.pairlist\" ) ;\n }\n }\n template<>\n inline SEXP r_true_cast(SEXP x) {\n return convert_using_rfunction(x, \"as.call\" ) ;\n }\n\n } // namespace internal\n\n template SEXP r_cast(SEXP x) {\n if (TYPEOF(x) == TARGET) {\n return x;\n } else {\n #ifdef RCPP_WARN_ON_COERCE\n Shield result( internal::r_true_cast(x) );\n Rf_warning(\"coerced object from '%s' to '%s'\",\n CHAR(Rf_type2str(TYPEOF(x))),\n CHAR(Rf_type2str(TARGET))\n );\n return result;\n #else\n return internal::r_true_cast(x);\n #endif\n }\n }\n\n} // namespace Rcpp\n\n#endif\n", "meta": {"content_hash": "dc9b692f6bde56c1053394b14f140621", "timestamp": "", "source": "github", "line_count": 157, "max_line_length": 101, "avg_line_length": 32.93630573248408, "alnum_prop": 0.5167279056275382, "repo_name": "bccpp/bccpp.github.io", "id": "496a4d9965cca71e2bcd40a5c9ef2bd8f6fd096c", "size": "5171", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "packrat/lib/x86_64-w64-mingw32/3.3.2/Rcpp/include/Rcpp/r_cast.h", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "\n\npackage org.apache.geronimo.connector;\n\nimport java.util.Timer;\n\nimport javax.resource.spi.UnavailableException;\nimport javax.resource.spi.XATerminator;\nimport 
javax.resource.spi.work.WorkManager;\n\nimport org.apache.geronimo.connector.work.GeronimoWorkManager;\n\n/**\n * GBean BootstrapContext implementation that refers to externally configured WorkManager\n * and XATerminator gbeans.\n *\n * @version $Rev$ $Date$\n */\npublic class BootstrapContextImpl implements javax.resource.spi.BootstrapContext {\n private final GeronimoWorkManager workManager;\n\n /**\n * Normal constructor for use as a GBean.\n * @param workManager\n */\n public BootstrapContextImpl(final GeronimoWorkManager workManager) {\n this.workManager = workManager;\n }\n\n\n /**\n * @see javax.resource.spi.BootstrapContext#getWorkManager()\n */\n public WorkManager getWorkManager() {\n return workManager;\n }\n\n /**\n * @see javax.resource.spi.BootstrapContext#getXATerminator()\n */\n public XATerminator getXATerminator() {\n return workManager.getXATerminator();\n }\n\n /**\n * @see javax.resource.spi.BootstrapContext#createTimer()\n */\n public Timer createTimer() throws UnavailableException {\n return new Timer();\n }\n\n// public static final GBeanInfo GBEAN_INFO;\n//\n// static {\n// GBeanInfoBuilder infoFactory = new GBeanInfoBuilder(BootstrapContext.class);\n// //adding interface does not work, creates attributes for references???\n//// infoFactory.addInterface(javax.resource.spi.BootstrapContext.class);\n//\n// infoFactory.addOperation(\"createTimer\");\n// infoFactory.addOperation(\"getWorkManager\");\n// infoFactory.addOperation(\"getXATerminator\");\n//\n// infoFactory.addReference(\"WorkManager\", WorkManager.class);\n// infoFactory.addReference(\"XATerminator\", XATerminator.class);\n//\n// infoFactory.setConstructor(new String[]{\"WorkManager\", \"XATerminator\"});\n//\n// GBEAN_INFO = infoFactory.getBeanInfo();\n// }\n//\n// public static GBeanInfo getGBeanInfo() {\n// return GBEAN_INFO;\n// }\n\n}\n", "meta": {"content_hash": "fd0a1148f0563b637e757c4619efa2bf", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 89, 
"avg_line_length": 28.466666666666665, "alnum_prop": 0.6899297423887588, "repo_name": "vibe13/geronimo", "id": "28ddbdfa9ff250192460eb02a3f700b9f772ee6a", "size": "2765", "binary": false, "copies": "2", "ref": "refs/heads/1.0", "path": "modules/connector/src/java/org/apache/geronimo/connector/BootstrapContextImpl.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "29627"}, {"name": "CSS", "bytes": "47972"}, {"name": "HTML", "bytes": "838469"}, {"name": "Java", "bytes": "8975734"}, {"name": "JavaScript", "bytes": "906"}, {"name": "Shell", "bytes": "32814"}, {"name": "XSLT", "bytes": "4468"}]}} {"text": "/*\n Name: Base16 Atelier Dune Light\n Author: Bram de Haan (http://atelierbram.github.io/syntax-highlighting/atelier-schemes/dune)\n Pygments template by Jan T. Sott (https://github.com/idleberg)\n Created with Base16 Builder by Chris Kempson (https://github.com/chriskempson/base16-builder)\n*/\n.highlight .hll { background-color: #e8e4cf }\n.highlight { color: #6E6B5E; }\n.highlight .c { color: #999580 } /* Comment */\n.highlight .err { color: #d73737 } /* Error */\n.highlight .k { color: #b854d4 } /* Keyword */\n.highlight .l { color: #b65611 } /* Literal */\n.highlight .n { color: #6E6B5E; } /* Name */\n.highlight .o { color: #1fad83 } /* Operator */\n.highlight .p { color: #6E6B5E; } /* Punctuation */\n.highlight .cm { color: #999580 } /* Comment.Multiline */\n.highlight .cp { color: #999580 } /* Comment.Preproc */\n.highlight .c1 { color: #999580 } /* Comment.Single */\n.highlight .cs { color: #999580 } /* Comment.Special */\n.highlight .gd { color: #d73737 } /* Generic.Deleted */\n.highlight .ge { font-style: italic } /* Generic.Emph */\n.highlight .gh { color: #20201d; font-weight: bold } /* Generic.Heading */\n.highlight .gi { color: #60ac39 } /* Generic.Inserted */\n.highlight .gp { color: #999580; font-weight: bold } /* Generic.Prompt */\n.highlight .gs { font-weight: bold } /* Generic.Strong */\n.highlight .gu { 
color: #1fad83; font-weight: bold } /* Generic.Subheading */\n.highlight .kc { color: #b854d4 } /* Keyword.Constant */\n.highlight .kd { color: #b854d4 } /* Keyword.Declaration */\n.highlight .kn { color: #1fad83 } /* Keyword.Namespace */\n.highlight .kp { color: #b854d4 } /* Keyword.Pseudo */\n.highlight .kr { color: #b854d4 } /* Keyword.Reserved */\n.highlight .kt { color: #ae9513 } /* Keyword.Type */\n.highlight .ld { color: #60ac39 } /* Literal.Date */\n.highlight .m { color: #b65611 } /* Literal.Number */\n.highlight .s { color: #2A9292; } /* Literal.String */\n.highlight .na { color: #6684e1 } /* Name.Attribute */\n.highlight .nb { color: #B65611; } /* Name.Builtin */\n.highlight .nc { color: #ae9513 } /* Name.Class */\n.highlight .no { color: #d73737 } /* Name.Constant */\n.highlight .nd { color: #1fad83 } /* Name.Decorator */\n.highlight .ni { color: #20201d } /* Name.Entity */\n.highlight .ne { color: #d73737 } /* Name.Exception */\n.highlight .nf { color: #6684e1 } /* Name.Function */\n.highlight .nl { color: #20201d } /* Name.Label */\n.highlight .nn { color: #ae9513 } /* Name.Namespace */\n.highlight .nx { color: #6684e1 } /* Name.Other */\n.highlight .py { color: #20201d } /* Name.Property */\n.highlight .nt { color: #1fad83 } /* Name.Tag */\n.highlight .nv { color: #d73737 } /* Name.Variable */\n.highlight .ow { color: #1fad83 } /* Operator.Word */\n.highlight .w { color: #20201d } /* Text.Whitespace */\n.highlight .mf { color: #b65611 } /* Literal.Number.Float */\n.highlight .mh { color: #b65611 } /* Literal.Number.Hex */\n.highlight .mi { color: #b65611 } /* Literal.Number.Integer */\n.highlight .mo { color: #b65611 } /* Literal.Number.Oct */\n.highlight .sb { color: #60ac39 } /* Literal.String.Backtick */\n.highlight .sc { color: #20201d } /* Literal.String.Char */\n.highlight .sd { color: #999580 } /* Literal.String.Doc */\n.highlight .s2 { color: #60ac39 } /* Literal.String.Double */\n.highlight .se { color: #b65611 } /* Literal.String.Escape 
*/\n.highlight .sh { color: #60ac39 } /* Literal.String.Heredoc */\n.highlight .si { color: #b65611 } /* Literal.String.Interpol */\n.highlight .sx { color: #60ac39 } /* Literal.String.Other */\n.highlight .sr { color: #60ac39 } /* Literal.String.Regex */\n.highlight .s1 { color: #60ac39 } /* Literal.String.Single */\n.highlight .ss { color: #60ac39 } /* Literal.String.Symbol */\n.highlight .bp { color: #CD0101 } /* Name.Builtin.Pseudo */\n.highlight .vc { color: #d73737 } /* Name.Variable.Class */\n.highlight .vg { color: #d73737 } /* Name.Variable.Global */\n.highlight .vi { color: #d73737 } /* Name.Variable.Instance */\n.highlight .il { color: #b65611 } /* Literal.Number.Integer.Long */\n", "meta": {"content_hash": "896ebb0d0fed348fdd4cc99a7dd7489b", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 100, "avg_line_length": 56.25352112676056, "alnum_prop": 0.6479719579369053, "repo_name": "starsep/rust-www", "id": "c10c63c767699b7345d1a22b35890dfa72ee1ecf", "size": "3994", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "css/syntax-highlight.css", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "46168"}, {"name": "HTML", "bytes": "217249"}, {"name": "JavaScript", "bytes": "15235"}, {"name": "Python", "bytes": "1195"}, {"name": "Ruby", "bytes": "806"}, {"name": "Rust", "bytes": "5235"}, {"name": "Shell", "bytes": "78"}]}} {"text": "export default {\n newConference: '\u65b0\u3057\u3044\u4f1a\u8b70',\n dialInNumber: '\u30c0\u30a4\u30e4\u30eb\u30a4\u30f3\u756a\u53f7',\n host: '\u30db\u30b9\u30c8',\n participants: '\u53c2\u52a0\u8005',\n internationalParticipants: '\u6d77\u5916\u306e\u53c2\u52a0\u8005',\n internationalNumbersHeader: '\u56fd\u969b\u30c0\u30a4\u30e4\u30eb\u30a4\u30f3\u756a\u53f7\u306e\u9078\u629e',\n search: '\u691c\u7d22...',\n inviteWithText: '\u30c6\u30ad\u30b9\u30c8\u3067\u62db\u5f85',\n inviteText: 
'{brandName}\u4f1a\u8b70\u306b\u53c2\u52a0\u3057\u3066\u304f\u3060\u3055\u3044\\u3002\\r\\n\\r\\n\u30c0\u30a4\u30e4\u30eb\u30a4\u30f3\u756a\u53f7\\uFF1A{formattedDialInNumber} \\r\\n{additionalNumbersSection} \\r\\n\u53c2\u52a0\u8005\u306e\u30a2\u30af\u30bb\u30b9\\uFF1A{participantCode} \\r\\n\\r\\n\u56fd\u969b\u30c0\u30a4\u30e4\u30eb\u30a4\u30f3\u756a\u53f7\u304c\u5fc5\u8981\u306a\u5834\u5408\u306f\\u3001\u6b21\u3092\u3054\u89a7\u304f\u3060\u3055\u3044\\uFF1A{dialInNumbersLinks} \\r\\n\\r\\n\u3053\u306e\u96fb\u8a71\u4f1a\u8b70\u306f\\u3001{brandName} Conferencing\u3092\u4f7f\u7528\u3057\u3066\u958b\u50ac\u3055\u308c\u3066\u3044\u307e\u3059\\u3002',\n hostAccess: '\u30db\u30b9\u30c8\u306e\u30a2\u30af\u30bb\u30b9',\n participantsAccess: '\u53c2\u52a0\u8005\u306e\u30a2\u30af\u30bb\u30b9',\n addinalDialInNumbers: '\u8ffd\u52a0\u306e\u30c0\u30a4\u30e4\u30eb\u30a4\u30f3\u756a\u53f7',\n selectNumbers: '\u756a\u53f7\u306e\u9078\u629e',\n enableJoinBeforeHost: '\u30db\u30b9\u30c8\u3088\u308a\u524d\u306e\u53c2\u52a0\u3092\u53ef\u80fd\u306b\u3059\u308b',\n conferenceCommands: '\u4f1a\u8b70\u30b3\u30de\u30f3\u30c9',\n inviteWithGCalendar: 'Google\u30ab\u30ec\u30f3\u30c0\u30fc\u3092\u4f7f\u7528\u3057\u3066\u62db\u5f85',\n joinAsHost: '\u4f1a\u8b70\u3092\u8d77\u52d5',\n internationalNumber: '\u56fd\u969b\u30c0\u30a4\u30e4\u30eb\u30a4\u30f3\u756a\u53f7\\uFF1A',\n};\n\n// @key: @#@\"newConference\"@#@ @source: @#@\"New Conference\"@#@\n// @key: @#@\"dialInNumber\"@#@ @source: @#@\"Dial-in Number\"@#@\n// @key: @#@\"hostAccess\"@#@ @source: @#@\"Host Access\"@#@\n// @key: @#@\"participantsAccess\"@#@ @source: @#@\"Participants Access\"@#@\n// @key: @#@\"addinalDialInNumbers\"@#@ @source: @#@\"Additional Dial-in Numbers\"@#@\n// @key: @#@\"selectNumbers\"@#@ @source: @#@\"Select Numbers\"@#@\n// @key: @#@\"enableJoinBeforeHost\"@#@ @source: @#@\"Enable join before Host\"@#@\n// @key: @#@\"conferenceCommands\"@#@ @source: @#@\"Conference Commands\"@#@\n// @key: 
@#@\"inviteWithGCalendar\"@#@ @source: @#@\"Invite with Google Calendar\"@#@\n// @key: @#@\"inviteWithText\"@#@ @source: @#@\"Invite with Text\"@#@\n// @key: @#@\"joinAsHost\"@#@ @source: @#@\"Launch Conference\"@#@\n// @key: @#@\"internationalNumber\"@#@ @source: @#@\"International Dial-in Numbers:\"@#@\n// @key: @#@\"inviteText_att\"@#@ @source: @#@\"Please join the {brandName} conference.\\n\\nDial-In Number: {formattedDialInNumber} \\n{additionalNumbersSection} \\nParticipant Access: {participantCode} \\n\\nNeed an international dial-in phone number? Please visit {dialInNumbersLinks} \\n\\nThis conference call is brought to you by {brandName} Conferencing.\"@#@\n// @key: @#@\"inviteText_bt\"@#@ @source: @#@\"Please join the {brandName} conference.\\n\\nDial-In Number: {formattedDialInNumber} \\n{additionalNumbersSection} \\nParticipant Access: {participantCode} \\n\\nAdditional dial-in numbers {dialInNumbersLinks}\"@#@\n// @key: @#@\"inviteText_rc\"@#@ @source: @#@\"Please join the {brandName} conference.\\n\\nDial-In Number: {formattedDialInNumber} \\n{additionalNumbersSection} \\nParticipant Access: {participantCode} \\n\\nNeed an international dial-in phone number? 
Please visit {dialInNumbersLinks} \\n\\nThis conference call is brought to you by {brandName} Conferencing.\"@#@\n// @key: @#@\"inviteText_telus\"@#@ @source: @#@\"Please join the {brandName} conference.\\n\\nDial-In Number: {formattedDialInNumber} \\n{additionalNumbersSection} \\nParticipant Access: {participantCode} \\n\\nAdditional dial-in numbers {dialInNumbersLinks}\"@#@\n", "meta": {"content_hash": "fb9e78995c5b94bc89f88d12455d46de", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 354, "avg_line_length": 78.67567567567568, "alnum_prop": 0.7124699416008244, "repo_name": "u9520107/ringcentral-js-widget", "id": "9a1069fb22239d4014bc1bd79928c1b619fd2b80", "size": "3309", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packages/ringcentral-widgets/components/ConferencePanel/i18n/ja-JP.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "90533"}, {"name": "HTML", "bytes": "2967"}, {"name": "JavaScript", "bytes": "433434"}, {"name": "Shell", "bytes": "1001"}]}} {"text": "\n\npackage org.gearvrf;\n\nimport org.gearvrf.utility.Log;\nimport org.gearvrf.utility.DockEventReceiver;\nimport org.gearvrf.utility.VrAppSettings;\n\nimport android.app.Activity;\nimport android.content.Intent;\nimport android.content.pm.ActivityInfo;\nimport android.os.Build;\nimport android.os.Bundle;\nimport android.view.KeyEvent;\nimport android.view.MotionEvent;\nimport android.view.Window;\nimport android.view.WindowManager;\n\nimport com.oculus.vrappframework.VrActivity;\n\n/**\n * The typical GVRF application will have a single Android {@link Activity},\n * which must descend from {@link GVRActivity}, not directly from\n * {@code Activity}.\n * \n * {@code GVRActivity} creates and manages the internal classes which use sensor\n * data to manage a viewpoint, and thus present an appropriate stereoscopic view\n * of your scene graph. 
{@code GVRActivity} also gives GVRF a full-screen window\n * in landscape orientation with no title bar.\n */\npublic class GVRActivity extends VrActivity {\n\n private static final String TAG = Log.tag(GVRActivity.class);\n\n // these values are copy of enum KeyEventType in VrAppFramework/Native_Source/Input.h\n public static final int KEY_EVENT_NONE = 0;\n public static final int KEY_EVENT_SHORT_PRESS = 1;\n public static final int KEY_EVENT_DOUBLE_TAP = 2;\n public static final int KEY_EVENT_LONG_PRESS = 3;\n public static final int KEY_EVENT_DOWN = 4;\n public static final int KEY_EVENT_UP = 5;\n public static final int KEY_EVENT_MAX = 6;\n\n private GVRViewManager mGVRViewManager = null;\n private GVRCamera mCamera;\n private VrAppSettings mAppSettings;\n private long mPtr;\n\n static {\n System.loadLibrary(\"gvrf\");\n }\n\n public static native long nativeSetAppInterface(VrActivity act,\n String fromPackageName, String commandString, String uriString);\n\n static native void nativeSetCamera(long appPtr, long camera);\n static native void nativeSetCameraRig(long appPtr, long cameraRig);\n static native void nativeOnDock(long appPtr);\n static native void nativeOnUndock(long appPtr);\n\n @Override\n protected void onCreate(Bundle savedInstanceState) {\n /*\n * Removes the title bar and the status bar.\n */\n requestWindowFeature(Window.FEATURE_NO_TITLE);\n getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,\n WindowManager.LayoutParams.FLAG_FULLSCREEN);\n setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);\n mAppSettings = new VrAppSettings();\n super.onCreate(savedInstanceState);\n\n Intent intent = getIntent();\n String commandString = VrActivity.getCommandStringFromIntent(intent);\n String fromPackageNameString = VrActivity\n .getPackageStringFromIntent(intent);\n String uriString = VrActivity.getUriStringFromIntent(intent);\n \n mPtr = nativeSetAppInterface(this, fromPackageNameString,\n commandString, uriString);\n \n 
setAppPtr(mPtr);\n\n mDockEventReceiver = new DockEventReceiver(this, mRunOnDock, mRunOnUndock);\n }\n\n protected void onInitAppSettings(VrAppSettings appSettings) {\n\n }\n\n public VrAppSettings getAppSettings(){\n return mAppSettings;\n }\n\n @Override\n protected void onPause() {\n super.onPause();\n if (mGVRViewManager != null) {\n mGVRViewManager.onPause();\n }\n if (null != mDockEventReceiver) {\n mDockEventReceiver.stop();\n }\n }\n\n @Override\n protected void onResume() {\n super.onResume();\n if (mGVRViewManager != null) {\n mGVRViewManager.onResume();\n }\n if (null != mDockEventReceiver) {\n mDockEventReceiver.start();\n }\n }\n\n @Override\n protected void onDestroy() {\n super.onDestroy();\n if (mGVRViewManager != null) {\n mGVRViewManager.onDestroy();\n }\n }\n\n /**\n * Links {@linkplain GVRScript a script} to the activity; sets the version;\n * sets the lens distortion compensation parameters.\n * \n * @param gvrScript\n * An instance of {@link GVRScript} to handle callbacks on the GL\n * thread.\n * @param distortionDataFileName\n * Name of the XML file containing the device parameters. We\n * currently only support the Galaxy Note 4 because that is the\n * only shipping device with the proper GL extensions. When more\n * devices support GVRF, we will publish new device files, along\n * with app-level auto-detect guidelines. This approach will let\n * you support new devices, using the same version of GVRF that\n * you have already tested and approved.\n * \n *

\n * The XML filename is relative to the application's\n * {@code assets} directory, and can specify a file in a\n * directory under the application's {@code assets} directory.\n */\n public void setScript(GVRScript gvrScript, String distortionDataFileName) {\n if (getRequestedOrientation() == ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE) {\n\n GVRXMLParser xmlParser = new GVRXMLParser(getAssets(),\n distortionDataFileName, mAppSettings);\n onInitAppSettings(mAppSettings);\n if (isVrSupported() && !mAppSettings.getMonoScopicModeParms().isMonoScopicMode()) {\n mGVRViewManager = new GVRViewManager(this, gvrScript, xmlParser);\n } else {\n mGVRViewManager = new GVRMonoscopicViewManager(this, gvrScript,\n xmlParser);\n }\n } else {\n throw new IllegalArgumentException(\n \"You can not set orientation to portrait for GVRF apps.\");\n }\n }\n\n /**\n * Sets whether to force rendering to be single-eye, monoscopic view.\n * \n * @param force\n * If true, will create a GVRMonoscopicViewManager when\n * {@linkplain setScript setScript()} is called. If false, will\n * proceed to auto-detect whether the device supports VR\n * rendering and choose the appropriate ViewManager. 
This call\n * will only have an effect if it is called before\n * {@linkplain #setScript(GVRScript, String) setScript()}.\n * \n * @deprecated\n * \n */\n @Deprecated\n public void setForceMonoscopic(boolean force) {\n mAppSettings.monoScopicModeParms.setMonoScopicMode(force);\n }\n\n /**\n * Returns whether a monoscopic view was asked to be forced during\n * {@linkplain #setScript(GVRScript, String) setScript()}.\n * \n * @see setForceMonoscopic\n * @deprecated\n */\n @Deprecated\n public boolean getForceMonoscopic() {\n return mAppSettings.monoScopicModeParms.isMonoScopicMode();\n }\n\n private boolean isVrSupported() {\n if ((Build.MODEL.contains(\"SM-N910\"))\n || (Build.MODEL.contains(\"SM-N916\"))\n || (Build.MODEL.contains(\"SM-G920\"))\n || (Build.MODEL.contains(\"SM-G925\"))) {\n return true;\n }\n\n return false;\n }\n\n public long getAppPtr(){\n return mPtr;\n }\n \n void drawFrame() {\n mGVRViewManager.onDrawFrame();\n }\n\n void oneTimeInit() {\n mGVRViewManager.onSurfaceCreated();\n Log.e(TAG, \" oneTimeInit from native layer\");\n }\n\n void oneTimeShutDown() {\n Log.e(TAG, \" oneTimeShutDown from native layer\");\n }\n\n void beforeDrawEyes() {\n mGVRViewManager.beforeDrawEyes();\n }\n\n void onDrawEyeView(int eye, float fovDegrees) {\n mGVRViewManager.onDrawEyeView(eye, fovDegrees);\n }\n\n void afterDrawEyes() {\n mGVRViewManager.afterDrawEyes();\n }\n\n void setCamera(GVRCamera camera) {\n mCamera = camera;\n\n nativeSetCamera(getAppPtr(), camera.getNative());\n }\n\n void setCameraRig(GVRCameraRig cameraRig) {\n nativeSetCameraRig(getAppPtr(), cameraRig.getNative());\n }\n\n @Override\n public boolean dispatchTouchEvent(MotionEvent event) {\n boolean handled = super.dispatchTouchEvent(event);// VrActivity's\n\n /*\n * Situation: while the super class VrActivity is implementing\n * dispatchTouchEvent() without calling its own super\n * dispatchTouchEvent(), we still need to call the\n * VRTouchPadGestureDetector onTouchEvent. 
Call it here, similar way\n * like in place of viewGroup.onInterceptTouchEvent()\n */\n onTouchEvent(event);\n\n return handled;\n }\n\n boolean onKeyEventNative(int keyCode, int eventType) {\n\n /*\n * Currently VrLib does not call Java onKeyDown()/onKeyUp() in the\n * Activity class. In stead, it calls VrAppInterface->OnKeyEvent if\n * defined in the native side, to give a chance to the app before it\n * intercepts. With this implementation, the developers can expect\n * consistently their key event methods are called as usual in case they\n * want to use the events. The parameter eventType matches with the\n * native side. It can be more than two, DOWN and UP, if the native\n * supports in the future.\n */\n\n switch (eventType) {\n case KEY_EVENT_SHORT_PRESS:\n return onKeyShortPress(keyCode);\n case KEY_EVENT_DOUBLE_TAP:\n return onKeyDoubleTap(keyCode);\n case KEY_EVENT_LONG_PRESS:\n return onKeyLongPress(keyCode, new KeyEvent(KeyEvent.ACTION_DOWN, keyCode));\n case KEY_EVENT_DOWN:\n return onKeyDown(keyCode, new KeyEvent(KeyEvent.ACTION_DOWN, keyCode));\n case KEY_EVENT_UP:\n return onKeyUp(keyCode, new KeyEvent(KeyEvent.ACTION_UP, keyCode));\n case KEY_EVENT_MAX:\n return onKeyMax(keyCode);\n default:\n return false;\n }\n }\n\n public boolean onKeyShortPress(int keyCode) {\n return false;\n }\n\n public boolean onKeyDoubleTap(int keyCode) {\n return false;\n }\n\n public boolean onKeyMax(int keyCode) {\n return false;\n }\n\n boolean updateSensoredScene() {\n return mGVRViewManager.updateSensoredScene();\n }\n\n private final Runnable mRunOnDock = new Runnable() {\n @Override\n public void run() {\n nativeOnDock(getAppPtr());\n }\n };\n\n private final Runnable mRunOnUndock = new Runnable() {\n @Override\n public void run() {\n nativeOnUndock(getAppPtr());\n }\n };\n\n private DockEventReceiver mDockEventReceiver;\n}\n", "meta": {"content_hash": "fe5602c2341fe551ed989c6a62b5ea12", "timestamp": "", "source": "github", "line_count": 323, "max_line_length": 
95, "avg_line_length": 33.26934984520124, "alnum_prop": 0.636608970779825, "repo_name": "rongguodong/GearVRf", "id": "9e68680061713d1e78d6bdf3f507723ace04d5eb", "size": "11354", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "GVRf/Framework/src/org/gearvrf/GVRActivity.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Assembly", "bytes": "56027"}, {"name": "C", "bytes": "1030140"}, {"name": "C++", "bytes": "2898127"}, {"name": "CMake", "bytes": "1409"}, {"name": "CSS", "bytes": "869"}, {"name": "HTML", "bytes": "1636"}, {"name": "Java", "bytes": "13667787"}, {"name": "JavaScript", "bytes": "281672"}, {"name": "Makefile", "bytes": "6615"}, {"name": "Python", "bytes": "3057"}, {"name": "R", "bytes": "29550"}, {"name": "Shell", "bytes": "1837"}, {"name": "XSLT", "bytes": "2509"}]}} {"text": "@implementation TLNavigationController\n\n- (UIInterfaceOrientationMask)supportedInterfaceOrientations {\n\treturn UIInterfaceOrientationMaskAllButUpsideDown;\n}\n\n@end\n", "meta": {"content_hash": "1a3db2808b11caa83f25695658af20b5", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 62, "avg_line_length": 23.285714285714285, "alnum_prop": 0.8773006134969326, "repo_name": "paulrehkugler/xkcd", "id": "de17878c3937289a717166c98157d95a3f9392bf", "size": "292", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "TLCommon/TLNavigationController.m", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "407"}, {"name": "Objective-C", "bytes": "122680"}, {"name": "Swift", "bytes": "11309"}]}} {"text": "\n\n\n#ifndef _FILEIO_DOT_H\n#define _FILEIO_DOT_H\n\n#include \n#include \n#include \n#include \"system_config.h\"\n#include \"system.h\"\n\n#if __XC16_VERSION__ == 1020\n#error XC16 v1.20 is not compatible with this firmware, please use a later version of the XC16 compiler.\n#endif\n\n#if __XC8_VERSION == 1300\n#error XC8 v1.30 is not compatible with this firmware, please use either XC8 
v1.21 or a later version of the XC8 compiler.\n#endif\n\n/*******************************************************************/\n/* Structures and defines */\n/*******************************************************************/\n\n// Enumeration for general purpose return values\ntypedef enum\n{\n FILEIO_RESULT_SUCCESS = 0, // File operation was a success\n FILEIO_RESULT_FAILURE = -1 // File operation failed\n} FILEIO_RESULT;\n\n// Definition to indicate an invalid file handle\n#define FILEIO_INVALID_HANDLE NULL\n\n#define FILEIO_FILE_NAME_LENGTH_8P3 12 // Maximum file name length for 8.3 formatted files\n#define FILEIO_FILE_NAME_LENGTH_8P3_NO_RADIX 11 // Maximum file name length for 8.3 formatted files with no radix\n\n// Enumeration for formatting modes\ntypedef enum\n{\n FILEIO_FORMAT_ERASE = 0, // Erases the contents of the partition\n FILEIO_FORMAT_BOOT_SECTOR // Creates a boot sector based on user-specified information and erases any existing information\n} FILEIO_FORMAT_MODE;\n\n// Enumeration for specific return codes\ntypedef enum\n{\n FILEIO_ERROR_NONE = 0, // No error\n FILEIO_ERROR_ERASE_FAIL, // An erase failed\n FILEIO_ERROR_NOT_PRESENT, // No device was present\n FILEIO_ERROR_NOT_FORMATTED, // The disk is of an unsupported format\n FILEIO_ERROR_BAD_PARTITION, // The boot record is bad\n FILEIO_ERROR_UNSUPPORTED_FS, // The file system type is unsupported\n FILEIO_ERROR_INIT_ERROR, // An initialization error has occured\n FILEIO_ERROR_UNINITIALIZED, // An operation was performed on an uninitialized device\n FILEIO_ERROR_BAD_SECTOR_READ, // A bad read of a sector occured\n FILEIO_ERROR_WRITE, // Could not write to a sector\n FILEIO_ERROR_INVALID_CLUSTER, // Invalid cluster value > maxcls\n FILEIO_ERROR_DRIVE_NOT_FOUND, // The specified drive could not be found\n FILEIO_ERROR_FILE_NOT_FOUND, // Could not find the file on the device\n FILEIO_ERROR_DIR_NOT_FOUND, // Could not find the directory\n FILEIO_ERROR_BAD_FILE, // File is corrupted\n 
FILEIO_ERROR_DONE, // No more files in this directory\n FILEIO_ERROR_COULD_NOT_GET_CLUSTER, // Could not load/allocate next cluster in file\n FILEIO_ERROR_FILENAME_TOO_LONG, // A specified file name is too long to use\n FILEIO_ERROR_FILENAME_EXISTS, // A specified filename already exists on the device\n FILEIO_ERROR_INVALID_FILENAME, // Invalid file name\n FILEIO_ERROR_DELETE_DIR, // The user tried to delete a directory with FILEIO_Remove\n FILEIO_ERROR_DELETE_FILE, // The user tried to delete a file with FILEIO_DirectoryRemove\n FILEIO_ERROR_DIR_FULL, // All root dir entry are taken\n FILEIO_ERROR_DRIVE_FULL, // All clusters in partition are taken\n FILEIO_ERROR_DIR_NOT_EMPTY, // This directory is not empty yet, remove files before deleting\n FILEIO_ERROR_UNSUPPORTED_SIZE, // The disk is too big to format as FAT16\n FILEIO_ERROR_WRITE_PROTECTED, // Card is write protected\n FILEIO_ERROR_FILE_UNOPENED, // File not opened for the write\n FILEIO_ERROR_SEEK_ERROR, // File location could not be changed successfully\n FILEIO_ERROR_BAD_CACHE_READ, // Bad cache read\n FILEIO_ERROR_FAT32_UNSUPPORTED, // FAT 32 - card not supported\n FILEIO_ERROR_READ_ONLY, // The file is read-only\n FILEIO_ERROR_WRITE_ONLY, // The file is write-only\n FILEIO_ERROR_INVALID_ARGUMENT, // Invalid argument\n FILEIO_ERROR_TOO_MANY_FILES_OPEN, // Too many files are already open\n FILEIO_ERROR_TOO_MANY_DRIVES_OPEN, // Too many drives are already open\n FILEIO_ERROR_UNSUPPORTED_SECTOR_SIZE, // Unsupported sector size\n FILEIO_ERROR_NO_LONG_FILE_NAME, // Long file name was not found\n FILEIO_ERROR_EOF // End of file reached\n} FILEIO_ERROR_TYPE;\n\n// Enumeration defining standard attributes used by FAT file systems\ntypedef enum\n{\n FILEIO_ATTRIBUTE_READ_ONLY = 0x01, // Read-only attribute. A file with this attribute should not be written to.\n FILEIO_ATTRIBUTE_HIDDEN = 0x02, // Hidden attribute. 
A file with this attribute may be hidden from the user.\n FILEIO_ATTRIBUTE_SYSTEM = 0x04, // System attribute. A file with this attribute is used by the operating system and should not be modified.\n FILEIO_ATTRIBUTE_VOLUME = 0x08, // Volume attribute. If the first file in the root directory of a volume has this attribute, the entry name is the volume name.\n FILEIO_ATTRIBUTE_LONG_NAME = 0x0F, // A file entry with this attribute mask is used to store part of the file's Long File Name.\n FILEIO_ATTRIBUTE_DIRECTORY = 0x10, // A file entry with this attribute points to a directory.\n FILEIO_ATTRIBUTE_ARCHIVE = 0x20, // Archive attribute. A file with this attribute should be archived.\n FILEIO_ATTRIBUTE_MASK = 0x3F // Mask for all attributes.\n} FILEIO_ATTRIBUTES;\n\n// Enumeration defining base locations for seeking\ntypedef enum\n{\n FILEIO_SEEK_SET = 0, // Change the position in the file to an offset relative to the beginning of the file.\n FILEIO_SEEK_CUR, // Change the position in the file to an offset relative to the current location in the file.\n FILEIO_SEEK_END // Change the position in the file to an offset relative to the end of the file.\n} FILEIO_SEEK_BASE;\n\n// Enumeration for file access modes\ntypedef enum\n{\n FILEIO_OPEN_READ = 0x01, // Open the file for reading.\n FILEIO_OPEN_WRITE = 0x02, // Open the file for writing.\n FILEIO_OPEN_CREATE = 0x04, // Create the file if it doesn't exist.\n FILEIO_OPEN_TRUNCATE = 0x08, // Truncate the file to 0-length.\n FILEIO_OPEN_APPEND = 0x10 // Set the current read/write location in the file to the end of the file.\n} FILEIO_OPEN_ACCESS_MODES;\n\n// Enumeration of macros defining possible file system types supported by a device\ntypedef enum\n{\n FILEIO_FILE_SYSTEM_TYPE_NONE = 0, // No file system\n FILEIO_FILE_SYSTEM_TYPE_FAT12, // The device is formatted with FAT12\n FILEIO_FILE_SYSTEM_TYPE_FAT16, // The device is formatted with FAT16\n FILEIO_FILE_SYSTEM_TYPE_FAT32 // The device is formatted with FAT32\n} 
FILEIO_FILE_SYSTEM_TYPE;\n\n// Summary: Contains file information and is used to indicate which file to access.\n// Description: The FILEIO_OBJECT structure is used to hold file information for an open file as it's being modified or accessed. A pointer to\n// an open file's FILEIO_OBJECT structure will be passed to any library function that will modify that file.\ntypedef struct\n{\n uint32_t baseClusterDir; // The base cluster of the file's directory\n uint32_t currentClusterDir; // The current cluster of the file's directory\n uint32_t firstCluster; // The first cluster of the file\n uint32_t currentCluster; // The current cluster of the file\n uint32_t size; // The size of the file\n uint32_t absoluteOffset; // The absolute offset in the file\n void * disk; // Pointer to a device structure\n uint16_t currentSector; // The current sector in the current cluster of the file\n uint16_t currentOffset; // The position in the current sector\n uint16_t entry; // The position of the file's directory entry in its directory\n uint16_t attributes; // The file's attributes\n uint16_t time; // The file's last update time\n uint16_t date; // The file's last update date\n uint8_t timeMs; // The file's last update time (ms portion)\n char name[FILEIO_FILE_NAME_LENGTH_8P3_NO_RADIX]; // The short name of the file\n struct\n {\n unsigned writeEnabled :1; // Indicates a file was opened in a mode that allows writes\n unsigned readEnabled :1; // Indicates a file was opened in a mode that allows reads\n\n } flags;\n} FILEIO_OBJECT;\n\n// Possible results of the FSGetDiskProperties() function.\ntypedef enum\n{\n FILEIO_GET_PROPERTIES_NO_ERRORS = 0,\n FILEIO_GET_PROPERTIES_CACHE_ERROR,\n FILEIO_GET_PROPERTIES_DRIVE_NOT_MOUNTED,\n FILEIO_GET_PROPERTIES_CLUSTER_FAILURE,\n FILEIO_GET_PROPERTIES_STILL_WORKING = 0xFF\n} FILEIO_DRIVE_ERRORS;\n\n// Enumeration to define media error types\ntypedef enum\n{\n MEDIA_NO_ERROR, // No errors\n MEDIA_DEVICE_NOT_PRESENT, // The requested device is not 
present\n MEDIA_CANNOT_INITIALIZE // Cannot initialize media\n} FILEIO_MEDIA_ERRORS;\n\n// Media information flags. The driver's MediaInitialize function will return a pointer to one of these structures.\ntypedef struct\n{\n FILEIO_MEDIA_ERRORS errorCode; // The status of the intialization FILEIO_MEDIA_ERRORS\n // Flags\n union\n {\n uint8_t value;\n struct\n {\n uint8_t sectorSize : 1; // The sector size parameter is valid.\n uint8_t maxLUN : 1; // The max LUN parameter is valid.\n } bits;\n } validityFlags;\n\n uint16_t sectorSize; // The sector size of the target device.\n uint8_t maxLUN; // The maximum Logical Unit Number of the device.\n} FILEIO_MEDIA_INFORMATION;\n\n/***************************************************************************\n Function:\n void (*FILEIO_DRIVER_IOInitialize)(void * mediaConfig);\n\n Summary:\n Function pointer prototype for a driver function to initialize\n I/O pins and modules for a driver.\n\n Description:\n Function pointer prototype for a driver function to initialize\n I/O pins and modules for a driver.\n\n Precondition:\n None\n\n Parameters:\n mediaConfig - Pointer to a driver-defined config structure\n\n Returns:\n None\n***************************************************************************/\ntypedef void (*FILEIO_DRIVER_IOInitialize)(void * mediaConfig);\n\n/***************************************************************************\n Function:\n bool (*FILEIO_DRIVER_MediaDetect)(void * mediaConfig);\n\n Summary:\n Function pointer prototype for a driver function to detect if\n a media device is attached/available.\n\n Description:\n Function pointer prototype for a driver function to detect if\n a media device is attached/available.\n\n Precondition:\n None\n\n Parameters:\n mediaConfig - Pointer to a driver-defined config structure\n\n Returns:\n If media attached: true\n If media not atached: false\n***************************************************************************/\ntypedef bool 
(*FILEIO_DRIVER_MediaDetect)(void * mediaConfig);\n\n/***************************************************************************\n Function:\n FILEIO_MEDIA_INFORMATION * (*FILEIO_DRIVER_MediaInitialize)(void * mediaConfig);\n\n Summary:\n Function pointer prototype for a driver function to perform media-\n specific initialization tasks.\n\n Description:\n Function pointer prototype for a driver function to perform media-\n specific initialization tasks.\n\n Precondition:\n FILEIO_DRIVE_IOInitialize will be called first.\n\n Parameters:\n mediaConfig - Pointer to a driver-defined config structure\n\n Returns:\n FILEIO_MEDIA_INFORMATION * - Pointer to a media initialization structure\n that has been loaded with initialization values.\n***************************************************************************/\ntypedef FILEIO_MEDIA_INFORMATION * (*FILEIO_DRIVER_MediaInitialize)(void * mediaConfig);\n\n/***************************************************************************\n Function:\n bool (*FILEIO_DRIVER_MediaDeinitialize)(void * mediaConfig);\n\n Summary:\n Function pointer prototype for a driver function to deinitialize\n a media device.\n\n Description:\n Function pointer prototype for a driver function to deinitialize\n a media device.\n\n Precondition:\n None\n\n Parameters:\n mediaConfig - Pointer to a driver-defined config structure\n\n Returns:\n If Success: true\n If Failure: false\n***************************************************************************/\ntypedef bool (*FILEIO_DRIVER_MediaDeinitialize)(void * mediaConfig);\n\n/***************************************************************************\n Function:\n bool (*FILEIO_DRIVER_SectorRead)(void * mediaConfig,\n uint32_t sector_addr, uint8_t * buffer);\n\n Summary:\n Function pointer prototype for a driver function to read a sector\n of data from the device.\n\n Description:\n Function pointer prototype for a driver function to read a sector\n of data from the device.\n\n Precondition:\n 
The device will be initialized.\n\n Parameters:\n mediaConfig - Pointer to a driver-defined config structure\n sectorAddress - The address of the sector to read. This address\n format depends on the media.\n buffer - A buffer to store the copied data sector.\n\n Returns:\n If Success: true\n If Failure: false\n***************************************************************************/\ntypedef bool (*FILEIO_DRIVER_SectorRead)(void * mediaConfig, uint32_t sector_addr, uint8_t* buffer);\n\n/***************************************************************************\n Function:\n bool (*FILEIO_DRIVER_SectorWrite)(void * mediaConfig,\n uint32_t sectorAddress, uint8_t * buffer, bool allowWriteToZero);\n\n Summary:\n Function pointer prototype for a driver function to write a sector\n of data to the device.\n\n Description:\n Function pointer prototype for a driver function to write a sector\n of data to the device.\n\n Precondition:\n The device will be initialized.\n\n Parameters:\n mediaConfig - Pointer to a driver-defined config structure\n sectorAddress - The address of the sector to write. This address\n format depends on the media.\n buffer - A buffer containing the data to write.\n\t\tallowWriteToZero - Check to prevent writing to the master boot \n\t\t record. This will always be false on calls that write to files, \n\t\t\twhich will prevent a device from accidentally overwriting its \n\t\t\town MBR if its root or FAT are corrupted. 
This should only \n\t\t\tbe true if the user specifically tries to construct a new MBR.\n\n Returns:\n If Success: true\n If Failure: false\n***************************************************************************/\ntypedef uint8_t (*FILEIO_DRIVER_SectorWrite)(void * mediaConfig, uint32_t sector_addr, uint8_t* buffer, bool allowWriteToZero);\n\n/***************************************************************************\n Function:\n bool (*FILEIO_DRIVER_WriteProtectStateGet)(void * mediaConfig);\n\n Summary:\n Function pointer prototype for a driver function to determine if\n the device is write-protected.\n\n Description:\n Function pointer prototype for a driver function to determine if\n the device is write-protected.\n\n Precondition:\n None\n\n Parameters:\n mediaConfig - Pointer to a driver-defined config structure\n\n Returns:\n If write-protected: true\n If not write-protected: false\n***************************************************************************/\ntypedef bool (*FILEIO_DRIVER_WriteProtectStateGet)(void * mediaConfig);\n\n\n// Function pointer table that describes a drive being configured by the user\ntypedef struct\n{\n FILEIO_DRIVER_IOInitialize funcIOInit; // I/O Initialization function\n FILEIO_DRIVER_MediaDetect funcMediaDetect; // Media Detection function\n FILEIO_DRIVER_MediaInitialize funcMediaInit; // Media Initialization function\n FILEIO_DRIVER_MediaDeinitialize funcMediaDeinit; // Media Deinitialization function.\n FILEIO_DRIVER_SectorRead funcSectorRead; // Function to read a sector of the media.\n FILEIO_DRIVER_SectorWrite funcSectorWrite; // Function to write a sector of the media.\n FILEIO_DRIVER_WriteProtectStateGet funcWriteProtectGet; // Function to determine if the media is write-protected.\n} FILEIO_DRIVE_CONFIG;\n\n// Structure that contains the disk search information, intermediate values, and results\ntypedef struct\n{\n char disk; /* pointer to the disk we are searching */\n bool new_request; /* is this a new 
request or a continued request */\n FILEIO_DRIVE_ERRORS properties_status; /* status of the last call of the function */\n\n struct\n {\n uint8_t disk_format; /* disk format: FAT12, FAT16, FAT32 */\n uint16_t sector_size; /* sector size of the drive */\n uint8_t sectors_per_cluster; /* number of sectors per cluster */\n uint32_t total_clusters; /* the number of total clusters on the drive */\n uint32_t free_clusters; /* the number of free (unused) clusters on drive */\n } results; /* the results of the current search */\n\n struct\n {\n uint32_t c; \n uint32_t curcls;\n uint32_t EndClusterLimit;\n uint32_t ClusterFailValue;\n } private; /* intermediate values used to continue searches. This\n member should be used only by the FSGetDiskProperties()\n function */\n\n} FILEIO_DRIVE_PROPERTIES;\n\n// Structure to describe a FAT file system date\ntypedef union\n{\n struct\n {\n uint16_t day : 5; // Day (1-31)\n uint16_t month : 4; // Month (1-12)\n uint16_t year : 7; // Year (number of years since 1980)\n } bitfield;\n uint16_t value;\n} FILEIO_DATE;\n\n// Function to describe the FAT file system time.\ntypedef union\n{\n struct\n {\n uint16_t secondsDiv2 : 5; // (Seconds / 2) ( 1-30)\n uint16_t minutes : 6; // Minutes ( 1-60)\n uint16_t hours : 5; // Hours (1-24)\n } bitfield;\n uint16_t value;\n} FILEIO_TIME;\n\n// Structure to describe the time fields of a file\ntypedef struct \n{\n FILEIO_DATE date; // The create or write date of the file/directory.\n FILEIO_TIME time; // The create of write time of the file/directory.\n uint8_t timeMs; // The millisecond portion of the time.\n} FILEIO_TIMESTAMP;\n\n// Search structure\ntypedef struct\n{\n // Return values\n\n uint8_t shortFileName[13]; // The name of the file that has been found (NULL-terminated).\n uint8_t attributes; // The attributes of the file that has been found.\n uint32_t fileSize; // The size of the file that has been found (bytes).\n FILEIO_TIMESTAMP timeStamp; // The create or write time of the file that 
has been found.\n\n // Private Parameters\n uint32_t baseDirCluster;\n uint32_t currentDirCluster;\n uint16_t currentClusterOffset;\n uint16_t currentEntryOffset;\n uint16_t pathOffset;\n char driveId;\n} FILEIO_SEARCH_RECORD;\n\n/***************************************************************************\n* Prototypes *\n***************************************************************************/\n\n/***************************************************************************\n Function:\n int FILEIO_Initialize (void)\n\n Summary:\n Initialized the FILEIO library.\n\n Description:\n Initializes the structures used by the FILEIO library.\n\n Precondition:\n None.\n\n Parameters:\n void\n\n Returns:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE \n***************************************************************************/\nint FILEIO_Initialize (void);\n\n/***************************************************************************\n Function:\n int FILEIO_Reinitialize (void)\n\n Summary:\n Reinitialized the FILEIO library.\n\n Description:\n Reinitialized the structures used by the FILEIO library.\n\n Precondition:\n FILEIO_Initialize must have been called.\n\n Parameters:\n void\n\n Returns:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE \n***************************************************************************/\nint FILEIO_Reinitialize (void);\n\n/***************************************************************************\n Function:\n typedef void (*FILEIO_TimestampGet)(FILEIO_TIMESTAMP *)\n\n Summary:\n Describes the user-implemented function to provide the timestamp.\n\n Description:\n Files in a FAT files system use time values to track create time,\n access time, and last-modified time. In the FILEIO library, the \n user must implement a function that the library can call to \n obtain the current time. 
That function will have this format.\n\n Precondition:\n N/A.\n\n Parameters:\n FILEIO_TIMESTAMP * - Pointer to a timestamp structure that \n must be populated by the user's function.\n\n Returns:\n void\n***************************************************************************/\ntypedef void (*FILEIO_TimestampGet)(FILEIO_TIMESTAMP *);\n\n/***************************************************************************\n Function:\n void FILEIO_RegisterTimestampGet (FILEIO_TimestampGet timestampFunction)\n\n Summary:\n Registers a FILEIO_TimestampGet function with the library.\n\n Description:\n The user must call this function to specify which user-implemented \n function will be called by the library to generate timestamps.\n\n Precondition:\n FILEIO_Initialize must have been called.\n\n Parameters:\n timestampFunction - A pointer to the user-implemented function\n that will provide timestamps to the library.\n\n Returns:\n void\n***************************************************************************/\nvoid FILEIO_RegisterTimestampGet (FILEIO_TimestampGet timestampFunction);\n\n/***************************************************************************\n Function:\n bool FILEIO_MediaDetect (const FILEIO_DRIVE_CONFIG * driveConfig,\n void * mediaParameters)\n\n Summary:\n Determines if the given media is accessible.\n\n Description:\n This function determines if a specified media device is available\n for further access.\n\n Precondition:\n FILEIO_Initialize must have been called. 
The driveConfig struct\n must have been initialized with the media-specific parameters and\n the FILEIO_DRIVER_MediaDetect function.\n\n Parameters:\n driveConfig - Constant structure containing function pointers that\n the library will use to access the drive.\n mediaParameters - Pointer to the media-specific parameter structure\n\n Returns:\n * If media is available : true\n * If media is not available : false \n***************************************************************************/\nbool FILEIO_MediaDetect (const FILEIO_DRIVE_CONFIG * driveConfig, void * mediaParameters);\n\n/*****************************************************************************\n Function:\n FILEIO_ERROR_TYPE FILEIO_DriveMount (char driveId,\n const FILEIO_DRIVE_CONFIG * driveConfig, void * mediaParameters);\n \n Summary:\n Initializes a drive and loads its configuration information.\n Description:\n This function will initialize a drive and load the required information\n from it.\n Conditions:\n FILEIO_Initialize must have been called.\n Input:\n driveId - An alphanumeric character that will be used to\n identify the drive.\n driveConfig - Constant structure containing function pointers that\n the library will use to access the drive.\n mediaParameters - Constant structure containing media\\-specific values\n that describe which instance of the media to use for\n this operation.\n Return:\n * FILEIO_ERROR_NONE - Drive was mounted successfully\n * FILEIO_ERROR_TOO_MANY_DRIVES_OPEN - You have already mounted\n the maximum number of drives. 
Change FILEIO_CONFIG_MAX_DRIVES in\n fileio_config.h to increase this.\n * FILEIO_ERROR_WRITE - The library was not able to write cached\n data in the buffer to the device (can occur when using multiple drives\n and single buffer mode)\n * FILEIO_ERROR_INIT_ERROR - The driver's Media Initialize\n \\function indicated that the media could not be initialized.\n * FILEIO_ERROR_UNSUPPORTED_SECTOR_SIZE - The media's sector size\n exceeds the maximum sector size specified in fileio_config.h\n (FILEIO_CONFIG_MEDIA_SECTOR_SIZE macro)\n * FILEIO_ERROR_BAD_SECTOR_READ - The stack could not read the\n boot sector of Master Boot Record from the media.\n * FILEIO_ERROR_BAD_PARTITION - The boot signature in the MBR is\n bad on your media device.\n * FILEIO_ERROR_UNSUPPORTED_FS - The partition is formatted with\n an unsupported file system.\n * FILEIO_ERROR_NOT_FORMATTED - One of the parameters in the boot\n sector is bad in the partition being mounted. \n *****************************************************************************/\nFILEIO_ERROR_TYPE FILEIO_DriveMount (char driveId, const FILEIO_DRIVE_CONFIG * driveConfig, void * mediaParameters);\n\n/***************************************************************************\n Function:\n int FILEIO_Format (FILEIO_DRIVE_CONFIG * config,\n void * mediaParameters, char mode,\n uint32_t serialNumber, char * volumeID)\n\n Summary:\n Formats a drive.\n\n Description:\n Formats a drive.\n\n Precondition:\n FILEIO_Initialize must have been called.\n\n Parameters:\n config - Drive configuration pointer\n mode - FILEIO_FORMAT_MODE specifier\n serialNumber - Serial number to write to the drive\n volumeId - Name of the drive.\n\n Returns:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE \n***************************************************************************/\nint FILEIO_Format (FILEIO_DRIVE_CONFIG * config, void * mediaParameters, FILEIO_FORMAT_MODE mode, uint32_t serialNumber, char * 
volumeId);\n\n/***********************************************************************\n Function:\n int FILEIO_DriveUnmount (const char driveID)\n \n Summary:\n Unmounts a drive.\n Description:\n Unmounts a drive from the file system and writes any pending data to\n the drive.\n Conditions:\n FILEIO_DriveMount must have been called.\n Input:\n driveId - The character representation of the mounted drive.\n Return:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE \n ***********************************************************************/\nint FILEIO_DriveUnmount (const char driveId);\n\n/******************************************************************************\n Function:\n int FILEIO_Remove (const char * pathName)\n \n Summary:\n Deletes a file.\n Description:\n Deletes the file specified by pathName.\n Conditions:\n The file's drive must be mounted and the file should exist.\n Input:\n pathName - The path/name of the file.\n Return:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE\n \n * Sets error code which can be retrieved with FILEIO_ErrorGet. Note\n that if the path cannot be resolved, the error will be returned for the\n current working directory.\n * FILEIO_ERROR_INVALID_ARGUMENT - The path could not be\n resolved.\n * FILEIO_ERROR_WRITE_PROTECTED - The device is write-protected.\n * FILEIO_ERROR_INVALID_FILENAME - The file name is invalid.\n * FILEIO_ERROR_DELETE_DIR - The file being deleted is actually\n a directory (use FILEIO_DirectoryRemove)\n * FILEIO_ERROR_ERASE_FAIL - The erase operation failed.\n * FILEIO_ERROR_FILE_NOT_FOUND - The file entries for this file\n are invalid or have already been erased.\n * FILEIO_ERROR_WRITE - The updated file data and entry could\n not be written to the device.\n * FILEIO_ERROR_DONE - The directory entry could not be found.\n * FILEIO_ERROR_BAD_SECTOR_READ - The directory entry could not\n be cached. 
\n ******************************************************************************/\nint FILEIO_Remove (const char * pathName);\n\n/*******************************************************************************\n Function:\n int FILEIO_Rename (const char * oldPathname, const char * newFilename)\n \n Summary:\n Renames a file.\n Description:\n Renames a file specifed by oldPathname to the name specified by\n newFilename.\n Conditions:\n The file's drive must be mounted and the file/path specified by\n oldPathname must exist.\n Input:\n oldPathName - The path/name of the file to rename.\n newFileName - The new name of the file.\n Return:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE\n \n * Sets error code which can be retrieved with FILEIO_ErrorGet Note\n that if the path cannot be resolved, the error will be returned for the\n current working directory.\n * FILEIO_ERROR_INVALID_ARGUMENT - The path could not be\n resolved.\n * FILEIO_ERROR_WRITE_PROTECTED - The device is write-protected.\n * FILEIO_ERROR_INVALID_FILENAME - One of the file names is\n invalid.\n * FILEIO_ERROR_FILENAME_EXISTS - The new file name already\n exists on this device.\n * FILEIO_ERROR_FILE_NOT_FOUND - The file could not be found.\n * FILEIO_ERROR_WRITE - The updated file data and entry could\n not be written to the device.\n * FILEIO_ERROR_DONE - The directory entry could not be found or\n the library could not find a sufficient number of empty entries in the\n dir to store the new file name.\n * FILEIO_ERROR_BAD_SECTOR_READ - The directory entry could not\n be cached.\n * FILEIO_ERROR_ERASE_FAIL - The file's entries could not be\n erased (applies when renaming a long file name)\n * FILEIO_ERROR_DIR_FULL - New file entries could not be\n created.\n * FILEIO_ERROR_BAD_CACHE_READ - The lfn entries could not be\n cached. 
\n *******************************************************************************/\nint FILEIO_Rename (const char * oldPathName, const char * newFileName);\n\n/************************************************************\n Function:\n int FILEIO_DirectoryMake (const char * path)\n \n Summary:\n Creates the directory/directories specified by 'path.'\n\t\n Description:\n Creates the directory/directories specified by 'path.'\n\t\n Conditions:\n The specified drive must be mounted.\n\t\n Input:\n path - Path string containing all directories to create.\n\t\n Return:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE \n ************************************************************/\nint FILEIO_DirectoryMake (const char * path);\n\n/*************************************************************************\n Function:\n int FILEIO_DirectoryChange (const char * path)\n \n Summary:\n Changes the current working directory.\n\t\n Description:\n Changes the current working directory to the directory specified by\n 'path.'\n\t\n Conditions:\n The specified drive must be mounted and the directory being changed to\n should exist.\n\t\n Input:\n path - The path of the directory to change to.\n\t\n Return:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE \n *************************************************************************/\nint FILEIO_DirectoryChange (const char * path);\n\n/******************************************************************************\n Function:\n uint16_t FILEIO_DirectoryGetCurrent (char * buffer, uint16_t size)\n \n Summary:\n Gets the name of the current working directory.\n Description:\n Gets the name of the current working directory and stores it in\n 'buffer.' The directory name will be null-terminated. 
If the buffer\n size is insufficient to contain the whole path name, as much as\n possible will be copied and null-terminated.\n Conditions:\n A drive must be mounted.\n Input:\n buffer - The buffer to contain the current working directory name.\n size - Size of the buffer (bytes).\n Return:\n * uint16_t - The number of characters in the current working\n directory name. May exceed the size of the buffer. In this case, the\n name will be truncated to 'size' characters, but the full length of the\n path name will be returned.\n * Sets error code which can be retrieved with FILEIO_ErrorGet\n * FILEIO_ERROR_INVALID_ARGUMENT - The arguments for the buffer\n or its size were invalid.\n * FILEIO_ERROR_DIR_NOT_FOUND - One of the directories in your\n current working directory could not be found in its parent directory.\n ******************************************************************************/\nuint16_t FILEIO_DirectoryGetCurrent (char * buffer, uint16_t size);\n\n/************************************************************************\n Function:\n int FILEIO_DirectoryRemove (const char * pathName)\n \n Summary:\n Deletes a directory.\n Description:\n Deletes a directory. 
The specified directory must be empty.\n Conditions:\n The directory's drive must be mounted and the directory should exist.\n Input:\n pathName - The path/name of the directory to delete.\n Return:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE \n ************************************************************************/\nint FILEIO_DirectoryRemove (const char * pathName);\n\n/***************************************************************************\n Function:\n FILEIO_ERROR_TYPE FILEIO_ErrorGet (char driveId)\n\n Summary:\n Gets the last error condition of a drive.\n\n Description:\n Gets the last error condition of the specified drive.\n\n Precondition:\n The drive must have been mounted.\n\n Parameters:\n driveId - The character representation of the drive.\n\n Returns:\n FILEIO_ERROR_TYPE - The last error that occurred on the drive.\n***************************************************************************/\nFILEIO_ERROR_TYPE FILEIO_ErrorGet (char driveId);\n\n/***************************************************************************\n Function:\n void FILEIO_ErrorClear (char driveId)\n\n Summary:\n Clears the last error on a drive.\n\n Description:\n Clears the last error of the specified drive.\n\n Precondition:\n The drive must have been mounted.\n\n Parameters:\n driveId - The character representation of the drive.\n\n Returns:\n void\n***************************************************************************/\nvoid FILEIO_ErrorClear (char driveId);\n\n/***************************************************************************************\n Function:\n int FILEIO_Open (FILEIO_OBJECT * filePtr, const char * pathName, uint16_t mode)\n \n Summary:\n Opens a file for access.\n Description:\n Opens a file for access using a combination of modes specified by the\n user.\n Conditions:\n The drive containing the file must be mounted.\n Input:\n filePtr - Pointer to the file object to initialize\n pathName - The path/name of the 
file to open.\n mode - The mode in which the file should be opened. Specified by\n inclusive or'ing parameters from FILEIO_OPEN_ACCESS_MODES.\n Return:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE\n \n * Sets error code which can be retrieved with FILEIO_ErrorGet Note\n that if the path cannot be resolved, the error will be returned for the\n current working directory.\n * FILEIO_ERROR_INVALID_ARGUMENT - The path could not be\n resolved.\n * FILEIO_ERROR_WRITE_PROTECTED - The device is write protected\n or this function was called in a write/create mode when writes are\n disabled in configuration.\n * FILEIO_ERROR_INVALID_FILENAME - The file name is invalid.\n * FILEIO_ERROR_ERASE_FAIL - There was an error when trying to\n truncate the file.\n * FILEIO_ERROR_WRITE - Cached file data could not be written to\n the device.\n * FILEIO_ERROR_DONE - The directory entry could not be found.\n * FILEIO_ERROR_BAD_SECTOR_READ - The directory entry could not\n be cached.\n * FILEIO_ERROR_DRIVE_FULL - There are no more clusters\n available on this device that can be allocated to the file.\n * FILEIO_ERROR_FILENAME_EXISTS - All of the possible alias\n values for this file are in use.\n * FILEIO_ERROR_BAD_CACHE_READ - There was an error caching LFN\n entries.\n * FILEIO_ERROR_INVALID_CLUSTER - The next cluster in the file\n is invalid (can occur in APPEND mode).\n * FILEIO_ERROR_COULD_NOT_GET_CLUSTER - There was an error\n finding the cluster that contained the specified offset (can occur in\n APPEND mode). \n ***************************************************************************************/\nint FILEIO_Open (FILEIO_OBJECT * filePtr, const char * pathName, uint16_t mode);\n\n/***************************************************************************\n Function:\n int FILEIO_Close (FILEIO_OBJECT * handle)\n\n Summary:\n Closes a file.\n\n Description:\n Closes a file. 
This will save the unwritten data to the file and \n make the memory used to allocate a file available to open other \n files.\n\n Precondition:\n The drive containing the file must be mounted and the file handle \n must represent a valid, opened file.\n\n Parameters:\n handle - The handle of the file to close.\n\n Returns:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE\n \n * Sets error code which can be retrieved with FILEIO_ErrorGet\n * FILEIO_ERROR_WRITE - Data could not be written to the device.\n * FILEIO_ERROR_BAD_CACHE_READ - The file's directory entry\n could not be cached. \n***************************************************************************/\nint FILEIO_Close (FILEIO_OBJECT * handle);\n\n/***************************************************************************\n Function:\n int FILEIO_Flush (FILEIO_OBJECT * handle)\n\n Summary:\n Saves unwritten file data to the device without closing the file.\n\n Description:\n Saves unwritten file data to the device without closing the file. \n This function is useful if the user needs to continue writing to \n a file but also wants to ensure that data isn't lost in the event \n of a reset or power loss condition.\n\n Precondition:\n The drive containing the file must be mounted and the file handle \n must represent a valid, opened file. \n\n Parameters:\n handle - The handle of the file to flush.\n\n Returns:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE\n\n * Sets error code which can be retrieved with FILEIO_ErrorGet\n * FILEIO_ERROR_WRITE - Data could not be written to the device.\n * FILEIO_ERROR_BAD_CACHE_READ - The file's directory entry\n could not be cached. 
\n***************************************************************************/\nint FILEIO_Flush (FILEIO_OBJECT * handle);\n\n/***************************************************************************\n Function:\n int FILEIO_GetChar (FILEIO_OBJECT * handle)\n\n Summary:\n Reads a character from a file.\n\n Description:\n Reads a character from a file.\n\n Precondition:\n The drive containing the file must be mounted and the file handle \n must represent a valid, opened file.\n\n Parameters:\n handle - The handle of the file.\n\n Returns:\n * If Success: The character that was read (cast to an int).\n * If Failure: FILEIO_RESULT_FAILURE\n \n * Sets error code which can be retrieved with FILEIO_ErrorGet\n * FILEIO_ERROR_WRITE_ONLY - The file is not opened in read\n mode.\n * FILEIO_ERROR_BAD_SECTOR_READ - There was an error reading the\n FAT to determine the next cluster in the file, or an error reading the\n file data.\n * FILEIO_ERROR_INVALID_CLUSTER - The next cluster in the file\n is invalid.\n * FILEIO_ERROR_EOF - There is no next cluster in the file (EOF)\n * FILEIO_ERROR_WRITE - Cached data could not be written to the\n device. 
\n *******************************************************************************/\nint FILEIO_GetChar (FILEIO_OBJECT * handle);\n\n/***************************************************************************\n Function:\n int FILEIO_PutChar (char c, FILEIO_OBJECT * handle)\n\n Summary:\n Writes a character to a file.\n\n Description:\n Writes a character to a file.\n\n Precondition:\n The drive containing the file must be mounted and the file handle \n must represent a valid, opened file.\n\n Parameters:\n c - The character to write.\n handle - The handle of the file.\n\n Returns:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE\n \n * Sets error code which can be retrieved with FILEIO_ErrorGet\n * FILEIO_ERROR_READ_ONLY - The file was not opened in write\n mode.\n * FILEIO_ERROR_WRITE_PROTECTED - The media is write-protected.\n * FILEIO_ERROR_BAD_SECTOR_READ - There was an error reading the\n FAT to determine the next cluster in the file, or an error reading the\n file data.\n * FILEIO_ERROR_INVALID_CLUSTER - The next cluster in the file\n is invalid.\n * FILEIO_ERROR_WRITE - Cached data could not be written to the\n device.\n * FILEIO_ERROR_BAD_SECTOR_READ - File data could not be cached.\n * FILEIO_ERROR_DRIVE_FULL - There are no more clusters on the\n media that can be allocated to the file. 
\n *******************************************************************************/\nint FILEIO_PutChar (char c, FILEIO_OBJECT * handle);\n\n/***************************************************************************\n Function:\n size_t FILEIO_Read (void * buffer, size_t size, size_t count,\n FILEIO_OBJECT * handle)\n\n Summary:\n Reads data from a file.\n\n Description:\n Reads data from a file and stores it in 'buffer.'\n\n Precondition:\n The drive containing the file must be mounted and the file handle \n must represent a valid, opened file.\n\n Parameters:\n buffer - The buffer that the data will be written to.\n size - The size of data objects to read, in bytes\n count - The number of data objects to read\n handle - The handle of the file.\n\n Returns:\n The number of data objects that were read. This value will match\n 'count' if the read was successful, or be less than count if it was\n not.\n \n Sets error code which can be retrieved with FILEIO_ErrorGet:\n * FILEIO_ERROR_WRITE_ONLY - The file is not opened in read mode.\n * FILEIO_ERROR_BAD_SECTOR_READ - There was an error reading the\n FAT to determine the next cluster in the file, or an error reading the\n \\file data.\n * FILEIO_ERROR_INVALID_CLUSTER - The next cluster in the file is\n invalid.\n * FILEIO_ERROR_EOF - There is no next cluster in the file (EOF)\n * FILEIO_ERROR_WRITE - Cached data could not be written to the\n device. 
\n *****************************************************************************/\nsize_t FILEIO_Read (void * buffer, size_t size, size_t count, FILEIO_OBJECT * handle);\n\n/***************************************************************************\n Function:\n size_t FILEIO_Write (void * buffer, size_t size, size_t count,\n FILEIO_OBJECT * handle)\n\n Summary:\n Writes data to a file.\n\n Description:\n Writes data from 'buffer' to a file.\n\n Precondition:\n The drive containing the file must be mounted and the file handle \n must represent a valid, opened file.\n\n Parameters:\n buffer - The buffer that contains the data to write.\n size - The size of data objects to write, in bytes\n count - The number of data objects to write\n handle - The handle of the file.\n\n Returns:\n The number of data objects that were written. This value will match\n 'count' if the write was successful, or be less than count if it was\n not.\n \n Sets error code which can be retrieved with FILEIO_ErrorGet:\n * FILEIO_ERROR_READ_ONLY - The file was not opened in write mode.\n * FILEIO_ERROR_WRITE_PROTECTED - The media is write-protected.\n * FILEIO_ERROR_BAD_SECTOR_READ - There was an error reading the\n FAT to determine the next cluster in the file, or an error reading the\n file data.\n * FILEIO_ERROR_INVALID_CLUSTER - The next cluster in the file is\n invalid.\n * FILEIO_ERROR_WRITE - Cached data could not be written to the\n device.\n * FILEIO_ERROR_BAD_SECTOR_READ - File data could not be cached.\n * FILEIO_ERROR_DRIVE_FULL - There are no more clusters on the\n media that can be allocated to the file. 
\n *****************************************************************************/\nsize_t FILEIO_Write (const void * buffer, size_t size, size_t count, FILEIO_OBJECT * handle);\n\n/***************************************************************************\n Function:\n int FILEIO_Seek (FILEIO_OBJECT * handle, int32_t offset, int base)\n\n Summary:\n Changes the current read/write position in the file.\n\n Description:\n Changes the current read/write position in the file.\n\n Precondition:\n The drive containing the file must be mounted and the file handle \n must represent a valid, opened file.\n\n Parameters:\n handle - The handle of the file.\n offset - The offset of the new read/write position (in bytes) from \n the base location. The offset will be added to FILEIO_SEEK_SET \n or FILEIO_SEEK_CUR, or subtracted from FILEIO_SEEK_END.\n base - The base location. Is of the FILEIO_SEEK_BASE type.\n\n Returns:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE\n \n * Sets error code which can be retrieved with FILEIO_ErrorGet\n * FILEIO_ERROR_WRITE - Cached data could not be written to the\n device.\n * FILEIO_ERROR_INVALID_ARGUMENT - The specified location\n exceeds the file's size.\n * FILEIO_ERROR_BAD_SECTOR_READ - There was an error reading the\n FAT to determine the next cluster in the file, or an error reading the\n file data.\n * FILEIO_ERROR_INVALID_CLUSTER - The next cluster in the file\n is invalid.\n * FILEIO_ERROR_DRIVE_FULL - There are no more clusters on the\n media that can be allocated to the file. Clusters will be allocated to\n the file if the file is opened in a write mode and the user seeks to\n the end of a file that ends on a cluster boundary.\n * FILEIO_ERROR_COULD_NOT_GET_CLUSTER - There was an error\n finding the cluster that contained the specified offset. 
\n *******************************************************************************/\nint FILEIO_Seek (FILEIO_OBJECT * handle, int32_t offset, int base);\n\n/***************************************************************************\n Function:\n bool FILEIO_Eof (FILEIO_OBJECT * handle)\n\n Summary:\n Determines if the file's current read/write position is at the end \n of the file.\n\n Description:\n Determines if the file's current read/write position is at the end \n of the file. \n\n Precondition:\n The drive containing the file must be mounted and the file handle \n must represent a valid, opened file.\n\n Parameters:\n handle - The handle of the file.\n\n Returns:\n * If EOF: true\n * If Not EOF: false \n *************************************************************************/\nbool FILEIO_Eof (FILEIO_OBJECT * handle);\n\n/***************************************************************************\n Function:\n long FILEIO_Tell (FILEIO_OBJECT * handle)\n\n Summary:\n Returns the current read/write position in the file.\n\n Description:\n Returns the current read/write position in the file.\n\n Precondition:\n The drive containing the file must be mounted and the file handle \n must represent a valid, opened file.\n\n Parameters:\n handle - THe handle of the file.\n\n Returns:\n long - Offset of the current read/write position from the beginning \n of the file, in bytes.\n***************************************************************************/\nlong FILEIO_Tell (FILEIO_OBJECT * handle);\n\n/******************************************************************************\n Function:\n int FILEIO_Find (const char * fileName, unsigned int attr,\n FILEIO_SEARCH_RECORD * record, bool newSearch)\n \n Summary:\n Searches for a file in the current working directory.\n Description:\n Searches for a file in the current working directory.\n Conditions:\n A drive must have been mounted by the FILEIO library.\n Input:\n fileName - The file's name. 
May contain limited partial string search\n elements. '?' can be used as a single\\-character wild\\-card\n and '*' can be used as a multiple\\-character wild card\n (only at the end of the file's name or extension).\n attr - Inclusive OR of all of the attributes (FILEIO_ATTRIBUTES\n structure members) that a found file may have.\n record - Structure containing parameters about the found file. Also\n contains private information used for additional searches\n for files that match the given criteria in the same\n directory.\n newSearch - true if this is the first search for the specified file\n \\parameters in the specified directory, false otherwise.\n This parameter must be specified as 'true' the first time\n this function is called with any given FILEIO_SEARCH_RECORD\n structure. The same FILEIO_SEARCH_RECORD structure should\n be used with subsequent calls of this function to search\n for additional files matching the given criteria.\n Return:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE\n * Returns file information in the record parameter.\n \n * Sets error code which can be retrieved with FILEIO_ErrorGet Note\n that if the path cannot be resolved, the error will be returned for the\n current working directory.\n * FILEIO_ERROR_INVALID_ARGUMENT - The path could not be\n resolved.\n * FILEIO_ERROR_INVALID_FILENAME - The file name is invalid.\n * FILEIO_ERROR_BAD_CACHE_READ - There was an error searching\n directory entries.\n * FILEIO_ERROR_DONE - File not found. 
\n ******************************************************************************/\nint FILEIO_Find (const char * fileName, unsigned int attr, FILEIO_SEARCH_RECORD * record, bool newSearch);\n\n/***************************************************************************\n Function:\n int FILEIO_LongFileNameGet (FILEIO_SEARCH_RECORD * record, uint16_t * buffer, uint16_t length)\n\n Summary:\n Obtains the long file name of a file found by the FILEIO_Find \n function.\n\n Description:\n This function will obtain the long file name of a file found \n by the FILEIO_Find function and copy it into a user-specified \n buffer. The name will be returned in unicode characters.\n\n Precondition:\n A drive must have been mounted by the FILEIO library. The \n FILEIO_SEARCH_RECORD structure must contain valid file information \n obtained from the FILEIO_Find function.\n\n Parameters:\n record - The file record obtained from a successful call of \n FILEIO_Find.\n buffer - A buffer to contain the long file name of the file.\n length - The length of the buffer, in 16-bit words.\n\n Returns:\n * If Success: FILEIO_RESULT_SUCCESS\n * If Failure: FILEIO_RESULT_FAILURE\n \n * Sets error code which can be retrieved with FILEIO_ErrorGet Note\n that if the path cannot be resolved, the error will be returned for the\n current working directory.\n * FILEIO_ERROR_INVALID_ARGUMENT - The path could not be\n resolved.\n * FILEIO_ERROR_NO_LONG_FILE_NAME - The short file name does not\n have an associated long file name.\n * FILEIO_ERROR_DONE - The directory entry could not be cached\n because the entryOffset contained in record was invalid.\n * FILEIO_ERROR_WRITE - Cached data could not be written to the\n device.\n * FILEIO_ERROR_BAD_SECTOR_READ - The directory entry could not\n be cached because there was an error reading from the device. 
\n ***************************************************************************************************/\nint FILEIO_LongFileNameGet (FILEIO_SEARCH_RECORD * record, uint16_t * buffer, uint16_t length);\n\n/********************************************************************\n Function:\n FILEIO_FILE_SYSTEM_TYPE FILEIO_FileSystemTypeGet (char driveId)\n \n Summary:\n Describes the file system type of a file system.\n Description:\n Describes the file system type of a file system.\n Conditions:\n A drive must have been mounted by the FILEIO library.\n Input:\n driveId - Character representation of the mounted device.\n Return:\n * If Success: FILEIO_FILE_SYSTEM_TYPE enumeration member\n * If Failure: FILEIO_FILE_SYSTEM_NONE \n ********************************************************************/\nFILEIO_FILE_SYSTEM_TYPE FILEIO_FileSystemTypeGet (char driveId);\n\n/*********************************************************************************\n Function:\n void FILEIO_DrivePropertiesGet()\n\n Summary:\n Allows user to get the drive properties (size of drive, free space, etc)\n\n Conditions:\n 1) ALLOW_GET_FILEIO_DRIVE_PROPERTIES must be defined in FSconfig.h\n 2) a FS_FILEIO_DRIVE_PROPERTIES object must be created before the function is called\n 3) the new_request member of the FS_FILEIO_DRIVE_PROPERTIES object must be set before\n calling the function for the first time. This will start a new search.\n 4) this function should not be called while there is a file open. Close all\n files before calling this function.\n\n Input:\n properties - a pointer to a FS_FILEIO_DRIVE_PROPERTIES object where the results should\n be stored.\n\n Return Values:\n This function returns void. The properties_status of the previous call of \n this function is located in the properties.status field. This field has \n the following possible values:\n\n FILEIO_GET_PROPERTIES_NO_ERRORS - operation completed without error. 
Results\n are in the properties object passed into the function.\n FILEIO_GET_PROPERTIES_DRIVE_NOT_MOUNTED - there is no mounted disk. Results in\n properties object is not valid\n FILEIO_GET_PROPERTIES_CLUSTER_FAILURE - there was a failure trying to read a \n cluster from the drive. The results in the properties object is a partial\n result up until the point of the failure.\n FILEIO_GET_PROPERTIES_STILL_WORKING - the search for free sectors is still in\n process. Continue calling this function with the same properties pointer \n until either the function completes or until the partial results meets the\n application needs. The properties object contains the partial results of\n the search and can be used by the application. \n\n Side Effects:\n Can cause errors if called when files are open. Close all files before\n calling this function.\n\n Calling this function without setting the new_request member on the first\n call can result in undefined behavior and results.\n\n Calling this function after a result is returned other than\n FILEIO_GET_PROPERTIES_STILL_WORKING can result in undefined behavior and results.\n\n Description: \n This function returns the information about the mounted drive. The results \n member of the properties object passed into the function is populated with \n the information about the drive. \n\n Before starting a new request, the new_request member of the properties\n input parameter should be set to true. This will initiate a new search\n request.\n\n This function will return before the search is complete with partial results.\n All of the results except the free_clusters will be correct after the first\n call. The free_clusters will contain the number of free clusters found up\n until that point, thus the free_clusters result will continue to grow until\n the entire drive is searched. 
If an application only needs to know that a \n certain number of bytes is available and doesn't need to know the total free \n size, then this function can be called until the required free size is\n verified. To continue a search, pass a pointer to the same FILEIO_FILEIO_DRIVE_PROPERTIES\n object that was passed in to create the search.\n\n A new search request should be made once this function has returned a value \n other than FILEIO_GET_PROPERTIES_STILL_WORKING. Continuing a completed search\n can result in undefined behavior or results.\n\n Typical Usage:\n \n FILEIO_DRIVE_PROPERTIES disk_properties;\n\n disk_properties.new_request = true;\n\n do\n {\n FILEIO_DiskPropertiesGet(&disk_properties, 'A');\n } while (disk_properties.properties_status == FILEIO_GET_PROPERTIES_STILL_WORKING);\n \n\n results.disk_format - contains the format of the drive. Valid results are \n FAT12(1), FAT16(2), or FAT32(3).\n\n results.sector_size - the sector size of the mounted drive. Valid values are\n 512, 1024, 2048, and 4096.\n\n results.sectors_per_cluster - the number sectors per cluster.\n\n results.total_clusters - the number of total clusters on the drive. This \n can be used to calculate the total disk size (total_clusters * \n sectors_per_cluster * sector_size = total size of drive in bytes)\n\n results.free_clusters - the number of free (unallocated) clusters on the drive.\n This can be used to calculate the total free disk size (free_clusters * \n sectors_per_cluster * sector_size = total size of drive in bytes)\n\n Remarks:\n PIC24F size estimates:\n Flash - 400 bytes (-Os setting)\n\n PIC24F speed estimates:\n Search takes approximately 7 seconds per Gigabyte of drive space. 
Speed\n will vary based on the number of sectors per cluster and the sector size.\n *********************************************************************************/\nvoid FILEIO_DrivePropertiesGet (FILEIO_DRIVE_PROPERTIES* properties, char driveId);\n\n#endif\n", "meta": {"content_hash": "00d1aeba2d2983c29725450cbc70da86", "timestamp": "", "source": "github", "line_count": 1471, "max_line_length": 176, "avg_line_length": 42.130523453433035, "alnum_prop": 0.5914899796688934, "repo_name": "medo64/TmpUsb", "id": "a8095af34c28abf7a0362ddfed5400aabcb87f8e", "size": "63489", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "firmware/src/Microchip/Framework/fileio/fileio.h", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "840025"}, {"name": "Makefile", "bytes": "42992"}, {"name": "PHP", "bytes": "144"}, {"name": "PowerShell", "bytes": "8819"}, {"name": "Shell", "bytes": "1333"}]}} {"text": "{-# LANGUAGE TemplateHaskell, ScopedTypeVariables #-}\n{-# OPTIONS_GHC -fno-warn-orphans #-}\n\n{-| Unittests for the SlotMap.\n\n-}\n\n{-\n\nCopyright (C) 2014 Google Inc.\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n1. Redistributions of source code must retain the above copyright notice,\nthis list of conditions and the following disclaimer.\n\n2. Redistributions in binary form must reproduce the above copyright\nnotice, this list of conditions and the following disclaimer in the\ndocumentation and/or other materials provided with the distribution.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS\nIS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED\nTO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR\nPURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR\nCONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,\nEXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,\nPROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR\nPROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF\nLIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\nNEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\nSOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n-}\n\nmodule Test.Ganeti.SlotMap\n ( testSlotMap\n , genSlotLimit\n , genTestKey\n , overfullKeys\n ) where\n\nimport Prelude hiding (all)\n\nimport Control.Applicative\nimport Control.Monad\nimport Data.Foldable (all)\nimport qualified Data.Map as Map\nimport Data.Map (Map, member, keys, keysSet)\nimport Data.Set (Set, size, union)\nimport qualified Data.Set as Set\nimport Data.Traversable (traverse)\nimport Test.HUnit\nimport Test.QuickCheck\n\nimport Test.Ganeti.TestCommon\nimport Test.Ganeti.TestHelper\nimport Test.Ganeti.Types ()\n\nimport Ganeti.SlotMap\n\n{-# ANN module \"HLint: ignore Use camelCase\" #-}\n\n\n-- | Generates a number typical for the limit of a `Slot`.\n-- Useful for constructing resource bounds when not directly constructing\n-- the relevant `Slot`s.\ngenSlotLimit :: Gen Int\ngenSlotLimit = frequency [ (9, choose (1, 5))\n , (1, choose (1, 100))\n ] -- Don't create huge slot limits.\n\n\ninstance Arbitrary Slot where\n arbitrary = do\n limit <- genSlotLimit\n occ <- choose (0, limit * 2)\n return $ Slot occ limit\n\n\n-- | Generates a number typical for the occupied count of a `Slot`.\n-- Useful for constructing `CountMap`s.\ngenSlotCount :: Gen Int\ngenSlotCount = slotOccupied <$> arbitrary\n\n\n-- | Takes a slot and resamples its `slotOccupied` count to fit the limit.\nresampleFittingSlot :: Slot -> Gen Slot\nresampleFittingSlot (Slot _ limit) = do\n occ <- choose (0, limit)\n return $ Slot occ limit\n\n\n-- | What we use as 
key for testing `SlotMap`s.\ntype TestKey = String\n\n\n-- | Generates short strings used as `SlotMap` keys.\n--\n-- We limit ourselves to a small set of key strings with high probability to\n-- increase the chance that `SlotMap`s actually have more than one slot taken.\ngenTestKey :: Gen TestKey\ngenTestKey = frequency [ (9, elements [\"a\", \"b\", \"c\", \"d\", \"e\"])\n , (1, genPrintableAsciiString)\n ]\n\n\n-- | Generates small lists.\nlistSizeGen :: Gen Int\nlistSizeGen = frequency [ (9, choose (1, 5))\n , (1, choose (1, 100))\n ]\n\n\n-- | Generates a `SlotMap` given a generator for the keys (see `genTestKey`).\ngenSlotMap :: (Ord a) => Gen a -> Gen (SlotMap a)\ngenSlotMap keyGen = do\n n <- listSizeGen -- don't create huge `SlotMap`s\n Map.fromList <$> vectorOf n ((,) <$> keyGen <*> arbitrary)\n\n\n-- | Generates a `CountMap` given a generator for the keys (see `genTestKey`).\ngenCountMap :: (Ord a) => Gen a -> Gen (CountMap a)\ngenCountMap keyGen = do\n n <- listSizeGen -- don't create huge `CountMap`s\n Map.fromList <$> vectorOf n ((,) <$> keyGen <*> genSlotCount)\n\n\n-- | Tells which keys of a `SlotMap` are overfull.\noverfullKeys :: (Ord a) => SlotMap a -> Set a\noverfullKeys sm =\n Set.fromList [ a | (a, Slot occ limit) <- Map.toList sm, occ > limit ]\n\n\n-- | Generates a `SlotMap` for which all slots are within their limits.\ngenFittingSlotMap :: (Ord a) => Gen a -> Gen (SlotMap a)\ngenFittingSlotMap keyGen = do\n -- Generate a SlotMap, then resample all slots to be fitting.\n slotMap <- traverse resampleFittingSlot =<< genSlotMap keyGen\n when (isOverfull slotMap) $ error \"BUG: FittingSlotMap Gen is wrong\"\n return slotMap\n\n\n-- * Test cases\n\ncase_isOverfull :: Assertion\ncase_isOverfull = do\n\n assertBool \"overfull\"\n . isOverfull $ Map.fromList [(\"buck\", Slot 3 2)]\n\n assertBool \"not overfull\"\n . not . isOverfull $ Map.fromList [(\"buck\", Slot 2 2)]\n\n assertBool \"empty\"\n . not . 
isOverfull $ (Map.fromList [] :: SlotMap TestKey)\n\n\ncase_occupySlots_examples :: Assertion\ncase_occupySlots_examples = do\n let a n = (\"a\", Slot n 2)\n let b n = (\"b\", Slot n 4)\n\n let sm = Map.fromList [a 1, b 2]\n cm = Map.fromList [(\"a\", 1), (\"b\", 1), (\"c\", 5)]\n\n assertEqual \"fitting occupySlots\"\n (sm `occupySlots` cm)\n (Map.fromList [a 2, b 3, (\"c\", Slot 5 0)])\n\n\n-- | Union of the keys of two maps.\nkeyUnion :: (Ord a) => Map a b -> Map a c -> Set a\nkeyUnion a b = keysSet a `union` keysSet b\n\n\n-- | Tests properties of `SlotMap`s being filled up.\nprop_occupySlots :: Property\nprop_occupySlots =\n forAll arbitrary $ \\(sm :: SlotMap Int, cm :: CountMap Int) ->\n let smOcc = sm `occupySlots` cm\n in conjoin\n [ counterexample \"input keys are preserved\" $\n all (`member` smOcc) (keyUnion sm cm)\n , counterexample \"all keys must come from the input keys\" $\n all (`Set.member` keyUnion sm cm) (keys smOcc)\n ]\n\n\n-- | Tests for whether there's still space for a job given its rate\n-- limits.\ncase_hasSlotsFor_examples :: Assertion\ncase_hasSlotsFor_examples = do\n let a n = (\"a\", Slot n 2)\n let b n = (\"b\", Slot n 4)\n let c n = (\"c\", Slot n 8)\n\n let sm = Map.fromList [a 1, b 2]\n\n assertBool \"fits\" $\n sm `hasSlotsFor` Map.fromList [(\"a\", 1), (\"b\", 1)]\n\n assertBool \"doesn't fit\"\n . not $ sm `hasSlotsFor` Map.fromList [(\"a\", 1), (\"b\", 3)]\n\n let smOverfull = Map.fromList [a 1, b 2, c 10]\n\n assertBool \"fits (untouched keys overfull)\" $\n isOverfull smOverfull\n && smOverfull `hasSlotsFor` Map.fromList [(\"a\", 1), (\"b\", 1)]\n\n assertBool \"empty fitting\" $\n Map.empty `hasSlotsFor` (Map.empty :: CountMap TestKey)\n\n assertBool \"empty not fitting\"\n . not $ Map.empty `hasSlotsFor` Map.fromList [(\"a\", 1), (\"b\", 100)]\n\n assertBool \"empty not fitting\"\n . 
not $ Map.empty `hasSlotsFor` Map.fromList [(\"a\", 1)]\n\n\n-- | Tests properties of `hasSlotsFor` on `SlotMap`s that are known to\n-- respect their limits.\nprop_hasSlotsFor_fitting :: Property\nprop_hasSlotsFor_fitting =\n forAll (genFittingSlotMap genTestKey) $ \\sm ->\n forAll (genCountMap genTestKey) $ \\cm ->\n sm `hasSlotsFor` cm ==? not (isOverfull $ sm `occupySlots` cm)\n\n\n-- | Tests properties of `hasSlotsFor`, irrespective of whether the\n-- input `SlotMap`s respect their limits or not.\nprop_hasSlotsFor :: Property\nprop_hasSlotsFor =\n let -- Generates `SlotMap`s for combining.\n genMaps = resize 10 $ do -- We don't need very large SlotMaps.\n sm1 <- genSlotMap genTestKey\n -- We need to make sm2 smaller to make `hasSlots` below more\n -- likely (otherwise the LHS of ==> is always false).\n sm2 <- sized $ \\n -> resize (n `div` 3) (genSlotMap genTestKey)\n -- We also want to test (sm1, sm1); we have to make it more\n -- likely for it to ever happen.\n frequency [ (1, return (sm1, sm1))\n , (9, return (sm1, sm2)) ]\n\n in forAll genMaps $ \\(sm1, sm2) ->\n let fits = sm1 `hasSlotsFor` toCountMap sm2\n smOcc = sm1 `occupySlots` toCountMap sm2\n oldOverfullBucks = overfullKeys sm1\n newOverfullBucks = overfullKeys smOcc\n in conjoin\n [ counterexample \"if there's enough extra space, then the new\\\n \\ overfull keys must be as before\" $\n fits ==> (newOverfullBucks ==? oldOverfullBucks)\n -- Note that the other way around does not hold:\n -- (newOverfullBucks == oldOverfullBucks) ==> fits\n , counterexample \"joining SlotMaps must not change the number of\\\n \\ overfull keys (but may change their slot\\\n \\ counts\"\n . 
property $ size newOverfullBucks >= size oldOverfullBucks\n ]\n\n\ntestSuite \"SlotMap\"\n [ 'case_isOverfull\n , 'case_occupySlots_examples\n , 'prop_occupySlots\n , 'case_hasSlotsFor_examples\n , 'prop_hasSlotsFor_fitting\n , 'prop_hasSlotsFor\n ]\n", "meta": {"content_hash": "7114364e60f4c731d0f4509b3235b706", "timestamp": "", "source": "github", "line_count": 273, "max_line_length": 78, "avg_line_length": 32.78021978021978, "alnum_prop": 0.6578388646776177, "repo_name": "yiannist/ganeti", "id": "295240da9caa3222c9288f1556498458e1b197e7", "size": "8949", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "test/hs/Test/Ganeti/SlotMap.hs", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "Haskell", "bytes": "2509723"}, {"name": "JavaScript", "bytes": "8808"}, {"name": "M4", "bytes": "31972"}, {"name": "Makefile", "bytes": "96586"}, {"name": "Python", "bytes": "6231906"}, {"name": "Shell", "bytes": "151065"}]}} {"text": "% get root of current file\nroot = fullfile(fileparts(mfilename('fullpath')),'../');\n\np_generated = genpath([root '/Core']);\naddpath(p_generated);\naddpath([root '/IO']);\naddpath([root '/Data']);\naddpath([root '/Scripts']);\n\np_generated = genpath([root '/ThirdParty/tetgen1.4.3/bin']);\naddpath(p_generated);\n\np_generated = genpath([root '/ThirdParty/maslib/bin']);\naddpath(p_generated);\n\nclear p_generated;\n", "meta": {"content_hash": "81c97ff9b6a3f45fd28b2fd533c3a5bf", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 60, "avg_line_length": 25.3125, "alnum_prop": 0.6864197530864198, "repo_name": "siavashk/GMM-FEM", "id": "96770015495e84e120342582ab8d07c142dba254", "size": "405", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Scripts/add_bcpd_paths.m", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "C", "bytes": "94"}, {"name": "C++", "bytes": "2046372"}, {"name": "CMake", "bytes": "10558"}, {"name": "Makefile", "bytes": "3869"}, {"name": 
"Matlab", "bytes": "168332"}, {"name": "Shell", "bytes": "2371"}]}} {"text": "package main\n\nimport (\n\t\"context\"\n\n\taiplatform \"cloud.google.com/go/aiplatform/apiv1beta1\"\n\taiplatformpb \"cloud.google.com/go/aiplatform/apiv1beta1/aiplatformpb\"\n)\n\nfunc main() {\n\tctx := context.Background()\n\t// This snippet has been automatically generated and should be regarded as a code template only.\n\t// It will require modifications to work:\n\t// - It may require correct/in-range values for request initialization.\n\t// - It may require specifying regional endpoints when creating the service client as shown in:\n\t// https://pkg.go.dev/cloud.google.com/go#hdr-Client_Options\n\tc, err := aiplatform.NewEndpointClient(ctx)\n\tif err != nil {\n\t\t// TODO: Handle error.\n\t}\n\tdefer c.Close()\n\n\treq := &aiplatformpb.GetEndpointRequest{\n\t\t// TODO: Fill request struct fields.\n\t\t// See https://pkg.go.dev/cloud.google.com/go/aiplatform/apiv1beta1/aiplatformpb#GetEndpointRequest.\n\t}\n\tresp, err := c.GetEndpoint(ctx, req)\n\tif err != nil {\n\t\t// TODO: Handle error.\n\t}\n\t// TODO: Use resp.\n\t_ = resp\n}\n\n// [END aiplatform_v1beta1_generated_EndpointService_GetEndpoint_sync]\n", "meta": {"content_hash": "e3bbd8f0b19426a022d62a0e1da3edcc", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 102, "avg_line_length": 30.428571428571427, "alnum_prop": 0.7295774647887324, "repo_name": "googleapis/google-cloud-go", "id": "3067e39935255049f3815f4065db76bd9dfd979f", "size": "1814", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "internal/generated/snippets/aiplatform/apiv1beta1/EndpointClient/GetEndpoint/main.go", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Assembly", "bytes": "10349"}, {"name": "C", "bytes": "74"}, {"name": "Dockerfile", "bytes": "1841"}, {"name": "Go", "bytes": "7626642"}, {"name": "M4", "bytes": "43723"}, {"name": "Makefile", "bytes": "1455"}, {"name": "Python", "bytes": "718"}, 
{"name": "Shell", "bytes": "27309"}]}} {"text": "\n\n#include \n\n#if defined(SRS_AUTO_HTTP_CORE)\n\n#include \n#include \n#include \n#include \n\n#include \nusing namespace std;\n\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\n#endif\n\n#ifdef SRS_AUTO_HTTP_CORE\n\nSrsHttpResponseWriter::SrsHttpResponseWriter(SrsStSocket* io)\n{\n skt = io;\n hdr = new SrsHttpHeader();\n header_wrote = false;\n status = SRS_CONSTS_HTTP_OK;\n content_length = -1;\n written = 0;\n header_sent = false;\n nb_iovss_cache = 0;\n iovss_cache = NULL;\n}\n\nSrsHttpResponseWriter::~SrsHttpResponseWriter()\n{\n srs_freep(hdr);\n srs_freepa(iovss_cache);\n}\n\nint SrsHttpResponseWriter::final_request()\n{\n // write the header data in memory.\n if (!header_wrote) {\n write_header(SRS_CONSTS_HTTP_OK);\n }\n\n // complete the chunked encoding.\n if (content_length == -1) {\n std::stringstream ss;\n ss << 0 << SRS_HTTP_CRLF << SRS_HTTP_CRLF;\n std::string ch = ss.str();\n return skt->write((void*)ch.data(), (int)ch.length(), NULL);\n }\n \n // flush when send with content length\n return write(NULL, 0);\n}\n\nSrsHttpHeader* SrsHttpResponseWriter::header()\n{\n return hdr;\n}\n\nint SrsHttpResponseWriter::write(char* data, int size)\n{\n int ret = ERROR_SUCCESS;\n \n // write the header data in memory.\n if (!header_wrote) {\n write_header(SRS_CONSTS_HTTP_OK);\n }\n \n // whatever header is wrote, we should try to send header.\n if ((ret = send_header(data, size)) != ERROR_SUCCESS) {\n srs_error(\"http: send header failed. ret=%d\", ret);\n return ret;\n }\n \n // check the bytes send and content length.\n written += size;\n if (content_length != -1 && written > content_length) {\n ret = ERROR_HTTP_CONTENT_LENGTH;\n srs_error(\"http: exceed content length. 
ret=%d\", ret);\n return ret;\n }\n \n // ignore NULL content.\n if (!data) {\n return ret;\n }\n \n // directly send with content length\n if (content_length != -1) {\n return skt->write((void*)data, size, NULL);\n }\n \n // send in chunked encoding.\n int nb_size = snprintf(header_cache, SRS_HTTP_HEADER_CACHE_SIZE, \"%x\", size);\n \n iovec iovs[4];\n iovs[0].iov_base = (char*)header_cache;\n iovs[0].iov_len = (int)nb_size;\n iovs[1].iov_base = (char*)SRS_HTTP_CRLF;\n iovs[1].iov_len = 2;\n iovs[2].iov_base = (char*)data;\n iovs[2].iov_len = size;\n iovs[3].iov_base = (char*)SRS_HTTP_CRLF;\n iovs[3].iov_len = 2;\n \n ssize_t nwrite;\n if ((ret = skt->writev(iovs, 4, &nwrite)) != ERROR_SUCCESS) {\n return ret;\n }\n \n return ret;\n}\n\nint SrsHttpResponseWriter::writev(iovec* iov, int iovcnt, ssize_t* pnwrite)\n{\n int ret = ERROR_SUCCESS;\n \n // when header not ready, or not chunked, send one by one.\n if (!header_wrote || content_length != -1) {\n ssize_t nwrite = 0;\n for (int i = 0; i < iovcnt; i++) {\n iovec* piovc = iov + i;\n nwrite += piovc->iov_len;\n if ((ret = write((char*)piovc->iov_base, (int)piovc->iov_len)) != ERROR_SUCCESS) {\n return ret;\n }\n }\n \n if (pnwrite) {\n *pnwrite = nwrite;\n }\n \n return ret;\n }\n \n // ignore NULL content.\n if (iovcnt <= 0) {\n return ret;\n }\n \n // send in chunked encoding.\n int nb_iovss = 3 + iovcnt;\n iovec* iovss = iovss_cache;\n if (nb_iovss_cache < nb_iovss) {\n srs_freepa(iovss_cache);\n nb_iovss_cache = nb_iovss;\n iovss = iovss_cache = new iovec[nb_iovss];\n }\n \n // send in chunked encoding.\n \n // chunk size.\n int size = 0;\n for (int i = 0; i < iovcnt; i++) {\n iovec* data_iov = iov + i;\n size += data_iov->iov_len;\n }\n written += size;\n \n // chunk header\n int nb_size = snprintf(header_cache, SRS_HTTP_HEADER_CACHE_SIZE, \"%x\", size);\n iovec* iovs = iovss;\n iovs[0].iov_base = (char*)header_cache;\n iovs[0].iov_len = (int)nb_size;\n iovs++;\n \n // chunk header eof.\n iovs[0].iov_base = 
(char*)SRS_HTTP_CRLF;\n iovs[0].iov_len = 2;\n iovs++;\n \n // chunk body.\n for (int i = 0; i < iovcnt; i++) {\n iovec* data_iov = iov + i;\n iovs[0].iov_base = (char*)data_iov->iov_base;\n iovs[0].iov_len = (int)data_iov->iov_len;\n iovs++;\n }\n \n // chunk body eof.\n iovs[0].iov_base = (char*)SRS_HTTP_CRLF;\n iovs[0].iov_len = 2;\n iovs++;\n \n // sendout all ioves.\n ssize_t nwrite;\n if ((ret = srs_write_large_iovs(skt, iovss, nb_iovss, &nwrite)) != ERROR_SUCCESS) {\n return ret;\n }\n \n if (pnwrite) {\n *pnwrite = nwrite;\n }\n \n return ret;\n}\n\nvoid SrsHttpResponseWriter::write_header(int code)\n{\n if (header_wrote) {\n srs_warn(\"http: multiple write_header calls, code=%d\", code);\n return;\n }\n \n header_wrote = true;\n status = code;\n \n // parse the content length from header.\n content_length = hdr->content_length();\n}\n\nint SrsHttpResponseWriter::send_header(char* data, int size)\n{\n int ret = ERROR_SUCCESS;\n \n if (header_sent) {\n return ret;\n }\n header_sent = true;\n \n std::stringstream ss;\n \n // status_line\n ss << \"HTTP/1.1 \" << status << \" \"\n << srs_generate_http_status_text(status) << SRS_HTTP_CRLF;\n \n // detect content type\n if (srs_go_http_body_allowd(status)) {\n if (hdr->content_type().empty()) {\n hdr->set_content_type(srs_go_http_detect(data, size));\n }\n }\n \n // set server if not set.\n if (hdr->get(\"Server\").empty()) {\n hdr->set(\"Server\", RTMP_SIG_SRS_SERVER);\n }\n \n // chunked encoding\n if (content_length == -1) {\n hdr->set(\"Transfer-Encoding\", \"chunked\");\n }\n \n // keep alive to make vlc happy.\n hdr->set(\"Connection\", \"Keep-Alive\");\n \n // write headers\n hdr->write(ss);\n \n // header_eof\n ss << SRS_HTTP_CRLF;\n \n std::string buf = ss.str();\n return skt->write((void*)buf.c_str(), buf.length(), NULL);\n}\n\nSrsHttpResponseReader::SrsHttpResponseReader(SrsHttpMessage* msg, SrsStSocket* io)\n{\n skt = io;\n owner = msg;\n is_eof = false;\n nb_total_read = 0;\n nb_left_chunk = 0;\n 
buffer = NULL;\n}\n\nSrsHttpResponseReader::~SrsHttpResponseReader()\n{\n}\n\nint SrsHttpResponseReader::initialize(SrsFastBuffer* body)\n{\n int ret = ERROR_SUCCESS;\n \n nb_chunk = 0;\n nb_left_chunk = 0;\n nb_total_read = 0;\n buffer = body;\n \n return ret;\n}\n\nbool SrsHttpResponseReader::eof()\n{\n return is_eof;\n}\n\nint SrsHttpResponseReader::read(char* data, int nb_data, int* nb_read)\n{\n int ret = ERROR_SUCCESS;\n \n if (is_eof) {\n ret = ERROR_HTTP_RESPONSE_EOF;\n srs_error(\"http: response EOF. ret=%d\", ret);\n return ret;\n }\n \n // chunked encoding.\n if (owner->is_chunked()) {\n return read_chunked(data, nb_data, nb_read);\n }\n \n // read by specified content-length\n if (owner->content_length() != -1) {\n int max = (int)owner->content_length() - (int)nb_total_read;\n if (max <= 0) {\n is_eof = true;\n return ret;\n }\n \n // change the max to read.\n nb_data = srs_min(nb_data, max);\n return read_specified(data, nb_data, nb_read);\n }\n \n // infinite chunked mode, directly read.\n if (owner->is_infinite_chunked()) {\n srs_assert(!owner->is_chunked() && owner->content_length() == -1);\n return read_specified(data, nb_data, nb_read);\n }\n \n // infinite chunked mode, but user not set it,\n // we think there is no data left.\n is_eof = true;\n \n return ret;\n}\n\nint SrsHttpResponseReader::read_chunked(char* data, int nb_data, int* nb_read)\n{\n int ret = ERROR_SUCCESS;\n \n // when no bytes left in chunk,\n // parse the chunk length first.\n if (nb_left_chunk <= 0) {\n char* at = NULL;\n int length = 0;\n while (!at) {\n // find the CRLF of chunk header end.\n char* start = buffer->bytes();\n char* end = start + buffer->size();\n for (char* p = start; p < end - 1; p++) {\n if (p[0] == SRS_HTTP_CR && p[1] == SRS_HTTP_LF) {\n // invalid chunk, ignore.\n if (p == start) {\n ret = ERROR_HTTP_INVALID_CHUNK_HEADER;\n srs_error(\"chunk header start with CRLF. 
ret=%d\", ret);\n return ret;\n }\n length = (int)(p - start + 2);\n at = buffer->read_slice(length);\n break;\n }\n }\n \n // got at, ok.\n if (at) {\n break;\n }\n \n // when empty, only grow 1bytes, but the buffer will cache more.\n if ((ret = buffer->grow(skt, buffer->size() + 1)) != ERROR_SUCCESS) {\n if (!srs_is_client_gracefully_close(ret)) {\n srs_error(\"read body from server failed. ret=%d\", ret);\n }\n return ret;\n }\n }\n srs_assert(length >= 3);\n \n // it's ok to set the pos and pos+1 to NULL.\n at[length - 1] = 0;\n at[length - 2] = 0;\n \n // size is the bytes size, excludes the chunk header and end CRLF.\n int ilength = (int)::strtol(at, NULL, 16);\n if (ilength < 0) {\n ret = ERROR_HTTP_INVALID_CHUNK_HEADER;\n srs_error(\"chunk header negative, length=%d. ret=%d\", ilength, ret);\n return ret;\n }\n \n // all bytes in chunk is left now.\n nb_chunk = nb_left_chunk = ilength;\n }\n \n if (nb_chunk <= 0) {\n // for the last chunk, eof.\n is_eof = true;\n } else {\n // for not the last chunk, there must always exists bytes.\n // left bytes in chunk, read some.\n srs_assert(nb_left_chunk);\n \n int nb_bytes = srs_min(nb_left_chunk, nb_data);\n ret = read_specified(data, nb_bytes, &nb_bytes);\n \n // the nb_bytes used for output already read size of bytes.\n if (nb_read) {\n *nb_read = nb_bytes;\n }\n nb_left_chunk -= nb_bytes;\n srs_info(\"http: read %d bytes of chunk\", nb_bytes);\n \n // error or still left bytes in chunk, ignore and read in future.\n if (nb_left_chunk > 0 || (ret != ERROR_SUCCESS)) {\n return ret;\n }\n srs_info(\"http: read total chunk %dB\", nb_chunk);\n }\n \n // for both the last or not, the CRLF of chunk payload end.\n if ((ret = buffer->grow(skt, 2)) != ERROR_SUCCESS) {\n if (!srs_is_client_gracefully_close(ret)) {\n srs_error(\"read EOF of chunk from server failed. 
ret=%d\", ret);\n }\n return ret;\n }\n buffer->read_slice(2);\n \n return ret;\n}\n\nint SrsHttpResponseReader::read_specified(char* data, int nb_data, int* nb_read)\n{\n int ret = ERROR_SUCCESS;\n \n if (buffer->size() <= 0) {\n // when empty, only grow 1bytes, but the buffer will cache more.\n if ((ret = buffer->grow(skt, 1)) != ERROR_SUCCESS) {\n if (!srs_is_client_gracefully_close(ret)) {\n srs_error(\"read body from server failed. ret=%d\", ret);\n }\n return ret;\n }\n }\n \n int nb_bytes = srs_min(nb_data, buffer->size());\n \n // read data to buffer.\n srs_assert(nb_bytes);\n char* p = buffer->read_slice(nb_bytes);\n memcpy(data, p, nb_bytes);\n if (nb_read) {\n *nb_read = nb_bytes;\n }\n \n // increase the total read to determine whether EOF.\n nb_total_read += nb_bytes;\n \n // for not chunked and specified content length.\n if (!owner->is_chunked() && owner->content_length() != -1) {\n // when read completed, eof.\n if (nb_total_read >= (int)owner->content_length()) {\n is_eof = true;\n }\n }\n \n return ret;\n}\n\nSrsHttpMessage::SrsHttpMessage(SrsStSocket* io, SrsConnection* c) : ISrsHttpMessage()\n{\n conn = c;\n chunked = false;\n infinite_chunked = false;\n keep_alive = true;\n _uri = new SrsHttpUri();\n _body = new SrsHttpResponseReader(this, io);\n _http_ts_send_buffer = new char[SRS_HTTP_TS_SEND_BUFFER_SIZE];\n jsonp = false;\n}\n\nSrsHttpMessage::~SrsHttpMessage()\n{\n srs_freep(_body);\n srs_freep(_uri);\n srs_freepa(_http_ts_send_buffer);\n}\n\nint SrsHttpMessage::update(string url, http_parser* header, SrsFastBuffer* body, vector& headers)\n{\n int ret = ERROR_SUCCESS;\n \n _url = url;\n _header = *header;\n _headers = headers;\n \n // whether chunked.\n std::string transfer_encoding = get_request_header(\"Transfer-Encoding\");\n chunked = (transfer_encoding == \"chunked\");\n \n // whether keep alive.\n keep_alive = http_should_keep_alive(header);\n \n // set the buffer.\n if ((ret = _body->initialize(body)) != ERROR_SUCCESS) {\n return 
ret;\n }\n \n // parse uri from url.\n std::string host = get_request_header(\"Host\");\n \n // use server public ip when no host specified.\n // to make telnet happy.\n if (host.empty()) {\n host= srs_get_public_internet_address();\n }\n \n // parse uri to schema/server:port/path?query\n std::string uri = \"http://\" + host + _url;\n if ((ret = _uri->initialize(uri)) != ERROR_SUCCESS) {\n return ret;\n }\n \n // must format as key=value&...&keyN=valueN\n std::string q = _uri->get_query();\n size_t pos = string::npos;\n while (!q.empty()) {\n std::string k = q;\n if ((pos = q.find(\"=\")) != string::npos) {\n k = q.substr(0, pos);\n q = q.substr(pos + 1);\n } else {\n q = \"\";\n }\n \n std::string v = q;\n if ((pos = q.find(\"&\")) != string::npos) {\n v = q.substr(0, pos);\n q = q.substr(pos + 1);\n } else {\n q = \"\";\n }\n \n _query[k] = v;\n }\n \n // parse ext.\n _ext = _uri->get_path();\n if ((pos = _ext.rfind(\".\")) != string::npos) {\n _ext = _ext.substr(pos);\n } else {\n _ext = \"\";\n }\n \n // parse jsonp request message.\n if (!query_get(\"callback\").empty()) {\n jsonp = true;\n }\n if (jsonp) {\n jsonp_method = query_get(\"method\");\n }\n \n return ret;\n}\n\nSrsConnection* SrsHttpMessage::connection()\n{\n return conn;\n}\n\nu_int8_t SrsHttpMessage::method()\n{\n if (jsonp && !jsonp_method.empty()) {\n if (jsonp_method == \"GET\") {\n return SRS_CONSTS_HTTP_GET;\n } else if (jsonp_method == \"PUT\") {\n return SRS_CONSTS_HTTP_PUT;\n } else if (jsonp_method == \"POST\") {\n return SRS_CONSTS_HTTP_POST;\n } else if (jsonp_method == \"DELETE\") {\n return SRS_CONSTS_HTTP_DELETE;\n }\n }\n \n return (u_int8_t)_header.method;\n}\n\nu_int16_t SrsHttpMessage::status_code()\n{\n return (u_int16_t)_header.status_code;\n}\n\nstring SrsHttpMessage::method_str()\n{\n if (jsonp && !jsonp_method.empty()) {\n return jsonp_method;\n }\n \n if (is_http_get()) {\n return \"GET\";\n }\n if (is_http_put()) {\n return \"PUT\";\n }\n if (is_http_post()) {\n return 
\"POST\";\n }\n if (is_http_delete()) {\n return \"DELETE\";\n }\n if (is_http_options()) {\n return \"OPTIONS\";\n }\n \n return \"OTHER\";\n}\n\nbool SrsHttpMessage::is_http_get()\n{\n return method() == SRS_CONSTS_HTTP_GET;\n}\n\nbool SrsHttpMessage::is_http_put()\n{\n return method() == SRS_CONSTS_HTTP_PUT;\n}\n\nbool SrsHttpMessage::is_http_post()\n{\n return method() == SRS_CONSTS_HTTP_POST;\n}\n\nbool SrsHttpMessage::is_http_delete()\n{\n return method() == SRS_CONSTS_HTTP_DELETE;\n}\n\nbool SrsHttpMessage::is_http_options()\n{\n return _header.method == SRS_CONSTS_HTTP_OPTIONS;\n}\n\nbool SrsHttpMessage::is_chunked()\n{\n return chunked;\n}\n\nbool SrsHttpMessage::is_keep_alive()\n{\n return keep_alive;\n}\n\nbool SrsHttpMessage::is_infinite_chunked()\n{\n return infinite_chunked;\n}\n\nstring SrsHttpMessage::uri()\n{\n std::string uri = _uri->get_schema();\n if (uri.empty()) {\n uri += \"http\";\n }\n uri += \"://\";\n \n uri += host();\n uri += path();\n \n return uri;\n}\n\nstring SrsHttpMessage::url()\n{\n return _uri->get_url();\n}\n\nstring SrsHttpMessage::host()\n{\n return _uri->get_host();\n}\n\nstring SrsHttpMessage::path()\n{\n return _uri->get_path();\n}\n\nstring SrsHttpMessage::query()\n{\n return _uri->get_query();\n}\n\nstring SrsHttpMessage::ext()\n{\n return _ext;\n}\n\nint SrsHttpMessage::parse_rest_id(string pattern)\n{\n string p = _uri->get_path();\n if (p.length() <= pattern.length()) {\n return -1;\n }\n \n string id = p.substr((int)pattern.length());\n if (!id.empty()) {\n return ::atoi(id.c_str());\n }\n \n return -1;\n}\n\nint SrsHttpMessage::parse_rest_str(std::string pattern, std::string& req)\n{\n int ret = ERROR_SUCCESS;\n\t\n string p = _uri->get_path();\n if (p.length() <= pattern.length()) {\n return -1;\n }\n \n req = p.substr((int)(pattern.length() - 1));\n if (req.empty()) {\n return -1;\n }\n \n return ret;\t\n}\n\nint SrsHttpMessage::enter_infinite_chunked()\n{\n int ret = ERROR_SUCCESS;\n \n if (infinite_chunked) {\n 
return ret;\n }\n \n if (is_chunked() || content_length() != -1) {\n ret = ERROR_HTTP_DATA_INVALID;\n srs_error(\"infinite chunkted not supported in specified codec. ret=%d\", ret);\n return ret;\n }\n \n infinite_chunked = true;\n \n return ret;\n}\n\nint SrsHttpMessage::body_read_all(string& body)\n{\n int ret = ERROR_SUCCESS;\n \n // cache to read.\n char* buf = new char[SRS_HTTP_READ_CACHE_BYTES];\n SrsAutoFreeA(char, buf);\n \n // whatever, read util EOF.\n while (!_body->eof()) {\n int nb_read = 0;\n if ((ret = _body->read(buf, SRS_HTTP_READ_CACHE_BYTES, &nb_read)) != ERROR_SUCCESS) {\n return ret;\n }\n \n if (nb_read > 0) {\n body.append(buf, nb_read);\n }\n }\n \n return ret;\n}\n\nISrsHttpResponseReader* SrsHttpMessage::body_reader()\n{\n return _body;\n}\n\nint64_t SrsHttpMessage::content_length()\n{\n return _header.content_length;\n}\n\nstring SrsHttpMessage::query_get(string key)\n{\n std::string v;\n \n if (_query.find(key) != _query.end()) {\n v = _query[key];\n }\n \n return v;\n}\n\nint SrsHttpMessage::request_header_count()\n{\n return (int)_headers.size();\n}\n\nstring SrsHttpMessage::request_header_key_at(int index)\n{\n srs_assert(index < request_header_count());\n SrsHttpHeaderField item = _headers[index];\n return item.first;\n}\n\nstring SrsHttpMessage::request_header_value_at(int index)\n{\n srs_assert(index < request_header_count());\n SrsHttpHeaderField item = _headers[index];\n return item.second;\n}\n\nstring SrsHttpMessage::get_request_header(string name)\n{\n std::vector::iterator it;\n \n for (it = _headers.begin(); it != _headers.end(); ++it) {\n SrsHttpHeaderField& elem = *it;\n std::string key = elem.first;\n std::string value = elem.second;\n if (key == name) {\n return value;\n }\n }\n \n return \"\";\n}\n\nSrsRequest* SrsHttpMessage::to_request(string vhost)\n{\n SrsRequest* req = new SrsRequest();\n \n req->app = _uri->get_path();\n size_t pos = string::npos;\n if ((pos = req->app.rfind(\"/\")) != string::npos) {\n 
req->stream = req->app.substr(pos + 1);\n req->app = req->app.substr(0, pos);\n }\n if ((pos = req->stream.rfind(\".\")) != string::npos) {\n req->stream = req->stream.substr(0, pos);\n }\n \n req->tcUrl = \"rtmp://\" + vhost + req->app;\n req->pageUrl = get_request_header(\"Referer\");\n req->objectEncoding = 0;\n \n srs_discovery_tc_url(req->tcUrl,\n req->schema, req->host, req->vhost, req->app, req->port,\n req->param);\n req->strip();\n \n return req;\n}\n\nbool SrsHttpMessage::is_jsonp()\n{\n return jsonp;\n}\n\nSrsHttpParser::SrsHttpParser()\n{\n buffer = new SrsFastBuffer();\n}\n\nSrsHttpParser::~SrsHttpParser()\n{\n srs_freep(buffer);\n}\n\nint SrsHttpParser::initialize(enum http_parser_type type)\n{\n int ret = ERROR_SUCCESS;\n \n memset(&settings, 0, sizeof(settings));\n settings.on_message_begin = on_message_begin;\n settings.on_url = on_url;\n settings.on_header_field = on_header_field;\n settings.on_header_value = on_header_value;\n settings.on_headers_complete = on_headers_complete;\n settings.on_body = on_body;\n settings.on_message_complete = on_message_complete;\n \n http_parser_init(&parser, type);\n // callback object ptr.\n parser.data = (void*)this;\n \n return ret;\n}\n\nint SrsHttpParser::parse_message(SrsStSocket* skt, SrsConnection* conn, ISrsHttpMessage** ppmsg)\n{\n *ppmsg = NULL;\n \n int ret = ERROR_SUCCESS;\n \n // reset request data.\n field_name = \"\";\n field_value = \"\";\n expect_field_name = true;\n state = SrsHttpParseStateInit;\n header = http_parser();\n url = \"\";\n headers.clear();\n header_parsed = 0;\n \n // do parse\n if ((ret = parse_message_imp(skt)) != ERROR_SUCCESS) {\n if (!srs_is_client_gracefully_close(ret)) {\n srs_error(\"parse http msg failed. ret=%d\", ret);\n }\n return ret;\n }\n \n // create msg\n SrsHttpMessage* msg = new SrsHttpMessage(skt, conn);\n \n // initalize http msg, parse url.\n if ((ret = msg->update(url, &header, buffer, headers)) != ERROR_SUCCESS) {\n srs_error(\"initialize http msg failed. 
ret=%d\", ret);\n srs_freep(msg);\n return ret;\n }\n \n // parse ok, return the msg.\n *ppmsg = msg;\n \n return ret;\n}\n\nint SrsHttpParser::parse_message_imp(SrsStSocket* skt)\n{\n int ret = ERROR_SUCCESS;\n \n while (true) {\n ssize_t nparsed = 0;\n \n // when got entire http header, parse it.\n // @see https://github.com/ossrs/srs/issues/400\n char* start = buffer->bytes();\n char* end = start + buffer->size();\n for (char* p = start; p <= end - 4; p++) {\n // SRS_HTTP_CRLFCRLF \"\\r\\n\\r\\n\" // 0x0D0A0D0A\n if (p[0] == SRS_CONSTS_CR && p[1] == SRS_CONSTS_LF && p[2] == SRS_CONSTS_CR && p[3] == SRS_CONSTS_LF) {\n nparsed = http_parser_execute(&parser, &settings, buffer->bytes(), buffer->size());\n srs_info(\"buffer=%d, nparsed=%d, header=%d\", buffer->size(), (int)nparsed, header_parsed);\n break;\n }\n }\n \n // consume the parsed bytes.\n if (nparsed && header_parsed) {\n buffer->read_slice(header_parsed);\n }\n \n // ok atleast header completed,\n // never wait for body completed, for maybe chunked.\n if (state == SrsHttpParseStateHeaderComplete || state == SrsHttpParseStateMessageComplete) {\n break;\n }\n \n // when nothing parsed, read more to parse.\n if (nparsed == 0) {\n // when requires more, only grow 1bytes, but the buffer will cache more.\n if ((ret = buffer->grow(skt, buffer->size() + 1)) != ERROR_SUCCESS) {\n if (!srs_is_client_gracefully_close(ret)) {\n srs_error(\"read body from server failed. 
ret=%d\", ret);\n }\n return ret;\n }\n }\n }\n \n // parse last header.\n if (!field_name.empty() && !field_value.empty()) {\n headers.push_back(std::make_pair(field_name, field_value));\n }\n \n return ret;\n}\n\nint SrsHttpParser::on_message_begin(http_parser* parser)\n{\n SrsHttpParser* obj = (SrsHttpParser*)parser->data;\n srs_assert(obj);\n \n obj->state = SrsHttpParseStateStart;\n \n srs_info(\"***MESSAGE BEGIN***\");\n \n return 0;\n}\n\nint SrsHttpParser::on_headers_complete(http_parser* parser)\n{\n SrsHttpParser* obj = (SrsHttpParser*)parser->data;\n srs_assert(obj);\n \n obj->header = *parser;\n // save the parser when header parse completed.\n obj->state = SrsHttpParseStateHeaderComplete;\n obj->header_parsed = (int)parser->nread;\n \n srs_info(\"***HEADERS COMPLETE***\");\n \n // see http_parser.c:1570, return 1 to skip body.\n return 0;\n}\n\nint SrsHttpParser::on_message_complete(http_parser* parser)\n{\n SrsHttpParser* obj = (SrsHttpParser*)parser->data;\n srs_assert(obj);\n \n // save the parser when body parse completed.\n obj->state = SrsHttpParseStateMessageComplete;\n \n srs_info(\"***MESSAGE COMPLETE***\\n\");\n \n return 0;\n}\n\nint SrsHttpParser::on_url(http_parser* parser, const char* at, size_t length)\n{\n SrsHttpParser* obj = (SrsHttpParser*)parser->data;\n srs_assert(obj);\n \n if (length > 0) {\n obj->url.append(at, (int)length);\n }\n \n srs_info(\"Method: %d, Url: %.*s\", parser->method, (int)length, at);\n \n return 0;\n}\n\nint SrsHttpParser::on_header_field(http_parser* parser, const char* at, size_t length)\n{\n SrsHttpParser* obj = (SrsHttpParser*)parser->data;\n srs_assert(obj);\n \n // field value=>name, reap the field.\n if (!obj->expect_field_name) {\n obj->headers.push_back(std::make_pair(obj->field_name, obj->field_value));\n \n // reset the field name when parsed.\n obj->field_name = \"\";\n obj->field_value = \"\";\n }\n obj->expect_field_name = true;\n \n if (length > 0) {\n obj->field_name.append(at, (int)length);\n 
}\n \n srs_info(\"Header field(%d bytes): %.*s\", (int)length, (int)length, at);\n return 0;\n}\n\nint SrsHttpParser::on_header_value(http_parser* parser, const char* at, size_t length)\n{\n SrsHttpParser* obj = (SrsHttpParser*)parser->data;\n srs_assert(obj);\n \n if (length > 0) {\n obj->field_value.append(at, (int)length);\n }\n obj->expect_field_name = false;\n \n srs_info(\"Header value(%d bytes): %.*s\", (int)length, (int)length, at);\n return 0;\n}\n\nint SrsHttpParser::on_body(http_parser* parser, const char* at, size_t length)\n{\n SrsHttpParser* obj = (SrsHttpParser*)parser->data;\n srs_assert(obj);\n \n srs_info(\"Body: %.*s\", (int)length, at);\n \n return 0;\n}\n\nSrsHttpUri::SrsHttpUri()\n{\n port = SRS_DEFAULT_HTTP_PORT;\n}\n\nSrsHttpUri::~SrsHttpUri()\n{\n}\n\nint SrsHttpUri::initialize(string _url)\n{\n int ret = ERROR_SUCCESS;\n \n url = _url;\n const char* purl = url.c_str();\n \n http_parser_url hp_u;\n if((ret = http_parser_parse_url(purl, url.length(), 0, &hp_u)) != 0){\n int code = ret;\n ret = ERROR_HTTP_PARSE_URI;\n \n srs_error(\"parse url %s failed, code=%d, ret=%d\", purl, code, ret);\n return ret;\n }\n \n std::string field = get_uri_field(url, &hp_u, UF_SCHEMA);\n if(!field.empty()){\n schema = field;\n }\n \n host = get_uri_field(url, &hp_u, UF_HOST);\n \n field = get_uri_field(url, &hp_u, UF_PORT);\n if(!field.empty()){\n port = atoi(field.c_str());\n }\n \n path = get_uri_field(url, &hp_u, UF_PATH);\n srs_info(\"parse url %s success\", purl);\n \n query = get_uri_field(url, &hp_u, UF_QUERY);\n srs_info(\"parse query %s success\", query.c_str());\n \n return ret;\n}\n\nconst char* SrsHttpUri::get_url()\n{\n return url.data();\n}\n\nconst char* SrsHttpUri::get_schema()\n{\n return schema.data();\n}\n\nconst char* SrsHttpUri::get_host()\n{\n return host.data();\n}\n\nint SrsHttpUri::get_port()\n{\n return port;\n}\n\nconst char* SrsHttpUri::get_path()\n{\n return path.data();\n}\n\nconst char* SrsHttpUri::get_query()\n{\n return 
query.data();\n}\n\nstring SrsHttpUri::get_uri_field(string uri, http_parser_url* hp_u, http_parser_url_fields field)\n{\n if((hp_u->field_set & (1 << field)) == 0){\n return \"\";\n }\n \n srs_verbose(\"uri field matched, off=%d, len=%d, value=%.*s\",\n hp_u->field_data[field].off,\n hp_u->field_data[field].len,\n hp_u->field_data[field].len,\n uri.c_str() + hp_u->field_data[field].off);\n \n int offset = hp_u->field_data[field].off;\n int len = hp_u->field_data[field].len;\n \n return uri.substr(offset, len);\n}\n\nSrsHttpConn::SrsHttpConn(IConnectionManager* cm, st_netfd_t fd, ISrsHttpServeMux* m)\n : SrsConnection(cm, fd)\n{\n parser = new SrsHttpParser();\n http_mux = m;\n}\n\nSrsHttpConn::~SrsHttpConn()\n{\n srs_freep(parser);\n}\n\nvoid SrsHttpConn::resample()\n{\n // TODO: FIXME: implements it\n}\n\nint64_t SrsHttpConn::get_send_bytes_delta()\n{\n // TODO: FIXME: implements it\n return 0;\n}\n\nint64_t SrsHttpConn::get_recv_bytes_delta()\n{\n // TODO: FIXME: implements it\n return 0;\n}\n\nvoid SrsHttpConn::cleanup()\n{\n // TODO: FIXME: implements it\n}\n\nint SrsHttpConn::do_cycle()\n{\n int ret = ERROR_SUCCESS;\n \n srs_trace(\"HTTP client ip=%s\", ip.c_str());\n \n // initialize parser\n if ((ret = parser->initialize(HTTP_REQUEST)) != ERROR_SUCCESS) {\n srs_error(\"http initialize http parser failed. 
ret=%d\", ret);\n return ret;\n }\n \n // underlayer socket\n SrsStSocket skt(stfd);\n \n // set the recv timeout, for some clients never disconnect the connection.\n // @see https://github.com/ossrs/srs/issues/398\n skt.set_recv_timeout(SRS_HTTP_RECV_TIMEOUT_US);\n \n // process http messages.\n while (!disposed) {\n ISrsHttpMessage* req = NULL;\n \n // get a http message\n if ((ret = parser->parse_message(&skt, this, &req)) != ERROR_SUCCESS) {\n return ret;\n }\n\n // if SUCCESS, always NOT-NULL.\n srs_assert(req);\n \n // always free it in this scope.\n SrsAutoFree(ISrsHttpMessage, req);\n \n // may should discard the body.\n if ((ret = on_got_http_message(req)) != ERROR_SUCCESS) {\n return ret;\n }\n \n // ok, handle http request.\n SrsHttpResponseWriter writer(&skt);\n if ((ret = process_request(&writer, req)) != ERROR_SUCCESS) {\n return ret;\n }\n \n // donot keep alive, disconnect it.\n // @see https://github.com/ossrs/srs/issues/399\n if (!req->is_keep_alive()) {\n break;\n }\n }\n \n return ret;\n}\n\nint SrsHttpConn::process_request(ISrsHttpResponseWriter* w, ISrsHttpMessage* r) \n{\n int ret = ERROR_SUCCESS;\n \n srs_trace(\"HTTP %s %s, content-length=%\"PRId64\"\", \n r->method_str().c_str(), r->url().c_str(), r->content_length());\n \n // use default server mux to serve http request.\n if ((ret = http_mux->serve_http(w, r)) != ERROR_SUCCESS) {\n if (!srs_is_client_gracefully_close(ret)) {\n srs_error(\"serve http msg failed. 
ret=%d\", ret);\n }\n return ret;\n }\n \n return ret;\n}\n\nSrsResponseOnlyHttpConn::SrsResponseOnlyHttpConn(IConnectionManager* cm, st_netfd_t fd, ISrsHttpServeMux* m)\n : SrsHttpConn(cm, fd, m)\n{\n}\n\nSrsResponseOnlyHttpConn::~SrsResponseOnlyHttpConn()\n{\n}\n\nint SrsResponseOnlyHttpConn::on_got_http_message(ISrsHttpMessage* msg)\n{\n int ret = ERROR_SUCCESS;\n \n ISrsHttpResponseReader* br = msg->body_reader();\n \n // drop all request body.\n while (!br->eof()) {\n char body[4096];\n if ((ret = br->read(body, 4096, NULL)) != ERROR_SUCCESS) {\n return ret;\n }\n }\n \n return ret;\n}\n\nSrsHttpServer::SrsHttpServer(SrsServer* svr)\n{\n server = svr;\n http_stream = new SrsHttpStreamServer(svr);\n http_static = new SrsHttpStaticServer(svr);\n}\n\nSrsHttpServer::~SrsHttpServer()\n{\n srs_freep(http_stream);\n srs_freep(http_static);\n}\n\nint SrsHttpServer::initialize()\n{\n int ret = ERROR_SUCCESS;\n \n#if defined(SRS_AUTO_HTTP_SERVER) && defined(SRS_AUTO_HTTP_API)\n // for SRS go-sharp to detect the status of HTTP server of SRS HTTP FLV Cluster.\n if ((ret = http_static->mux.handle(\"/api/v1/versions\", new SrsGoApiVersion())) != ERROR_SUCCESS) {\n return ret;\n }\n#endif\n \n if ((ret = http_stream->initialize()) != ERROR_SUCCESS) {\n return ret;\n }\n \n if ((ret = http_static->initialize()) != ERROR_SUCCESS) {\n return ret;\n }\n \n return ret;\n}\n\nint SrsHttpServer::serve_http(ISrsHttpResponseWriter* w, ISrsHttpMessage* r)\n{\n // try http stream first.\n if (http_stream->mux.can_serve(r)) {\n return http_stream->mux.serve_http(w, r);\n }\n \n return http_static->mux.serve_http(w, r);\n}\n\nint SrsHttpServer::http_mount(SrsSource* s, SrsRequest* r)\n{\n return http_stream->http_mount(s, r);\n}\n\nvoid SrsHttpServer::http_unmount(SrsSource* s, SrsRequest* r)\n{\n http_stream->http_unmount(s, r);\n}\n\nint SrsHttpServer::mount_hls(SrsRequest* r)\n{\n return http_stream->mount_hls(r);\n}\n\nint SrsHttpServer::hls_update_m3u8(SrsRequest* r, std::string 
m3u8)\n{\n return http_stream->hls_update_m3u8(r, m3u8);\n}\n\nint SrsHttpServer::hls_update_ts(SrsRequest* r, std::string uri, std::string ts)\n{\n return http_stream->hls_update_ts(r, uri, ts);\n}\n\nint SrsHttpServer::hls_remove_ts(SrsRequest* r, std::string uri)\n{\n return http_stream->hls_remove_ts(r, uri);\n}\n\nvoid SrsHttpServer::unmount_hls(SrsRequest* r)\n{\n http_stream->unmount_hls(r);\n}\n\n#endif\n\n", "meta": {"content_hash": "3f29c672a4cfca6217a200173361dbbb", "timestamp": "", "source": "github", "line_count": 1407, "max_line_length": 117, "avg_line_length": 24.26226012793177, "alnum_prop": 0.5610920701877729, "repo_name": "wangcy6/storm_app", "id": "5f1096f8e9c3bed5f39751201e93be20ab6b9a7d", "size": "35224", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "frame/c++/srs-2.0release/trunk/src/app/srs_app_http_conn.cpp", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ActionScript", "bytes": "86225"}, {"name": "Assembly", "bytes": "4834"}, {"name": "Batchfile", "bytes": "50141"}, {"name": "C", "bytes": "9700081"}, {"name": "C#", "bytes": "1587148"}, {"name": "C++", "bytes": "14378340"}, {"name": "CMake", "bytes": "756439"}, {"name": "CSS", "bytes": "59712"}, {"name": "Clojure", "bytes": "535480"}, {"name": "DTrace", "bytes": "147"}, {"name": "Fancy", "bytes": "6234"}, {"name": "FreeMarker", "bytes": "3512"}, {"name": "Go", "bytes": "27069"}, {"name": "Groovy", "bytes": "1755"}, {"name": "HTML", "bytes": "1235479"}, {"name": "Java", "bytes": "41653938"}, {"name": "JavaScript", "bytes": "260093"}, {"name": "Lua", "bytes": "11887"}, {"name": "M4", "bytes": "96283"}, {"name": "Makefile", "bytes": "977879"}, {"name": "NSIS", "bytes": "6522"}, {"name": "Objective-C", "bytes": "324010"}, {"name": "PHP", "bytes": "348909"}, {"name": "Perl", "bytes": "182487"}, {"name": "PowerShell", "bytes": "19465"}, {"name": "Prolog", "bytes": "243"}, {"name": "Python", "bytes": "3649738"}, {"name": "QML", "bytes": "9975"}, {"name": 
"QMake", "bytes": "63106"}, {"name": "Roff", "bytes": "12319"}, {"name": "Ruby", "bytes": "858066"}, {"name": "Scala", "bytes": "5203874"}, {"name": "Shell", "bytes": "714435"}, {"name": "Smarty", "bytes": "1047"}, {"name": "Swift", "bytes": "3486"}, {"name": "Tcl", "bytes": "492616"}, {"name": "Thrift", "bytes": "31449"}, {"name": "XS", "bytes": "20183"}, {"name": "XSLT", "bytes": "8784"}]}} {"text": "\n\n@property (strong, nonatomic) UIWindow *window;\n\n@property (strong, nonatomic) DVSlideViewController *viewController;\n\n@end\n", "meta": {"content_hash": "da256218503a3138dfc74336ab869fed", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 68, "avg_line_length": 24.11111111111111, "alnum_prop": 0.8110599078341014, "repo_name": "dickverbunt/DVSlideViewController", "id": "0dbbfa4a8fe0378a77819684dc726b5500fdbac3", "size": "398", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "DVSlideViewController/AppDelegate.h", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Objective-C", "bytes": "13207"}]}} {"text": "\n\npackage lifecycle\n\nimport (\n\t\"fmt\"\n\t\"net\"\n\t\"net/http\"\n\t\"strconv\"\n\n\t\"k8s.io/api/core/v1\"\n\t\"k8s.io/apimachinery/pkg/types\"\n\t\"k8s.io/apimachinery/pkg/util/intstr\"\n\t\"k8s.io/klog\"\n\tkubecontainer \"k8s.io/kubernetes/pkg/kubelet/container\"\n\tkubetypes \"k8s.io/kubernetes/pkg/kubelet/types\"\n\t\"k8s.io/kubernetes/pkg/kubelet/util/format\"\n\t\"k8s.io/kubernetes/pkg/security/apparmor\"\n\tutilio \"k8s.io/utils/io\"\n)\n\nconst (\n\tmaxRespBodyLength = 10 * 1 << 10 // 10KB\n)\n\ntype HandlerRunner struct {\n\thttpGetter kubetypes.HttpGetter\n\tcommandRunner kubecontainer.ContainerCommandRunner\n\tcontainerManager podStatusProvider\n}\n\ntype podStatusProvider interface {\n\tGetPodStatus(uid types.UID, name, namespace string) (*kubecontainer.PodStatus, error)\n}\n\nfunc NewHandlerRunner(httpGetter kubetypes.HttpGetter, commandRunner 
kubecontainer.ContainerCommandRunner, containerManager podStatusProvider) kubecontainer.HandlerRunner {\n\treturn &HandlerRunner{\n\t\thttpGetter: httpGetter,\n\t\tcommandRunner: commandRunner,\n\t\tcontainerManager: containerManager,\n\t}\n}\n\nfunc (hr *HandlerRunner) Run(containerID kubecontainer.ContainerID, pod *v1.Pod, container *v1.Container, handler *v1.Handler) (string, error) {\n\tswitch {\n\tcase handler.Exec != nil:\n\t\tvar msg string\n\t\t// TODO(tallclair): Pass a proper timeout value.\n\t\toutput, err := hr.commandRunner.RunInContainer(containerID, handler.Exec.Command, 0)\n\t\tif err != nil {\n\t\t\tmsg = fmt.Sprintf(\"Exec lifecycle hook (%v) for Container %q in Pod %q failed - error: %v, message: %q\", handler.Exec.Command, container.Name, format.Pod(pod), err, string(output))\n\t\t\tklog.V(1).Infof(msg)\n\t\t}\n\t\treturn msg, err\n\tcase handler.HTTPGet != nil:\n\t\tmsg, err := hr.runHTTPHandler(pod, container, handler)\n\t\tif err != nil {\n\t\t\tmsg = fmt.Sprintf(\"Http lifecycle hook (%s) for Container %q in Pod %q failed - error: %v, message: %q\", handler.HTTPGet.Path, container.Name, format.Pod(pod), err, msg)\n\t\t\tklog.V(1).Infof(msg)\n\t\t}\n\t\treturn msg, err\n\tdefault:\n\t\terr := fmt.Errorf(\"Invalid handler: %v\", handler)\n\t\tmsg := fmt.Sprintf(\"Cannot run handler: %v\", err)\n\t\tklog.Errorf(msg)\n\t\treturn msg, err\n\t}\n}\n\n// resolvePort attempts to turn an IntOrString port reference into a concrete port number.\n// If portReference has an int value, it is treated as a literal, and simply returns that value.\n// If portReference is a string, an attempt is first made to parse it as an integer. If that fails,\n// an attempt is made to find a port with the same name in the container spec.\n// If a port with the same name is found, it's ContainerPort value is returned. 
If no matching\n// port is found, an error is returned.\nfunc resolvePort(portReference intstr.IntOrString, container *v1.Container) (int, error) {\n\tif portReference.Type == intstr.Int {\n\t\treturn portReference.IntValue(), nil\n\t}\n\tportName := portReference.StrVal\n\tport, err := strconv.Atoi(portName)\n\tif err == nil {\n\t\treturn port, nil\n\t}\n\tfor _, portSpec := range container.Ports {\n\t\tif portSpec.Name == portName {\n\t\t\treturn int(portSpec.ContainerPort), nil\n\t\t}\n\t}\n\treturn -1, fmt.Errorf(\"couldn't find port: %v in %v\", portReference, container)\n}\n\nfunc (hr *HandlerRunner) runHTTPHandler(pod *v1.Pod, container *v1.Container, handler *v1.Handler) (string, error) {\n\thost := handler.HTTPGet.Host\n\tif len(host) == 0 {\n\t\tstatus, err := hr.containerManager.GetPodStatus(pod.UID, pod.Name, pod.Namespace)\n\t\tif err != nil {\n\t\t\tklog.Errorf(\"Unable to get pod info, event handlers may be invalid.\")\n\t\t\treturn \"\", err\n\t\t}\n\t\tif len(status.IPs) == 0 {\n\t\t\treturn \"\", fmt.Errorf(\"failed to find networking container: %v\", status)\n\t\t}\n\t\thost = status.IPs[0]\n\t}\n\tvar port int\n\tif handler.HTTPGet.Port.Type == intstr.String && len(handler.HTTPGet.Port.StrVal) == 0 {\n\t\tport = 80\n\t} else {\n\t\tvar err error\n\t\tport, err = resolvePort(handler.HTTPGet.Port, container)\n\t\tif err != nil {\n\t\t\treturn \"\", err\n\t\t}\n\t}\n\turl := fmt.Sprintf(\"http://%s/%s\", net.JoinHostPort(host, strconv.Itoa(port)), handler.HTTPGet.Path)\n\tresp, err := hr.httpGetter.Get(url)\n\treturn getHttpRespBody(resp), err\n}\n\nfunc getHttpRespBody(resp *http.Response) string {\n\tif resp == nil {\n\t\treturn \"\"\n\t}\n\tdefer resp.Body.Close()\n\tbytes, err := utilio.ReadAtMost(resp.Body, maxRespBodyLength)\n\tif err == nil || err == utilio.ErrLimitReached {\n\t\treturn string(bytes)\n\t}\n\treturn \"\"\n}\n\nfunc NewAppArmorAdmitHandler(validator apparmor.Validator) PodAdmitHandler {\n\treturn 
&appArmorAdmitHandler{\n\t\tValidator: validator,\n\t}\n}\n\ntype appArmorAdmitHandler struct {\n\tapparmor.Validator\n}\n\nfunc (a *appArmorAdmitHandler) Admit(attrs *PodAdmitAttributes) PodAdmitResult {\n\t// If the pod is already running or terminated, no need to recheck AppArmor.\n\tif attrs.Pod.Status.Phase != v1.PodPending {\n\t\treturn PodAdmitResult{Admit: true}\n\t}\n\n\terr := a.Validate(attrs.Pod)\n\tif err == nil {\n\t\treturn PodAdmitResult{Admit: true}\n\t}\n\treturn PodAdmitResult{\n\t\tAdmit: false,\n\t\tReason: \"AppArmor\",\n\t\tMessage: fmt.Sprintf(\"Cannot enforce AppArmor: %v\", err),\n\t}\n}\n\nfunc NewNoNewPrivsAdmitHandler(runtime kubecontainer.Runtime) PodAdmitHandler {\n\treturn &noNewPrivsAdmitHandler{\n\t\tRuntime: runtime,\n\t}\n}\n\ntype noNewPrivsAdmitHandler struct {\n\tkubecontainer.Runtime\n}\n\nfunc (a *noNewPrivsAdmitHandler) Admit(attrs *PodAdmitAttributes) PodAdmitResult {\n\t// If the pod is already running or terminated, no need to recheck NoNewPrivs.\n\tif attrs.Pod.Status.Phase != v1.PodPending {\n\t\treturn PodAdmitResult{Admit: true}\n\t}\n\n\t// If the containers in a pod do not require no-new-privs, admit it.\n\tif !noNewPrivsRequired(attrs.Pod) {\n\t\treturn PodAdmitResult{Admit: true}\n\t}\n\n\t// Always admit runtimes except docker.\n\tif a.Runtime.Type() != kubetypes.DockerContainerRuntime {\n\t\treturn PodAdmitResult{Admit: true}\n\t}\n\n\t// Make sure docker api version is valid.\n\trversion, err := a.Runtime.APIVersion()\n\tif err != nil {\n\t\treturn PodAdmitResult{\n\t\t\tAdmit: false,\n\t\t\tReason: \"NoNewPrivs\",\n\t\t\tMessage: fmt.Sprintf(\"Cannot enforce NoNewPrivs: %v\", err),\n\t\t}\n\t}\n\tv, err := rversion.Compare(\"1.23.0\")\n\tif err != nil {\n\t\treturn PodAdmitResult{\n\t\t\tAdmit: false,\n\t\t\tReason: \"NoNewPrivs\",\n\t\t\tMessage: fmt.Sprintf(\"Cannot enforce NoNewPrivs: %v\", err),\n\t\t}\n\t}\n\t// If the version is less than 1.23 it will return -1 above.\n\tif v == -1 {\n\t\treturn 
PodAdmitResult{\n\t\t\tAdmit: false,\n\t\t\tReason: \"NoNewPrivs\",\n\t\t\tMessage: fmt.Sprintf(\"Cannot enforce NoNewPrivs: docker runtime API version %q must be greater than or equal to 1.23\", rversion.String()),\n\t\t}\n\t}\n\n\treturn PodAdmitResult{Admit: true}\n}\n\nfunc noNewPrivsRequired(pod *v1.Pod) bool {\n\t// Iterate over pod containers and check if we added no-new-privs.\n\tfor _, c := range pod.Spec.Containers {\n\t\tif c.SecurityContext != nil && c.SecurityContext.AllowPrivilegeEscalation != nil && !*c.SecurityContext.AllowPrivilegeEscalation {\n\t\t\treturn true\n\t\t}\n\t}\n\treturn false\n}\n\nfunc NewProcMountAdmitHandler(runtime kubecontainer.Runtime) PodAdmitHandler {\n\treturn &procMountAdmitHandler{\n\t\tRuntime: runtime,\n\t}\n}\n\ntype procMountAdmitHandler struct {\n\tkubecontainer.Runtime\n}\n\nfunc (a *procMountAdmitHandler) Admit(attrs *PodAdmitAttributes) PodAdmitResult {\n\t// If the pod is already running or terminated, no need to recheck NoNewPrivs.\n\tif attrs.Pod.Status.Phase != v1.PodPending {\n\t\treturn PodAdmitResult{Admit: true}\n\t}\n\n\t// If the containers in a pod only need the default ProcMountType, admit it.\n\tif procMountIsDefault(attrs.Pod) {\n\t\treturn PodAdmitResult{Admit: true}\n\t}\n\n\t// Always admit runtimes except docker.\n\tif a.Runtime.Type() != kubetypes.DockerContainerRuntime {\n\t\treturn PodAdmitResult{Admit: true}\n\t}\n\n\t// Make sure docker api version is valid.\n\t// Merged in https://github.com/moby/moby/pull/36644\n\trversion, err := a.Runtime.APIVersion()\n\tif err != nil {\n\t\treturn PodAdmitResult{\n\t\t\tAdmit: false,\n\t\t\tReason: \"ProcMount\",\n\t\t\tMessage: fmt.Sprintf(\"Cannot enforce ProcMount: %v\", err),\n\t\t}\n\t}\n\tv, err := rversion.Compare(\"1.38.0\")\n\tif err != nil {\n\t\treturn PodAdmitResult{\n\t\t\tAdmit: false,\n\t\t\tReason: \"ProcMount\",\n\t\t\tMessage: fmt.Sprintf(\"Cannot enforce ProcMount: %v\", err),\n\t\t}\n\t}\n\t// If the version is less than 1.38 it will 
return -1 above.\n\tif v == -1 {\n\t\treturn PodAdmitResult{\n\t\t\tAdmit: false,\n\t\t\tReason: \"ProcMount\",\n\t\t\tMessage: fmt.Sprintf(\"Cannot enforce ProcMount: docker runtime API version %q must be greater than or equal to 1.38\", rversion.String()),\n\t\t}\n\t}\n\n\treturn PodAdmitResult{Admit: true}\n}\n\nfunc procMountIsDefault(pod *v1.Pod) bool {\n\t// Iterate over pod containers and check if we are using the DefaultProcMountType\n\t// for all containers.\n\tfor _, c := range pod.Spec.Containers {\n\t\tif c.SecurityContext != nil {\n\t\t\tif c.SecurityContext.ProcMount != nil && *c.SecurityContext.ProcMount != v1.DefaultProcMount {\n\t\t\t\treturn false\n\t\t\t}\n\t\t}\n\t}\n\n\treturn true\n}\n", "meta": {"content_hash": "ad753220939e70346f0e752215cfd7e7", "timestamp": "", "source": "github", "line_count": 293, "max_line_length": 183, "avg_line_length": 29.757679180887372, "alnum_prop": 0.7153343273311159, "repo_name": "quinton-hoole/kubernetes", "id": "c84f7d9ad574b77fec952c8af61d031950b2ab8b", "size": "9288", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "pkg/kubelet/lifecycle/handlers.go", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "2840"}, {"name": "Dockerfile", "bytes": "52078"}, {"name": "Go", "bytes": "48505969"}, {"name": "HTML", "bytes": "38"}, {"name": "Lua", "bytes": "17200"}, {"name": "Makefile", "bytes": "66684"}, {"name": "PowerShell", "bytes": "100022"}, {"name": "Python", "bytes": "3290080"}, {"name": "Ruby", "bytes": "431"}, {"name": "Shell", "bytes": "1555091"}, {"name": "sed", "bytes": "12331"}]}} {"text": "require \"stylesheet_flipper/view_helpers\"\n\nmodule StylesheetFlipper\n class Railtie < Rails::Railtie\n initializer \"stylesheet_flipper.view_helpers\" do\n ActionView::Base.send :include, StylesheetFlipper::ViewHelpers\n end\n initializer \"stylesheet_flipper.initialize_rails\", :group => :all do |app|\n app.assets.register_bundle_processor 'text/css', 
:stylesheet_flipper do |context, data|\n if context.logical_path.include?('-flipped')\n R2.r2 data\n else\n data\n end\n end\n end\n end\nend\n", "meta": {"content_hash": "fbca2a0a4a296f7f53b96f83a65f4ed1", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 93, "avg_line_length": 30.333333333333332, "alnum_prop": 0.6758241758241759, "repo_name": "liisberg-consulting/stylesheet_flipper", "id": "2388bdd300d9edc09a8a25e9eac9d540ec36e802", "size": "546", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/stylesheet_flipper/railtie.rb", "mode": "33188", "license": "mit", "language": [{"name": "Ruby", "bytes": "4137"}]}} {"text": "require 'test_helper'\n\nclass EncounterImporterTest < Minitest::Test\n \n def setup\n collection_fixtures('providers', '_id')\n end\n\n def test_encounter_importing\n doc = Nokogiri::XML(File.new('test/fixtures/NISTExampleC32.xml'))\n doc.root.add_namespace_definition('cda', 'urn:hl7-org:v3')\n pi = HealthDataStandards::Import::C32::PatientImporter.instance\n patient = pi.parse_c32(doc)\n\n encounter = patient.encounters[0]\n assert encounter.codes['CPT'].include? '99241'\n assert_equal encounter.performer.title, \"Dr.\"\n assert_equal 'Kildare', encounter.performer.family_name\n assert_equal encounter.facility.name, 'Good Health Clinic'\n assert encounter.reason.codes['SNOMED-CT'].include? 
'308292007'\n assert_equal encounter.admit_type['code'], 'xyzzy'\n assert_equal encounter.admit_type['codeSystem'], 'CPT'\n assert_equal 'HL7 Healthcare Service Location', encounter.facility.code['codeSystem']\n assert_equal Time.gm(2000, 4, 7).to_i, encounter.facility.start_time\n assert_equal '1117-1', encounter.facility.code['code']\n end\nend\n", "meta": {"content_hash": "75bdc619a091d6dbc3a52bc95014f845", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 89, "avg_line_length": 39.81481481481482, "alnum_prop": 0.7227906976744186, "repo_name": "thecristen/health-data-standards", "id": "8d505db12b2f3d9fc20e56c11fc7436c9411d3bc", "size": "1076", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "test/unit/import/c32/encounter_importer_test.rb", "mode": "33188", "license": "apache-2.0", "language": [{"name": "HTML", "bytes": "309320"}, {"name": "KiCad", "bytes": "1227388"}, {"name": "Ruby", "bytes": "745736"}, {"name": "XSLT", "bytes": "364843"}]}} {"text": "\n\n\n\n\n\n\nedu.uci.ics.jung.algorithms.matrix Class Hierarchy (jung2 2.0.1 API)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
\n\n\n \n \n \n \n \n \n \n \n \n \n
Overview  Package  Class  Use   Tree  Deprecated  Index  Help 
\n
\n\n
\n PREV \n NEXT\n FRAMES  \n NO FRAMES  \n \n\n\n\n
\n\n\n\n


\n
\n

\nHierarchy For Package edu.uci.ics.jung.algorithms.matrix\n

\n
\n
\n
Package Hierarchies:
All Packages
\n
\n

\nClass Hierarchy\n

\n\n

\nInterface Hierarchy\n

\n\n
\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
\n\n\n \n \n \n \n \n \n \n \n \n \n
Overview  Package  Class  Use   Tree  Deprecated  Index  Help 
\n
\n\n
\n PREV \n NEXT\n FRAMES  \n NO FRAMES  \n \n\n\n\n
\n\n\n\n
\nCopyright © 2010 null. All Rights Reserved.\n\n\n", "meta": {"content_hash": "901a64bbd88bc154c2d371360a22a933", "timestamp": "", "source": "github", "line_count": 156, "max_line_length": 708, "avg_line_length": 46.69871794871795, "alnum_prop": 0.6256691832532602, "repo_name": "tobyclemson/msci-project", "id": "d8ca4877aec259bffc8d00215bf4c921d164fa21", "size": "7285", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vendor/jung-2.0.1/doc/edu/uci/ics/jung/algorithms/matrix/package-tree.html", "mode": "33188", "license": "mit", "language": [{"name": "Java", "bytes": "89867"}, {"name": "Ruby", "bytes": "137019"}]}} {"text": "\n\n\n \n \n \n \n \n\n\n
\nwindow.hdjs = {};\nwindow.hdjs.base = '../';\nwindow.hdjs.uploader = 'php/uploader.php?';\nwindow.hdjs.filesLists = 'php/filesLists.php?';\n
\n\n\n", "meta": {"content_hash": "f3b3ed2f92bcc5ecb4865a64d96c0e0e", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 96, "avg_line_length": 28.807692307692307, "alnum_prop": 0.6221628838451269, "repo_name": "houdunwang/hdjs", "id": "cfdc6e5e0088af2d81ddcd5c65222d37009ba17e", "size": "749", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/prismjs.html", "mode": "33188", "license": "mit", "language": [{"name": "ActionScript", "bytes": "145251"}, {"name": "AngelScript", "bytes": "1655"}, {"name": "C", "bytes": "10562"}, {"name": "C++", "bytes": "146348"}, {"name": "CSS", "bytes": "668826"}, {"name": "CoffeeScript", "bytes": "2471"}, {"name": "HTML", "bytes": "244754"}, {"name": "JavaScript", "bytes": "4658455"}, {"name": "Makefile", "bytes": "7045"}, {"name": "Objective-C", "bytes": "2353"}, {"name": "PHP", "bytes": "20344"}, {"name": "Ruby", "bytes": "587"}]}} {"text": "

\n \n

\n

\n \n

\n\n[![Version](https://img.shields.io/cocoapods/v/SwiftPages.svg?style=flat)](http://cocoapods.org/pods/SwiftPages)\n[![License](https://img.shields.io/cocoapods/l/SwiftPages.svg?style=flat)](http://cocoapods.org/pods/SwiftPages)\n[![Platform](https://img.shields.io/cocoapods/p/SwiftPages.svg?style=flat)](http://cocoapods.org/pods/SwiftPages)\n\n

Features

\n---\n\n- A simple yet beautifully architected solution for management of paged-style view controllers.\n- Dynamic loading of view controllers, allowing handling of high amounts of data without compromising memory.\n- Highly customisable, all items have clean API\u2019s to change them to any appearance or size.\n- Can be sized and positioned anywhere within a view controller.\n- Made for iPhone and iPad.\n- Extensively documented code for quick understanding.\n
\n

\n \n

\n\n

Installation

\n---\n\n### CocoaPods\n\nSwiftPages is available through [CocoaPods](http://cocoapods.org). To install\nit, simply add the following line to your Podfile:\n\n```ruby\npod \"SwiftPages\"\n```\n\n### Manual\n\nJust Include the SwiftPages.swift file found on the demo in your project, and you\u2019re good to go!\n\n

Usage

\n---\n\nUsing **SwiftPages** in your project is very simple and straightforward. \n\n### Create a SwiftPages Instance\n\nFirst create your SwiftPages instance, there are two ways to do it, as an **IBOoutlet** of a view of type SwiftPages from the storyboard, or programmatically:\n\n**As an IBOoutlet of a view of type SwiftPages from the storyboard**\n
\nPlace a UIView in your view controller and assign its constraints, make its class be of type SwiftPages. Then control drag to your view controller as an IBOutlet.\n\t\n**As a fully programmatic SwiftPages view.**\n
\nDeclare it in the viewDidLoad function of your view controller and set the desired position and size:\n```swift\nlet swiftPagesView : SwiftPages!\nswiftPagesView = SwiftPages(frame: CGRectMake(0, 0, self.view.frame.width, self.view.frame.height))\n```\nThen, after the initialization (described below), add it as a subview on your view controller:\n```swift\nself.view.addSubview(swiftPagesView)\n```\n\n### Initialization\nSwiftPages can be initialized in one of two ways:\n\n**Initialize with images as buttons on the top bar:**\n
\nFirst create an array of strings, the strings will be the Storyboard ID's of the view controllers you would like to include:\n```swift\nvar VCIDs : [String] = [\"FirstVC\", \"SecondVC\", \"ThirdVC\", \"FourthVC\", \"FifthVC\"]\n```\nThen create an array of UIImages which will correlate in order to the VC ID's array created above, it also has to have the same number of items as the aforementioned array:\n```swift\nvar buttonImages : [UIImage] = [UIImage(named:\"HomeIcon.png\")!,\n UIImage(named:\"LocationIcon.png\")!,\n UIImage(named:\"CollectionIcon.png\")!,\n UIImage(named:\"ListIcon.png\")!,\n UIImage(named:\"StarIcon.png\")!]\n```\nFinally, use the `initializeWithVCIDsArrayAndButtonImagesArray` function with the two arrays created:\n```swift\nswiftPagesView.initializeWithVCIDsArrayAndButtonImagesArray(VCIDs, buttonImagesArray: buttonImages)\n```\n\n**Initialize with text on buttons:**\n
\nFirst, alike with the image initialization, create an array of strings, the strings will be the Storyboard ID's of the view controllers you would like to include:\n```swift\nvar VCIDs : [String] = [\"FirstVC\", \"SecondVC\", \"ThirdVC\", \"FourthVC\", \"FifthVC\"]\n```\nThen create an array of titles which will correlate in order to the VC ID's array created above, it must have the same number of items as the aforementioned array:\n```swift\nvar buttonTitles : [String] = [\"Home\", \"Places\", \"Photos\", \"List\", \"Tags\"]\n```\nFinally, use the `initializeWithVCIDsArrayAndButtonTitlesArray` function with the two arrays created:\n```swift\nswiftPagesView.initializeWithVCIDsArrayAndButtonTitlesArray(VCIDs, buttonTitlesArray: buttonTitles)\n```\n\n

Customisation

\n---\n\nOnce you have your `SwiftPages` instance you can customize the appearance of all item's using the class API's, to view the API list look for the `API's` Mark on the SwiftPages class. Below is a brief customization sample:\n```swift\nswiftPagesView.enableAeroEffectInTopBar(true)\nswiftPagesView.setButtonsTextColor(UIColor.whiteColor())\nswiftPagesView.setAnimatedBarColor(UIColor.whiteColor())\n```\n\n

Example

\n---\n\nYou can find a full example on usage and customization on the Xcode project attached to this repository.\n\n

License

\n---\n\nThe MIT License (MIT)\n\n**Copyright (c) 2015 Gabriel Alvarado (gabrielle.alva@gmail.com)**\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n", "meta": {"content_hash": "3ee0dc6d302cc7ec08b3e62ac1da1c22", "timestamp": "", "source": "github", "line_count": 142, "max_line_length": 221, "avg_line_length": 43.45070422535211, "alnum_prop": 0.7482982171799027, "repo_name": "chipivk/SwiftPages", "id": "bd01826b3e1a7bf8608583e08afee87d32eaaf39", "size": "6174", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "README.md", "mode": "33261", "license": "mit", "language": [{"name": "Ruby", "bytes": "777"}, {"name": "Swift", "bytes": "62442"}]}} {"text": "import unittest\n\nfrom webkitpy.common.system.systemhost_mock import MockSystemHost\n\nfrom webkitpy.layout_tests.port.base import Port\nfrom webkitpy.layout_tests.port.driver import Driver, DriverOutput\nfrom webkitpy.layout_tests.port import browser_test, browser_test_driver\nfrom 
webkitpy.layout_tests.port.server_process_mock import MockServerProcess\n\nfrom webkitpy.layout_tests.port.port_testcase import TestWebKitPort\n\nfrom webkitpy.tool.mocktool import MockOptions\n\n\nclass BrowserTestDriverTest(unittest.TestCase):\n def test_read_stdin_path(self):\n port = TestWebKitPort()\n driver = browser_test_driver.BrowserTestDriver(port, 0, pixel_tests=True)\n driver._server_process = MockServerProcess(lines=[\n 'StdinPath: /foo/bar', '#EOF'])\n content_block = driver._read_block(0)\n self.assertEqual(content_block.stdin_path, '/foo/bar')\n driver._stdin_directory = None\n", "meta": {"content_hash": "17da925bdfa83456a8f026f406c8d916", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 81, "avg_line_length": 39.91304347826087, "alnum_prop": 0.7559912854030502, "repo_name": "highweb-project/highweb-webcl-html5spec", "id": "576e0b0863783bf19cf80b8d9a25e716d8f37d58", "size": "2445", "binary": false, "copies": "1", "ref": "refs/heads/highweb-20160310", "path": "third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/port/browser_test_driver_unittest.py", "mode": "33188", "license": "bsd-3-clause", "language": []}} {"text": "\n\n\t\n\t\t\n\t\t\n\n\t\tArticulating Design Decisions\n\n\t\t\n\t\t\n\n\t\t\n\t\t\n\n\t\t\n\t\t\n\t\n\t\n\t\t
\n\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t
\n\t\t\t\t\t\t

Articulating Design Decisions

\n\t\t\t\t\t\t

\n\t\t\t\t\t\t\tBrought to you by the good designers of Gaslight.\n\t\t\t\t\t\t

\n\t\t\t\t\t\t
\n\t\t\t\t\t\t

\n\t\t\t\t\t\t\tteamgaslight.com\n\t\t\t\t\t\t

\n\t\t\t\t\t
\n\t\t\t\t\t
\n\t\t\t\t\t\t

Table of Contents

\n\t\t\t\t\t\t\n\t\t\t\t\t\t\t
    \n\t\t\t\t\t\t\t\t
  1. A Maturing Industry
  2. \n\t\t\t\t\t\t\t\t
  3. Great Designers are Great Communicators
  4. \n\t\t\t\t\t\t\t\t
  5. Understanding Relationships
  6. \n\t\t\t\t\t\t\t\t
  7. Reducing Cognitive Load
  8. \n\t\t\t\t\t\t\t\t
  9. Listening Is Understanding
  10. \n\t\t\t\t\t\t\t\t
  11. The Right Frame of Mind
  12. \n\t\t\t\t\t\t\t\t
  13. The Response: Strategy and Tactics
  14. \n\t\t\t\t\t\t\t\t
  15. The Response: Common Messages
  16. \n\t\t\t\t\t\t\t\t
  17. The Ideal Response: Getting Agreement
  18. \n\t\t\t\t\t\t\t\t
  19. Meeting Adjourned: The After-Party
  20. \n\t\t\t\t\t\t\t\t
  21. Recovering from Disaster
  22. \n\t\t\t\t\t\t\t\t
  23. For Nondesigners
  24. \n\t\t\t\t\t\t\t\t
  25. Designing for Vision
  26. \n\t\t\t\t\t\t\t
\n\t\t\t\t\t\t
\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t

Our Goal

\n\t\t\t\t

Information is freer than ever \u2014 but that means that there's more to dig through to find gems

\n\t\t\t\t

The Design Squad\u2122, in the interest of ongoing education, took on the task of reading and synthesizing content and then bringing it together.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t

Our Goal

\n\t\t\t\t

Then obviously we needed to share it with everyone else too.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

The Book Selection

\n\t\t\t\t\t

Since it's been on the docket for a while and everyone could benefit from amping up their consulting skills, we chose to read Articulating Design Decisions by Tom Greever

\n\t\t\t\t
\n\n\n\n\t\t\t\t
\n\t\t\t\t

Chapter 1

\n\t\t\t\t

A Maturing Industry

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Design: The New Hotness

\n\t\t\t\t\t

Design, once seen as niche and auxiliary, has come front-and-center in a world where people use more and more software products and come to expect them to look and work well.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Design: The New Hotness

\n\t\t\t\t\t

Good design is very clearly tied to product success.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Design: The New Hotness

\n\t\t\t\t\t

It would then follow that designers, those agents of design, would also be the new hotness, right?

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Design[ers]: The Reality

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • UX is a young discipline that's still being figured out.
  • \n\t\t\t\t\t\t
  • Designers are practiced in justifying design to designers, less so with those outside the discipline.
  • \n\t\t\t\t\t\t
  • Design is as much about subjective personal preferences as it is about logically-made, research-backed decisions.
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t

Chapter 2

\n\t\t\t\t

Great Designers Are Great Communicators

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Everyone's a Critic

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • Non-Designers are immersed in designed experiences and can tell what looks good, but can't express why.
  • \n\t\t\t\t\t\t
  • Design is \"special\" in that people uninvolved can have an opinion on the \"how\" of the work, not just the \"what\".
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Design House of Horrors

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • The CEO Button
  • \n\t\t\t\t\t\t
  • Home Page Syndrome
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Using Your Words

\n\t\t\t\t\t

Most issues are grounded in miscommunication or misunderstandings.

\n\t\t\t\t\t

\"Most issues\" being 99.9% of the problems.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Using Your Words

\n\t\t\t\t\t

Words are powerful! They are the tools you use to steer people in the right direction!

\n\t\t\t\t\t

The key is to understand what message you want to communicate and the response you want.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Using Your Words

\n\t\t\t\t\t

Being able to articulate your design choices gains the trust of your clients and keeps you from getting steamrolled by...

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • Imparting intelligence.
  • \n\t\t\t\t\t\t
  • Demonstrating intentionality.
  • \n\t\t\t\t\t\t
  • Showing respect.
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Making a Successful Design

\n\t\t\t\t\t

A successful design will...

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  1. Solve a problem.
  2. \n\t\t\t\t\t\t
  3. Be easy for users.
  4. \n\t\t\t\t\t\t
  5. Be supported by everyone.
  6. \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Makings of a Successful Design

\n\t\t\t\t\t

To find the right solution, you need a clearly defined problem.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Makings of a Successful Design

\n\t\t\t\t\t

Set goals and key progress indicators to establish what success looks like. These can be drawn from what's important to your stakeholders.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Makings of a Successful Design

\n\t\t\t\t\t

You must be consciously aware of each decision made and why.

\n\t\t\t\t\t

Ask \"what problem am I trying to solve with this?\" and then answer it. Write it down to get in the habit!

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Makings of a Successful Design

\n\t\t\t\t\t

Practice describing your designs without visual aides to become more precise in how you clarify your designs and thinking.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Makings of a Successful Design

\n\t\t\t\t\t

Making sure a design actually resonates with users is grounded in intentionality. \"How does this affect the user?\"

\n\t\t\t\t\t

It's entirely legitimate to make your best guesses and then try them out.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Makings of a Successful Design

\n\t\t\t\t\t

Practice writing stories about these design changes within the context of how they affect the user.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Makings of a Successful Design

\n\t\t\t\t\t

The most sicknasty design doesn't get you anywhere if you can't sell people on it.

\n\t\t\t\t\t

Even worse, people will continue to suggest other alternatives if they're not convinced that you're right.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Makings of a Successful Design

\n\t\t\t\t\t

You need to get clients and other team members on your level!

\n\t\t\t\t\t

Shared understanding of what you're trying to achieve and where you're headed helps achieve this.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Makings of a Successful Design

\n\t\t\t\t\t

To properly share your design savvy with others on your team takes a little additional legwork.

\n\t\t\t\t\t

It's easy to land on what looks like the right decision, but leave no stone unturned — finding these other solutions and knowing why they don't work allows you to summon this information later if they're pitched as alternatives.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Makings of a Successful Design

\n\t\t\t\t\t

Being right is satisfying, but knowing why and being thoughtful are more important than being able to design the perfect solution every time.

\n\t\t\t\t
\n\n\n\n\n\n\n\n\t\t\t\t
\n\t\t\t\t

Chapter 3

\n\t\t\t\t

Understanding Relationships

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

First, Improving Communication

\n\t\t\t\t\t

The single most important thing you can do to improve communication is to improve relationships.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Relationships & Stakeholder

\n\t\t\t\t\t

Uxers are so good at putting the user first but often fail to do the same for the stakeholders, the people who have influence over the project.

\n\t\t\t\t\t

Applying the same principles we use to put the users first should be applied to the people we work with, that way we can create a better product together

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Relationships & Stakeholder

\n\t\t\t\t\t

In order to approach them the right way we need to:

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • See them as human
  • \n\t\t\t\t\t\t
  • Create shared experiences
  • \n\t\t\t\t\t\t
  • Develop empathy
  • \n\t\t\t\t\t\t
  • Ask good questions
  • \n\t\t\t\t\t\t
  • Identifying influencers
  • \n\t\t\t\t\t\t
  • Building good relationships
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

See them as human

\n\t\t\t\t\t

There are always things that are influencing people\u2019s behaviors that we don\u2019t know about and there will always be things that we simply can\u2019t predict.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

See them as human

\n\t\t\t\t\t

A person\u2019s attitudes and responses to your work might have more to do with the things outside of what you\u2019re showing them.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Create shared experiences

\n\t\t\t\t\t

When we don\u2019t have anything in common with another person, it\u2019s nearly impossible to talk to them.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Create shared experiences

\n\t\t\t\t\t

Finding ways to create connections with other people is an important step toward understanding them.

\n\t\t\t\t\t

Connections can be made easily through simple questions (non work questions!).

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Develop empathy

\n\t\t\t\t\t

Having empathy for stakeholders allows you to understand from their perspective.

\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Develop empathy

\n\t\t\t\t\t

It simply means that your priority for communicating with them has shifted from a position of defense to one of solidarity.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Ask good questions

\n\t\t\t\t\t

You should learn to view things from the perspective of your stakeholders in the same way that you would with users of your application

\n\t\t\t\t\t

By asking questions.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Identifying Influencers

\n\t\t\t\t\t

Every project has a variety of people who influence its outcome, the three main types you need to understand are:

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • Team Influencers
  • \n\t\t\t\t\t\t
  • Executive Influencers
  • \n\t\t\t\t\t\t
  • External Influencers\t
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Build Good Relationships

\n\t\t\t\t\t

Communication is much easier in good relationships.

\n\t\t\t\t\t

Good relationships take work.

\n\t\t\t\t\t

Take the time to do the simple things that will help you to improve your relationships and, as a byproduct, your communication with each other

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\n\t\t\t\t
\n\t\t\t\t

Chapter 4

\n\t\t\t\t

Reducing Cognitive Load

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Reducing Cognitive Load

\n\t\t\t\t\t

Reducing the cognitive load not only for the stakeholders but our team and for ourselves so we can have a

\n\t\t\t\t\t

Succesful

\n\t\t\t\t\t

Productive

\n\t\t\t\t\t

Valuable Meeting

\n\t\t\t\t\t

But how??

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Remove Distractions!

\n\t\t\t\t\t

A lot of people are easily distracted by things that simply do not matter to the goal of the meeting.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Remove Distractions!

\n\t\t\t\t\t

Getting to know people can help identify what is distracting to them, so you can remove those things from the conversation.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Anticipate Reactions

\n\t\t\t\t\t

When we combine what we know about the infleuncers of the project with the values they carry in their role, we can make some pretty good guesses about how they\u2019ll respond to our designs

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Anticipate Reactions

\n\t\t\t\t\t

So! Using this information we should curate the flow of our design discussion.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Create a Support Network

\n\t\t\t\t\t

Getting other people to support your decisions is about showing that you\u2019re not alone in your ideas.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\n\t\t\t\t
\n\t\t\t\t\t

& Finally

\n\t\t\t\t\t

The Dress Rehearsal

\n\t\t\t\t\t

Now that you understand your stakeholders, have removed the distractions, anticipated their reactions, and gathered a group of people to back you up, it\u2019s time to:

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • Make a List
  • \n\t\t\t\t\t\t
  • Practice Out loud
  • \n\t\t\t\t\t\t
  • Prep Everyone
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Now, Let's Have

\n\t\t\t\t\t

A Succesful Meeting

\n\t\t\t\t\t

Reducing Cognitive Load for us and our Stakeholders allows everyone to focus on the decisions at hand creating more productive conversations.

\n\t\t\t\t\t

Productivity Rules!!

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\n\n\n\n\n\n\n\t\t\t\t
\n\t\t\t\t\t

Chapter 5

\n\t\t\t\t\t

Listening is Understanding

\n\t\t\t\t\t
\n\t\t\t\t\t\t

No man ever listened himself out of a job.

\n\t\t\t\t\t\t

―Calvin Coolidge

\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Implicit Activities

\n\t\t\t\t\t

\n\t\t\t\t\t\t Be empathetic. Hear what your stakeholders are saying and try to understand the meaning of what's being said from their perspective.\n\t\t\t\t\t

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • Let them talk
  • \n\t\t\t\t\t\t
  • Hear what isn't being said
  • \n\t\t\t\t\t\t
  • Uncover the real problem
  • \n\t\t\t\t\t\t
  • The art of the pause
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Let them Talk

\n\t\t\t\t\t

Give your stakeholders the space they need to fully describe their ideas.

\n\t\t\t\t\t

Three main benfits:

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  1. They will make themselves more clear
  2. \n\t\t\t\t\t\t
  3. It gives them confidence that they were understood
  4. \n\t\t\t\t\t\t
  5. It demonstrates that you value what they're saying
  6. \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Trust is Everything

\n\t\t\t\t\t

Showing that you can let someone express their ideas freely will help to build trust.

\n\t\t\t\t\t

They will be more likely to agree with you if you can relate to what they've said.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Hear What isn't Being Said

\n\t\t\t\t\t

\n\t\t\t\t\t\tThere is subtext to every bit of feedback. The better we can decode these messages, the better we can correct our course of action.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Uncover the Real Problem

\n\t\t\t\t\t

People naturally jump to solutions. It's our job to identify the problems behind those solutions.

\n\t\t\t\t\t

Ask the 5 Whys

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

The Art of the Pause

\n\t\t\t\t\t
\n\t\t\t\t\t\t

The right word may be effective, but no word was ever as effective as a rightly timed pause.

\n\t\t\t\t\t\t

―Mark Twain

\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

The Three Purposes of the Pause

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  1. Give them a chance to expand on, or correct, what they've just said.
  2. \n\t\t\t\t\t\t
  3. Let their statements sink in. Take a few seconds to check yourself and form an appropiate response.
  4. \n\t\t\t\t\t\t
  5. \n\t\t\t\t\t\t\tNonverbally communicate that what was just said is worth taking the time to seriously consider and ponder over for a moment.\n\t\t\t\t\t\t\t

    Be aware of your body language.

    \n\t\t\t\t\t\t
  6. \n\t\t\t\t\t
\n\t\t\t\t
\n\n\t\t\t\t
\n\t\t\t\t\t

Explicit Activities

\n\t\t\t\t\t

Verbally demonstrate that you're listening and outwardly show that you're engaged in the conversation

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • Take notes
  • \n\t\t\t\t\t\t
  • Ask questions
  • \n\t\t\t\t\t\t
  • Repeat or rephrase what's being said
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Write Everything Down

\n\t\t\t\t\t

You're not going to remember everything that your stakeolders say or suggest—you're just not.

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • Notes prevent you from having the same conversation again—create a paper trail.
  • \n\t\t\t\t\t\t
  • Notes free you to focus on being articulate.
  • \n\t\t\t\t\t\t
  • Notes build trust with your stakeholders.
  • \n\t\t\t\t\t\t
  • Notes keep the meeting on track.
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Taking Better Notes

\n\t\t\t\t\t

Your notes should be:

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • Accessible - even during the meeting
  • \n\t\t\t\t\t\t
  • Organized - related to UI elements or agenda items
  • \n\t\t\t\t\t\t
  • Specific - who suggested what?
  • \n\t\t\t\t\t\t
  • Definitive - have you reached a decison or is there follow-up work that needs to be done?
  • \n\t\t\t\t\t\t
  • Actionable - if there's no action, it's not useful
  • \n\t\t\t\t\t\t
  • Referenced - add links, URLs, screenshots, ect.
  • \n\t\t\t\t\t\t
  • Forward-looking - make room for follow-up discussions for the next meeting.
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Ask Questions

\n\t\t\t\t\t

So much of listening is just getting the other person to talk.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Helpful Questions

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • What problem are you trying to solve?
  • \n\t\t\t\t\t\t
  • What are the advantages of doing it this way?
  • \n\t\t\t\t\t\t
  • What do you suggest?
  • \n\t\t\t\t\t\t
  • How will this affect our goals?
  • \n\t\t\t\t\t\t
  • Where have you seen this before?
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Repeat or Rephrase

\n\t\t\t\t\t

Without a shared vocabulary, there will inevitably be misunderstanding.

\n\t\t\t\t\t
\n\t\t\t\t\t\t

The begining of wisdom is the definition of terms.

\n\t\t\t\t\t\t

―Socrates

\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Rephrase:

\n\t\t\t\t\t

Convert \"Likes\" to \"Works\"

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • Focus on effectiveness
  • \n\t\t\t\t\t\t
  • Ask clarifying questions \u2013 \"Why don't you think this works?\"
  • \n\t\t\t\t\t\t
  • Move from discussing preferences to describing functionality
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Repeat:

\n\t\t\t\t\t

\"What I Hear You Saying...\"

\n\t\t\t\t\t

Translate what's being said into what will become our common ground.

\n\t\t\t\t\t

Bridge the language gap by showing:

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • You're listening to them
  • \n\t\t\t\t\t\t
  • You understand what they said
  • \n\t\t\t\t\t\t
  • You can express their ideas in our own words that are more helpful in the design decision-making process
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\n\t\t\t\t
\n\t\t\t\t\t

Chapter 6

\n\t\t\t\t\t

The Right Frame of Mind

\n\t\t\t\t\t

Thank. Repeat. Prepare.

\n\t\t\t\t\t
\n\t\t\t\t\t\t

First learn the meaning of what you say, and then speak.

\n\t\t\t\t\t\t

―Epictetus

\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Give Up Control

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • You're going to need approval from others.
  • \n\t\t\t\t\t\t
  • Don't take feedback personally.
  • \n\t\t\t\t\t\t
  • Your work is not your own—you need help from others.
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Check Your Ego at the Door

\n\t\t\t\t\t

See the value in what the other person is saying.

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • You're not the only one with good ideas
  • \n\t\t\t\t\t\t
  • You don't have all the best solutions
  • \n\t\t\t\t\t\t
  • Your way isn't the only way to accomplish the goals
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Check Your Ego at the Door

\n\t\t\t\t\t

Removing your ego makes you less defensive and therefore better prepared to respond appropriately.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Lead with a YES

\n\t\t\t\t\t

We're all in this together. We're headed towards the same goals and with the same level of passion for the product.

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • Be collaborative
  • \n\t\t\t\t\t\t
  • Give ideas permission to succeed, even if they might seem impossible
  • \n\t\t\t\t\t\t
  • Keep the conversation open-ended
  • \n\t\t\t\t\t\t
  • Empower people to share their thoughts, ideas and be part of the solution
  • \n\t\t\t\t\t\t
  • Build trust and confidence with your stakeholders
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Be Charming

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • Have confidence
  • \n\t\t\t\t\t\t
  • Just be yourself
  • \n\t\t\t\t\t\t
  • Don't take yourself so seriously
  • \n\t\t\t\t\t\t
  • Orient yourself towards others
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Change Your Vocabulary

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\t \"You're wrong\" \n\t\t\t\t\t\t\t
      \n\t\t\t\t\t\t\t\t
    • Stay positive and always lead with a \"yes\"
    • \n\t\t\t\t\t\t\t
    \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\t \"From a design perspective...\" \n\t\t\t\t\t\t\t
      \n\t\t\t\t\t\t\t\t
    • \"The reason we did it this way...\"
    • \n\t\t\t\t\t\t\t
    \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\t\"Like\" and \"Don't like\"\n\t\t\t\t\t\t\t
      \n\t\t\t\t\t\t\t\t
    • Focus on what works and doesn't work
    • \n\t\t\t\t\t\t\t
    \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\t Too much jargon \n\t\t\t\t\t\t\t
      \n\t\t\t\t\t\t\t\t
    • Stay within the vernacular
    • \n\t\t\t\t\t\t\t
    \n\t\t\t\t\t\t
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Make a Transition

\n\t\t\t\t\t

The response before the response.

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  1. Thank
  2. \n\t\t\t\t\t\t
  3. Repeat
  4. \n\t\t\t\t\t\t
  5. Prepare
  6. \n\t\t\t\t\t
\n\t\t\t\t
\n\n\n\n\n\n\n\n\n\t\t\t\t
\n\t\t\t\t\t

Chapter 10

\n\t\t\t\t\t

Meeting Adjourned: The After-Party

\n\t\t\t\t\t
\n\t\t\t\t\t\t

The single biggest problem in communication is the illusion that it has taken place.

\n\t\t\t\t\t\t

―George Bernard Shaw

\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

The time after the meeting is crucial. It's prime time to make sure no disasterous design decisions are made. Some things you'll need to do:

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tStick around and chat with people\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tFollow up quickly with your notes\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tApply filters and remove the fluff\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tSeek out individuals who can help you\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tMake decisions when there is ambiguity\n\t\t\t\t\t\t
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

The Meeting After the Meeting

\n\t\t\t\t\t

The hallway after a meeting is where the real feelings come out.

\n\t\t\t\t\t
    \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

The Meeting After the Meeting

\n\t\t\t\t\t

This is the time where decisions can be solidified and people can share their thoughts more openly. Often participants can feel too timid to bring something up in a meeting because they may feel like their opinion isn't as influential or as solid as someone else's. However, they may feel more comfortable opening up after the meeting.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Follow up Fast

\n\t\t\t\t\t

Recorded follow ups show that you value the people involved, their time, and their ideas.

\n\t\t\t\t\t
    \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Follow up Fast

\n\t\t\t\t\t

The follow up should be written preferably within an hour of the meeting, or at least within a day. It should include:

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tA word of thanks to the participants\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tA list of what was discussed\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tAction items and next steps\n\t\t\t\t\t\t
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Follow up Fast

\n\t\t\t\t\t

Don't be afraid to delegate what people are doing what tasks. Be specific. Ask direct questions. Keep it brief.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Apply Filters

\n\t\t\t\t\t

Cut out unnecessary information so the team can stay focused.

\n\t\t\t\t\t
    \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Apply Filters

\n\t\t\t\t\t

Sometimes people bring up ideas just for the sake of innovation instead of concentrating on objectives. Here's how to assess a person with wild ideas:

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tWhat are the person's intentions?\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tWhat is everyone's opinions of the person?\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tDo other people agree or disagree?\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tIs this person influential enough to matter?\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tIs this person likely to bring it up again?\n\t\t\t\t\t\t
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Apply Filters

\n\t\t\t\t\t

It's not about ignoring people, but learning to discern whether comments do or don't align with the project's objectives.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Apply Filters

\n\t\t\t\t\t
\n\t\t\t\t\t\t

\"If they aren't influential, no one agrees with them, and they aren't likely to bring it up again, it's a safe bet you can just move on and never mention it again.\"

\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Apply Filters

\n\t\t\t\t\t

Incorporating everyone's opinions is a dangerous path. Fine tune your judgement to leave out what clouds the objectives.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Seek Out Individuals

\n\t\t\t\t\t

Be open to communication after the meeting.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Seek Out Individuals

\n\t\t\t\t\t

If there's someone you might benefit from talking to, find them immediately after the meeting. The purpose of these conversations is to give people the space to share their unfiltered thoughts outside the pressure of an organized setting.

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Do Something, Even if it's Wrong

\n\t\t\t\t\t

Since many meetings end with ambiguity, it's better to do something rather than nothing.

\n\t\t\t\t\t
    \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Do Something, Even if it's Wrong

\n\t\t\t\t\t
\n\t\t\t\t\t\t

\"It's better to do something (even if it's wrong) and give your team the opportunity to speak out for or against your choice rather than deal with stale decisions and a stagnant design process. Sometimes, you just need to decide and tell everyone else what you're going to do and get them to speak up.\"

\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Do Something, Even if it's Wrong

\n\t\t\t\t\t

\n\t\t\t\t\t\tTake the lead and make some kind of rough draft in order to combat decision paralysis. It'll at least get people's attention.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Remember these things when the meeting is over:

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tThe time after the meeting is when you can hear people's unfiltered thoughts\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tFollow up quickly in order to communicate urgency, value, and decisiveness\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tFilter out unnecessary points from your notes that don't need follow up\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tBe open to communicating after the meeting\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tThe only way to move forward in times of ambiguity is to make some kind of decision, even if it's wrong\n\t\t\t\t\t\t
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\n\t\t\t\t
\n\t\t\t\t\t

Chapter 11

\n\t\t\t\t\t

Recovering from Disaster

\n\t\t\t\t\t
\n\t\t\t\t\t\t

In every difficult situation is potential value. Believe this; then begin looking for it.

\n\t\t\t\t\t\t

―Norman Vincent Peale

\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

\n\t\t\t\t\t\tSometimes, no matter how hard we try, have to roll with design decisions we disagree with.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

How is This Possible?

\n\t\t\t\t\t

\n\t\t\t\t\t\tThe first step in addressing disaster is to understand why it happened and how it could have been avoided. A few causes for disaster:\n\t\t\t\t\t

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  1. \n\t\t\t\t\t\t\tThey have a specific need that isn't being met\n\t\t\t\t\t\t
  2. \n\t\t\t\t\t\t
  3. \n\t\t\t\t\t\t\tThey want to know they're being heard\n\t\t\t\t\t\t
  4. \n\t\t\t\t\t\t
  5. \n\t\t\t\t\t\t\tThere is a misunderstanding\n\t\t\t\t\t\t
  6. \n\t\t\t\t\t\t
  7. \n\t\t\t\t\t\t\tYour designs are not the best solution\n\t\t\t\t\t\t
  8. \n\t\t\t\t\t\t
  9. \n\t\t\t\t\t\t\tThey are completely unreasonable\n\t\t\t\t\t\t
  10. \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

How is This Possible?

\n\t\t\t\t\t

They have a specific need that isn't being met

\n\t\t\t\t\t

\n\t\t\t\t\t\tSometimes one need is there because the stakeholder has another underlying need that isn't being addressed.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

How is This Possible?

\n\t\t\t\t\t

They want to know they're being heard

\n\t\t\t\t\t

\n\t\t\t\t\t\tSometimes clients insist on a change because they don't think they're being heard or valued.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

How is This Possible?

\n\t\t\t\t\t

There is a misunderstanding

\n\t\t\t\t\t

\n\t\t\t\t\t\tMiscommunications come often and are a sure-fire way to disaster.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

How is This Possible?

\n\t\t\t\t\t

Your designs are not the best solution

\n\t\t\t\t\t

\n\t\t\t\t\t\tStakeholders are leaders for a reason and know the domain best, so they may be right.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t\n\t\t\t\t\t

\n\t\t\t\t\t\tSIT DOWN, BE HUMBLE\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

How is This Possible?

\n\t\t\t\t\t

They are completely unreasonable

\n\t\t\t\t\t

\n\t\t\t\t\t\tUsually when we think someone's unreasonable, we just fail to see things from their perspective. However, inherently unreasonable people do exist.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Making Changes You Disagree With

\n\t\t\t\t\t

\n\t\t\t\t\t\tSome tactics for addressing decisions that you feel might hinder the user experience:\n\t\t\t\t\t

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  1. \n\t\t\t\t\t\t\tMake it subtle\n\t\t\t\t\t\t
  2. \n\t\t\t\t\t\t
  3. \n\t\t\t\t\t\t\tMake it an option\n\t\t\t\t\t\t
  4. \n\t\t\t\t\t\t
  5. \n\t\t\t\t\t\t\tCarefully consider placement\n\t\t\t\t\t\t
  6. \n\t\t\t\t\t\t
  7. \n\t\t\t\t\t\t\tThe hidden menu\n\t\t\t\t\t\t
  8. \n\t\t\t\t\t\t
  9. \n\t\t\t\t\t\t\tPlan a space\n\t\t\t\t\t\t
  10. \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Making Changes You Disagree With

\n\t\t\t\t\t

Make it subtle

\n\t\t\t\t\t

\n\t\t\t\t\t\tFind a middle ground between what a stakeholder wants and what's cohesive for the UX.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Making Changes You Disagree With

\n\t\t\t\t\t

Make it an option

\n\t\t\t\t\t

\n\t\t\t\t\t\tIf it's viable for your product, you could limit it's audience.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Making Changes You Disagree With

\n\t\t\t\t\t

Carefully consider placement

\n\t\t\t\t\t

\n\t\t\t\t\t\tIf it's viable for your product, you could limit the amount of views it exists in.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Making Changes You Disagree With

\n\t\t\t\t\t

The hidden menu

\n\t\t\t\t\t

\n\t\t\t\t\t\tSometimes elements could be carefully tucked away in a less accessible location.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Making Changes You Disagree With

\n\t\t\t\t\t

Plan a space

\n\t\t\t\t\t

\n\t\t\t\t\t\tPlan an area in the layout that could easily accommodate temporary changes.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Making Lemonade

\n\t\t\t\t\t\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Making Lemonade

\n\t\t\t\t\t

\n\t\t\t\t\t\tUsually it's not one person's bad idea that will ruin a design. It's often poor execution of that possible bad idea that has the power to actually do damage.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Making Lemonade

\n\t\t\t\t\t

\n\t\t\t\t\t\tThink of decisions that you disagree with as constraints. Use those constraints to actually improve the product instead of plopping them there exactly as suggested.\n\t\t\t\t\t

\n\t\t\t\t\t
\n\t\t\t\t\t\t

\"One person's suggestions is a gold mine of other ideas waiting to be excavated.\"

\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Making Lemonade

\n\t\t\t\t\t

\n\t\t\t\t\t\tTake a seemingly unfortunate decision and dig deeper. Being open to one change may reveal something else.\n\t\t\t\t\t

\n\t\t\t\t\t
\n\t\t\t\t\t\t

\"Occasionally, having a stakeholder insist on a change can lead you down a path to improving the app in a way that you never expected, solving problems you might not have uncovered otherwise.\"

\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

The Bank Account of Trust

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tDeposits: When your design is praised\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tWithdrawls: When a client disagrees with your design\n\t\t\t\t\t\t
  • \n\t\t\t\t\t
\n\t\t\t\t\t

\n\t\t\t\t\t\tStakeholder relationships depend on this bank account of trust. Learn which battles are worth fighting. You have to constantly balance the needs of users and the requirements of stakeholders.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

When You're Wrong

\n\t\t\t\t\t

\n\t\t\t\t\t\tWhen you're wrong you can either:\n\t\t\t\t\t

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  1. \n\t\t\t\t\t\t\tOwn up to your decisions\n\t\t\t\t\t\t
  2. \n\t\t\t\t\t\t
  3. \n\t\t\t\t\t\t\tDeny the criticism\n\t\t\t\t\t\t
  4. \n\t\t\t\t\t\t
  5. \n\t\t\t\t\t\t\tAbsolve yourself of any involvement\n\t\t\t\t\t\t
  6. \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

When You're Wrong

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tBeing wrong can feel like a let down of trust, but it's actually an opportunity to build trust.\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tOwn up to your mistakes always. Say \"I was wrong\" and then go fix the problem.\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tCommunicate a sense of urgency and willingness to go above and beyond when fixing the issue.\n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t
  • \n\t\t\t\t\t\t\tDon't obsess over why it happened. It's not nearly as important as fixing the problem.\n\t\t\t\t\t\t
  • \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

When You're Wrong

\n\t\t\t\t\t

\n\t\t\t\t\t\tHow to know when you're wrong:\n\t\t\t\t\t

\n\t\t\t\t\t
    \n\t\t\t\t\t\t
  1. \n\t\t\t\t\t\t\tThe problem still exists\n\t\t\t\t\t\t
  2. \n\t\t\t\t\t\t
  3. \n\t\t\t\t\t\t\tUsers don't get it\n\t\t\t\t\t\t
  4. \n\t\t\t\t\t\t
  5. \n\t\t\t\t\t\t\tEveryone is against you\n\t\t\t\t\t\t
  6. \n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Painting a Duck

\n\t\t\t\t\t
\n\t\t\t\t\t\t

Half of the world is composed of people who have something to say and can't, and the other half who have nothing to say and keep on saying it.

\n\t\t\t\t\t\t

―Robert Frost

\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Painting a Duck

\n\t\t\t\t\t

\n\t\t\t\t\t\tPeople have the tendency to spend a disproportionate amount of time \"bike-shedding,\" which is the act of wasting time on trivial details instead of more pressing but harder to grasp issues.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Painting a Duck

\n\t\t\t\t\t

\n\t\t\t\t\t\tSometimes the solution for a stubborn and unreasonable stakeholder is to \"paint a duck,\" or offer some carefully curated alternatives that you know they won't pick, just to point them in the right direction and focus them on the objectives of the project.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Managing Expectations

\n\t\t\t\t\t
\n\t\t\t\t\t\t

\"Your ability to properly set, adjust, and communicate expectations is more important than your ability to crank out killer designs on a daily basis.\"

\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Managing Expectations

\n\t\t\t\t\t

\n\t\t\t\t\t\tOften projects fail because expectations weren't clearly communicated, not because of a lack of quality of work.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Managing Expectations

\n\t\t\t\t\t

\n\t\t\t\t\t\tWithout the support of your team, you won't succeed.\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Managing Expectations

\n\t\t\t\t\t
\n\t\t\t\t\t\t

\"The way you communicate with and manage relationships with stakeholders is critical to your success as a designer.\"

\n\t\t\t\t\t
\n\t\t\t\t
\n\n\n\n\n\n\n\n\n\n\n\n
\n\t

Chapter 12

\n\t

For the Non-Designers

\n\t
\n\t\t

What we do see depends mainly on what we look for ... In the same field the farmer will notice the crop, the geologists the fossils, botanists the flowers, artists the colouring, sportsmen the cover for the game. Though we may all look at the same things, it does not all follow that we should see them.

\n\t\t

―Sir John Lubbock

\n\t
\n
\n\n
\n\t

There are non-designers who are interested in learning to talk about design. They value good working relationships and realize that clear communication is key, because miscommunication can lead to

\n\t
    \n\t\t
  • \n\t\t\tmissed expectations, which lead to\n\t\t
  • \n\t\t
  • \n\t\t\tdisappointment and distrust.\n\t\t
  • \n\t
\n
\n\n
\n\t\n
\n\n
\n\t

Ten Tips for Working with Designers

\n\t
    \n\t\t
  1. Focus on what works
  2. \n\t\t
  3. Don't provide solutions
  4. \n\t\t
  5. Ask lots of questions
  6. \n\t\t
  7. Don't claim to be the user
  8. \n\t\t
  9. Let us explain our decisions
  10. \n\t
\n
\n\n
\n\t

Ten Tips for Working with Designers

\n\t
    \n\t\t
  1. Empower us to make decisions
  2. \n\t\t
  3. Use helpful language
  4. \n\t\t
  5. Ask if there is data
  6. \n\t\t
  7. Be prepared
  8. \n\t\t
  9. Give us what we need to be successful
  10. \n\t
\n
\n\n
\n\t

Design Project Checklist

\n\t

Shared understanding is vital to the success of a project. Areas to focus on include:

\n\t
    \n\t\t
  • Management, vision and goals
  • \n\t\t
  • Users or customers
  • \n\t\t
  • Workflow and communication
  • \n\t\t
  • Access to information and people
  • \n\t\t
  • Design and technical requirements
  • \n\t
\n
\n\n
\n\t

A Seat at the Table

\n\t

Design is becoming increasingly more valued

\n\t
    \n\t\t
  • The most successful products are ones that are well designed and provide a superior UX released by organizations that have design-centric leadership
  • \n\t\t
  • Getting there is as simple as building better relationships with your designers and empowering them to make decisions
  • \n\t
\n
\n\n
\n\t

Chapter 13

\n\t

Designing for Vision

\n\t
\n\t\t

An artist is not paid for his labor but for his vision.

\n\t\t

―James McNeill Whistler

\n\t
\n
\n\n
\n\t

Recognizing Our Power

\n
\n\n
\n\n
\n\t

Recognizing Our Power

\n\t

Images make the unreal real

\n\t

Having the skills to think and execute visually gets decision makers excited about our ideas

\n\t
    \n\t\t
  • \n\t\t\tDesigning for vision gives us a creative outlet\n\t\t
  • \n\t\t
  • \n\t\t\tIt creates a conversation with other people\n\t\t\n\t\t
  • \n\t\t\tIt brings people together\n\t\t
  • \n\t\t
  • \n\t\t\tIt builds credibility\n\t\t
  • \n\t\t
  • \n\t\t\tIt lives beyond us\n\t\t
  • \n\t
\n
\n\n
\n\t

Practicing Creativity

\n\t

Designing for vision requires us to step away from our projects and dream a little

\n\t
    \n\t\t
  • Find inspiration creating something new requires inspiration, and it's all around us
  • \n\t\t
  • See UX everywhere allow the UX of non-digital things to inform your designs
  • \n\t\t
  • Use a different canvas find something that allows you to create without worrying about delivering
  • \n\t\t
  • Ideate and iterate force yourself to think of different ideas and then refine them
  • \n\t
\n
\n\n
\n\t

Making it Happen

\n\t

Designing for vision isn't usually part of our job description, so finding the value in it is often hard

\n
\n\n
\n\t

Making it Happen

\n\t

Find a different routine

\n\t

It's important to find different time, space, activity and materials to help you relax, free your mind and create.

\n
\n\n
\n\t

Making Stuff Up

\n
\n\n
\n\n
\n\t

Making Stuff Up

\n\t

When it comes down to it, designing for vision is about making things up, expressing them in a tangible way and using visuals to create excitement about the future

\n\t
    \n\t\t
  • Don't limit yourself
  • \n\t\t
  • Start from scratch
  • \n\t\t
  • Don't obsess over the details
  • \n\t\t
  • Make lots of different versions
  • \n\t
\n
\n\n
\n\t

Making Stuff Up

\n\t

Even if our ideas fall flat, they still create value for us, as designers, in learning to communicate about design and build momentum to be successful

\n
\n\n
\n\t

Taking Your Ideas to the Right People

\n\t

Relationships are everything; invest in them

\n
\n\n
\n\t

More Than Pixels

\n\t

Talking about our design is hard because it is an extension of our experiences, regardless of how hard we try to remove ourselves

\n\t

Your designs do not speak for themselves

\n
\n\n
\n\t

Design is always changing, but being a better communicator is something that we can always have.

\n
\n\n\n\n\n\n\n\t\t\t\t
\n\t\t\t\t\t

Questions?

\n\t\t\t\t\t
\n\t\t\t\t\t

\n\t\t\t\t\t\tteamgaslight.com\n\t\t\t\t\t

\n\t\t\t\t
\n\t\t\t
\n\t\t
\n\t\t\n\t\t\n\n\t\t\n\t\n\n", "meta": {"content_hash": "57ce58885d05aaa989952551084fb07f", "timestamp": "", "source": "github", "line_count": 1228, "max_line_length": 349, "avg_line_length": 39.66530944625407, "alnum_prop": 0.6365148124576567, "repo_name": "gaslight/design-book-club_ADD", "id": "a1a28981e76348818bdb56a1d3a0843cb339ba69", "size": "48735", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "index.html", "mode": "33261", "license": "mit", "language": [{"name": "CSS", "bytes": "205654"}, {"name": "HTML", "bytes": "207720"}, {"name": "JavaScript", "bytes": "261184"}]}} {"text": "\ufeff//-----------------------------------------------------------------------\n// \n// Copyright 2012-2013 Rare Crowds, Inc.\n//\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n// \n//-----------------------------------------------------------------------\n\nusing System;\nusing System.Collections;\nusing System.Collections.Generic;\nusing System.Configuration;\nusing System.Globalization;\nusing System.IO;\nusing System.Linq;\nusing System.Reflection;\nusing Activities;\nusing ActivityTestUtilities;\nusing ConfigManager;\nusing DataAccessLayer;\nusing DeliveryNetworkUtilities;\nusing EntityTestUtilities;\nusing EntityUtilities;\nusing Google.Api.Ads.Common.Util;\nusing Google.Api.Ads.Dfp.Lib;\nusing GoogleDfpActivities;\nusing GoogleDfpClient;\nusing GoogleDfpUtilities;\nusing Microsoft.VisualStudio.TestTools.UnitTesting;\nusing Rhino.Mocks;\nusing 
Rhino.Mocks.Constraints;\nusing TestUtilities;\nusing Dfp = Google.Api.Ads.Dfp.v201206;\n\nnamespace GoogleDfpIntegrationTests\n{\n /// Tests for ExportCreativeActivity\n [TestClass]\n public class ExportCreativeActivityFixture : DfpActivityFixtureBase\n {\n /// Gets the bytes of a 300x250 test GIF\n private byte[] TestImageBytes\n {\n get { return EmbeddedResourceHelper.GetEmbeddedResourceAsByteArray(this.GetType(), \"Resources.test.gif\"); }\n }\n\n /// Initialize per-test object(s)/settings\n [TestInitialize]\n public override void TestInitialize()\n {\n base.TestInitialize();\n }\n\n /// Test exporting an image creative\n [TestMethod]\n public void ExportImageCreative()\n {\n var companyEntity = TestNetwork.AdvertiserCompanyEntity;\n var creativeEntity = this.CreateTestImageAdCreative();\n this.AddEntitiesToMockRepository(companyEntity, creativeEntity);\n\n var request = new ActivityRequest\n {\n Task = GoogleDfpActivityTasks.ExportCreative,\n Values =\n {\n { EntityActivityValues.AuthUserId, Guid.NewGuid().ToString(\"N\") },\n { EntityActivityValues.CompanyEntityId, companyEntity.ExternalEntityId.ToString() },\n { EntityActivityValues.CreativeEntityId, creativeEntity.ExternalEntityId.ToString() },\n }\n };\n \n var activity = this.CreateActivity();\n var result = activity.Run(request);\n\n // Validate result\n ActivityTestHelpers.AssertValidSuccessResult(result);\n ActivityTestHelpers.AssertResultHasValues(\n result,\n EntityActivityValues.CreativeEntityId,\n GoogleDfpActivityValues.CreativeId);\n \n // Verify creative was created correctly in DFP\n long creativeId;\n Assert.IsTrue(long.TryParse(result.Values[GoogleDfpActivityValues.CreativeId], out creativeId));\n var creative = this.DfpClient.GetCreatives(new[] { creativeId }).FirstOrDefault() as Dfp.ImageCreative;\n Assert.IsNotNull(creative);\n Assert.AreEqual(creativeId, creative.id);\n Assert.AreEqual(creativeEntity.ExternalName, creative.name);\n Assert.AreEqual(TestNetwork.AdvertiserId, 
creative.advertiserId);\n Assert.IsNotNull(creative.previewUrl);\n Assert.IsFalse(creative.size.isAspectRatio);\n Assert.AreEqual(300, creative.size.width);\n Assert.AreEqual(250, creative.size.height);\n Assert.AreEqual(creativeEntity.GetClickUrl(), creative.destinationUrl);\n }\n\n /// Test exporting third party tag creative\n [TestMethod]\n [Ignore]\n public void ExportThirdPartyTagCreative()\n {\n Assert.Fail();\n }\n\n /// Creates a test creative entity for a 300x250 image ad\n /// The creative entity\n private CreativeEntity CreateTestImageAdCreative()\n {\n return EntityTestHelpers.CreateTestImageAdCreativeEntity(\n new EntityId(),\n \"Test Creative - \" + this.UniqueId,\n 300,\n 250,\n \"http://www.rarecrowds.com/\",\n this.TestImageBytes);\n }\n }\n}\n", "meta": {"content_hash": "847e9e06212c583e11ee9ccbb90d1f5d", "timestamp": "", "source": "github", "line_count": 129, "max_line_length": 119, "avg_line_length": 39.06976744186046, "alnum_prop": 0.6398809523809523, "repo_name": "chinnurtb/OpenAdStack", "id": "7f5fcdc0845d8ee2b35f78afeffe017c76425439", "size": "5042", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "GoogleDfpActivities/GoogleDfpIntegrationTests/ExportCreativeActivityFixture.cs", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "programinit()\n\n#include \n#include \n\n#include \n#include \n\nfunction main() {\n\tprintl(\"testd says 'Hello World!'\");\n\t//CREATE LABELLED COMMON\n\tmv.labelledcommon[1]=new win_common;\n\n//\tperform(\"initgeneral\");\n\tcall initgeneral();\n asm(\" int $03\");\n\twin.srcfile=\"\";\n\twin.datafile=\"\";\n\twin.orec=\"\";\n\twin.wlocked=\"\";\n\twin.reset=\"\";\n\twin.valid=\"\";\n\n\tcall testd2();\n\treturn 0;\n}\n\ndebugprogramexit()\n\n", "meta": {"content_hash": "0945bb063bcbbe4c280322b09c022a45", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 37, "avg_line_length": 14.827586206896552, "alnum_prop": 0.6511627906976745, "repo_name": 
"rdmenezes/exodusdb", "id": "0caada7d87e34cb7641c91bf72cd649fbe0b168d", "size": "458", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "service/service2/test/testd.cpp", "mode": "33188", "license": "mit", "language": [{"name": "ASP", "bytes": "101254"}, {"name": "ApacheConf", "bytes": "97"}, {"name": "C", "bytes": "59938"}, {"name": "C#", "bytes": "7029"}, {"name": "C++", "bytes": "2637888"}, {"name": "CMake", "bytes": "47750"}, {"name": "CSS", "bytes": "4304"}, {"name": "HTML", "bytes": "440897"}, {"name": "Java", "bytes": "1544"}, {"name": "JavaScript", "bytes": "811589"}, {"name": "Makefile", "bytes": "5330"}, {"name": "NSIS", "bytes": "81704"}, {"name": "PHP", "bytes": "18675"}, {"name": "Perl", "bytes": "3746"}, {"name": "Python", "bytes": "15897"}, {"name": "Shell", "bytes": "1116985"}, {"name": "Visual Basic", "bytes": "509"}]}} {"text": "\n\n \n \n \n color: Not compatible \ud83d\udc7c\n \n \n \n \n \n \n \n \n \n \n
\n
\n \n
\n
\n
\n
\n \u00ab Up\n

\n color\n \n 1.4.0\n Not compatible \ud83d\udc7c\n \n

\n

\ud83d\udcc5 (2022-10-08 09:30:10 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed # Synopsis\nbase-bigarray       base\nbase-threads        base\nbase-unix           base\ncamlp5              7.14        Preprocessor-pretty-printer of OCaml\nconf-findutils      1           Virtual package relying on findutils\nconf-perl           2           Virtual package relying on perl\ncoq                 8.8.2       Formal proof management system\nnum                 1.4         The legacy Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.07.1      The OCaml compiler (virtual package)\nocaml-base-compiler 4.07.1      Official release 4.07.1\nocaml-config        1           OCaml Switch Configuration\nocamlfind           1.9.5       A library manager for OCaml\n# opam file:\nopam-version: "2.0"\nmaintainer: "frederic.blanqui@inria.fr"\nauthors: [\n  "Fr\u00e9d\u00e9ric Blanqui"\n  "Adam Koprowski"\n  "S\u00e9bastien Hinderer"\n  "Pierre-Yves Strub"\n  "Sidi Ould Biha"\n  "Solange Coupet-Grimal"\n  "William Delobel"\n  "Hans Zantema"\n  "St\u00e9phane Leroux"\n  "L\u00e9o Ducas"\n  "Johannes Waldmann"\n  "Qiand Wang"\n  "Lianyi Zhang"\n  "Sorin Stratulat"\n]\nlicense: "CeCILL"\nhomepage: "http://color.inria.fr/"\nbug-reports: "color@inria.fr"\nbuild: [\n  [make "-j%{jobs}%"]\n]\ninstall: [make "-f" "Makefile.coq" "install"]\ndepends: [\n  "ocaml"\n  "coq" {>= "8.7" & < "8.8~"}\n  "coq-bignums" {>= "8.7" & < "8.8~"}\n]\ntags: [\n  "date:2017-11-10"\n  "logpath:CoLoR"\n  "category:Computer Science/Algorithms/Correctness proofs of algorithms"\n  "category:Computer Science/Data Types and Data Structures"\n  "category:Computer Science/Lambda Calculi"\n  "category:Mathematics/Algebra"\n  "category:Mathematics/Combinatorics and Graph Theory"\n  "category:Mathematics/Logic/Type theory"\n  "category:Miscellaneous/Extracted Programs/Type checking unification and normalization"\n  "keyword:rewriting"\n  "keyword:termination"\n  "keyword:lambda calculus"\n  
"keyword:list"\n  "keyword:multiset"\n  "keyword:polynomial"\n  "keyword:vectors"\n  "keyword:matrices"\n  "keyword:FSet"\n  "keyword:FMap"\n  "keyword:term"\n  "keyword:context"\n  "keyword:substitution"\n  "keyword:universal algebra"\n  "keyword:varyadic term"\n  "keyword:string"\n  "keyword:alpha-equivalence"\n  "keyword:de Bruijn indices"\n  "keyword:simple types"\n  "keyword:matching"\n  "keyword:unification"\n  "keyword:relation"\n  "keyword:ordering"\n  "keyword:quasi-ordering"\n  "keyword:lexicographic ordering"\n  "keyword:ring"\n  "keyword:semiring"\n  "keyword:well-foundedness"\n  "keyword:noetherian"\n  "keyword:finitely branching"\n  "keyword:dependent choice"\n  "keyword:infinite sequences"\n  "keyword:non-termination"\n  "keyword:loop"\n  "keyword:graph"\n  "keyword:path"\n  "keyword:transitive closure"\n  "keyword:strongly connected component"\n  "keyword:topological ordering"\n  "keyword:rpo"\n  "keyword:horpo"\n  "keyword:dependency pair"\n  "keyword:dependency graph"\n  "keyword:semantic labeling"\n  "keyword:reducibility"\n  "keyword:Girard"\n  "keyword:fixpoint theorem"\n  "keyword:Tarski"\n  "keyword:pigeon-hole principle"\n  "keyword:Ramsey theorem"\n]\nsynopsis: "A library on rewriting theory and termination"\nurl {\n  src: "http://files.inria.fr/blanqui/color/color.1.4.0.tar.gz"\n  checksum: "md5=012e9be1fee95f5bea00cd91133302c1"\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install \ud83c\udfdc\ufe0f

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-color.1.4.0 coq.8.8.2
\n
Return code
\n
5120
\n
Output
\n
[NOTE] Package coq is already installed (current version is 8.8.2).\nThe following dependencies couldn't be met:\n  - coq-color -> coq < 8.8~ -> ocaml < 4.06.0\n      base of this switch (use `--unlock-base' to force)\nYour request can't be satisfied:\n  - No available version of coq satisfies the constraints\nNo solution found, exiting\n
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
opam remove -y coq; opam install -y --show-action --unlock-base coq-color.1.4.0
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Install \ud83d\ude80

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Installation size

\n

No files were installed.

\n

Uninstall \ud83e\uddf9

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub \u00a9 Guillaume Claret \ud83d\udc23\n

\n
\n
\n \n \n \n\n", "meta": {"content_hash": "8d91af6df00d1a3ae0be1f5e53ff74c3", "timestamp": "", "source": "github", "line_count": 238, "max_line_length": 159, "avg_line_length": 38.36134453781513, "alnum_prop": 0.5904709748083242, "repo_name": "coq-bench/coq-bench.github.io", "id": "04bc43da7728c1bf5cdf3c31c6ba0b9fc3f59852", "size": "9160", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.07.1-2.0.6/released/8.8.2/color/1.4.0.html", "mode": "33188", "license": "mit", "language": []}} {"text": " :feature do\n stub_authorization!\n\n after do\n Capybara.ignore_hidden_elements = true\n end\n\n before do\n Capybara.ignore_hidden_elements = false\n end\n\n context \"managing taxons\" do\n def selected_taxons\n find(\"#product_taxon_ids\").value.split(',').map(&:to_i).uniq\n end\n\n it \"should allow an admin to manage taxons\", :js => true do\n taxon_1 = create(:taxon)\n taxon_2 = create(:taxon, :name => 'Clothing')\n product = create(:product)\n product.taxons << taxon_1\n\n visit spree.admin_path\n click_link \"Products\"\n within(\"table.index\") do\n click_icon :edit\n end\n\n expect(find(\".select2-search-choice\").text).to eq(taxon_1.name)\n expect(selected_taxons).to match_array([taxon_1.id])\n\n select2_search \"Clothing\", :from => \"Taxons\"\n click_button \"Update\"\n expect(selected_taxons).to match_array([taxon_1.id, taxon_2.id])\n\n # Regression test for #2139\n sleep(1)\n expect(first(\".select2-search-choice\", text: taxon_1.name)).to be_present\n expect(first(\".select2-search-choice\", text: taxon_2.name)).to be_present\n end\n end\nend\n", "meta": {"content_hash": "80ce74674e938cb4466a3aa2edab34db", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 79, "avg_line_length": 27.136363636363637, "alnum_prop": 0.6381909547738693, "repo_name": "sunny2601/spree1", "id": "254d52aee286205457d4c5cd07a50cf9501f0c9b", "size": "1194", "binary": false, "copies": "11", "ref": "refs/heads/master", "path": 
"backend/spec/features/admin/products/edit/taxons_spec.rb", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "CSS", "bytes": "201085"}, {"name": "CoffeeScript", "bytes": "32035"}, {"name": "HTML", "bytes": "470128"}, {"name": "JavaScript", "bytes": "37692"}, {"name": "Ruby", "bytes": "2012087"}, {"name": "Shell", "bytes": "2346"}]}} {"text": "package internal\n\nimport (\n\t\"strconv\"\n\n\t\"github.com/kataras/iris/v12/context\"\n\n\t\"golang.org/x/text/feature/plural\"\n\t\"golang.org/x/text/message\"\n\t\"golang.org/x/text/message/catalog\"\n)\n\n// PluralCounter if completes by an input argument of a message to render,\n// then the plural renderer will resolve the plural count\n// and any variables' counts. This is useful when the data is not a type of Map or integers.\ntype PluralCounter interface {\n\t// PluralCount returns the plural count of the message.\n\t// If returns -1 then this is not a valid plural message.\n\tPluralCount() int\n\t// VarCount should return the variable count, based on the variable name.\n\tVarCount(name string) int\n}\n\n// PluralMessage holds the registered Form and the corresponding Renderer.\n// It is used on the `Message.AddPlural` method.\ntype PluralMessage struct {\n\tForm PluralForm\n\tRenderer Renderer\n}\n\ntype independentPluralRenderer struct {\n\tkey string\n\tprinter *message.Printer\n}\n\nfunc newIndependentPluralRenderer(c *Catalog, loc *Locale, key string, msgs ...catalog.Message) (Renderer, error) {\n\tbuilder := catalog.NewBuilder(catalog.Fallback(c.Locales[0].tag))\n\tif err := builder.Set(loc.tag, key, msgs...); err != nil {\n\t\treturn nil, err\n\t}\n\tprinter := message.NewPrinter(loc.tag, message.Catalog(builder))\n\treturn &independentPluralRenderer{key, printer}, nil\n}\n\nfunc (m *independentPluralRenderer) Render(args ...interface{}) (string, error) {\n\treturn m.printer.Sprintf(m.key, args...), nil\n}\n\n// A PluralFormDecoder should report and return whether\n// a specific \"key\" is a 
plural one. This function\n// can be implemented and set on the `Options` to customize\n// the plural forms and their behavior in general.\n//\n// See the `DefaultPluralFormDecoder` package-level\n// variable for the default implementation one.\ntype PluralFormDecoder func(loc context.Locale, key string) (PluralForm, bool)\n\n// DefaultPluralFormDecoder is the default `PluralFormDecoder`.\n// Supprots \"zero\", \"one\", \"two\", \"other\", \"=x\", \"x\".\nvar DefaultPluralFormDecoder = func(_ context.Locale, key string) (PluralForm, bool) {\n\tif isDefaultPluralForm(key) {\n\t\treturn pluralForm(key), true\n\t}\n\n\treturn nil, false\n}\n\nfunc isDefaultPluralForm(s string) bool {\n\tswitch s {\n\tcase \"zero\", \"one\", \"two\", \"other\":\n\t\treturn true\n\tdefault:\n\t\tif len(s) > 1 {\n\t\t\tch := s[0]\n\t\t\tif ch == '=' || ch == '<' || ch == '>' {\n\t\t\t\tif isDigit(s[1]) {\n\t\t\t\t\treturn true\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\treturn false\n\t}\n}\n\n// A PluralForm is responsible to decode\n// locale keys to plural forms and match plural forms\n// based on the given pluralCount.\n//\n// See `pluralForm` package-level type for a default implementation.\ntype PluralForm interface {\n\tString() string\n\t// the string is a verified plural case's raw string value.\n\t// Field for priority on which order to register the plural cases.\n\tLess(next PluralForm) bool\n\tMatchPlural(pluralCount int) bool\n}\n\ntype pluralForm string\n\nfunc (f pluralForm) String() string {\n\treturn string(f)\n}\n\nfunc (f pluralForm) Less(next PluralForm) bool {\n\tform1 := f.String()\n\tform2 := next.String()\n\n\t// Order by\n\t// - equals,\n\t// - less than\n\t// - greater than\n\t// - \"zero\", \"one\", \"two\"\n\t// - rest is last \"other\".\n\tdig1, typ1, hasDig1 := formAtoi(form1)\n\tif typ1 == eq {\n\t\treturn true\n\t}\n\n\tdig2, typ2, hasDig2 := formAtoi(form2)\n\tif typ2 == eq {\n\t\treturn false\n\t}\n\n\t// digits smaller, number.\n\tif hasDig1 {\n\t\treturn 
!hasDig2 || dig1 < dig2\n\t}\n\n\tif hasDig2 {\n\t\treturn false\n\t}\n\n\tif form1 == \"other\" {\n\t\treturn false // other go to last.\n\t}\n\n\tif form2 == \"other\" {\n\t\treturn true\n\t}\n\n\tif form1 == \"zero\" {\n\t\treturn true\n\t}\n\n\tif form2 == \"zero\" {\n\t\treturn false\n\t}\n\n\tif form1 == \"one\" {\n\t\treturn true\n\t}\n\n\tif form2 == \"one\" {\n\t\treturn false\n\t}\n\n\tif form1 == \"two\" {\n\t\treturn true\n\t}\n\n\tif form2 == \"two\" {\n\t\treturn false\n\t}\n\n\treturn false\n}\n\nfunc (f pluralForm) MatchPlural(pluralCount int) bool {\n\tswitch f {\n\tcase \"other\":\n\t\treturn true\n\tcase \"=0\", \"zero\":\n\t\treturn pluralCount == 0\n\tcase \"=1\", \"one\":\n\t\treturn pluralCount == 1\n\tcase \"=2\", \"two\":\n\t\treturn pluralCount == 2\n\tdefault:\n\t\t// <5 or =5\n\n\t\tn, typ, ok := formAtoi(string(f))\n\t\tif !ok {\n\t\t\treturn false\n\t\t}\n\n\t\tswitch typ {\n\t\tcase eq:\n\t\t\treturn n == pluralCount\n\t\tcase lt:\n\t\t\treturn pluralCount < n\n\t\tcase gt:\n\t\t\treturn pluralCount > n\n\t\tdefault:\n\t\t\treturn false\n\t\t}\n\t}\n}\n\nfunc makeSelectfVars(text string, vars []Var, insidePlural bool) ([]catalog.Message, []Var) {\n\tnewVars := sortVars(text, vars)\n\tnewVars = removeVarsDuplicates(newVars)\n\tmsgs := selectfVars(newVars, insidePlural)\n\treturn msgs, newVars\n}\n\nfunc selectfVars(vars []Var, insidePlural bool) []catalog.Message {\n\tmsgs := make([]catalog.Message, 0, len(vars))\n\tfor _, variable := range vars {\n\t\targth := variable.Argth\n\t\tif insidePlural {\n\t\t\targth++\n\t\t}\n\n\t\tmsg := catalog.Var(variable.Name, plural.Selectf(argth, variable.Format, variable.Cases...))\n\t\t// fmt.Printf(\"%s:%d | cases | %#+v\\n\", variable.Name, variable.Argth, variable.Cases)\n\t\tmsgs = append(msgs, msg)\n\t}\n\n\treturn msgs\n}\n\nconst (\n\teq uint8 = iota + 1\n\tlt\n\tgt\n)\n\nfunc formType(ch byte) uint8 {\n\tswitch ch {\n\tcase '=':\n\t\treturn eq\n\tcase '<':\n\t\treturn lt\n\tcase 
'>':\n\t\treturn gt\n\t}\n\n\treturn 0\n}\n\nfunc formAtoi(form string) (int, uint8, bool) {\n\tif len(form) < 2 {\n\t\treturn -1, 0, false\n\t}\n\n\ttyp := formType(form[0])\n\tif typ == 0 {\n\t\treturn -1, 0, false\n\t}\n\n\tdig, err := strconv.Atoi(form[1:])\n\tif err != nil {\n\t\treturn -1, 0, false\n\t}\n\treturn dig, typ, true\n}\n\nfunc isDigit(ch byte) bool {\n\treturn '0' <= ch && ch <= '9'\n}\n", "meta": {"content_hash": "e488202c3e762667465ce73ac23d024b", "timestamp": "", "source": "github", "line_count": 261, "max_line_length": 115, "avg_line_length": 20.973180076628353, "alnum_prop": 0.6691633175009134, "repo_name": "kataras/iris", "id": "75a01667d939c5f1577b0276e8f6b9855591d60e", "size": "5474", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "i18n/internal/plural.go", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "CSS", "bytes": "15415"}, {"name": "Dockerfile", "bytes": "2408"}, {"name": "Go", "bytes": "2521098"}, {"name": "HTML", "bytes": "59845"}, {"name": "JavaScript", "bytes": "98505"}, {"name": "Pug", "bytes": "1236"}, {"name": "Shell", "bytes": "39"}]}} {"text": " \r\n\r\n\r\n\t\r\n\t\r\n\t\t\r\n\t\t\r\n\t\t\r\n\t\t\r\n\t\t\t\r\n\t\t\r\n\t\r\n\r\n\t\r\n\t\r\n\t\t\r\n\t\t\r\n\t\t\r\n\t\t\r\n\t\t\r\n\t\t\r\n\t\t\t\r\n\t\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\t\r\n\t\t\r\n\t\t\r\n\t\t\r\n\t\t\r\n\t\t\r\n\t\t\t\r\n\t\t\r\n\t\r\n\t\r\n\t\r\n\t\r\n\t\t\r\n\t\t\r\n\t\t\r\n\t\t\r\n\t\r\n", "meta": {"content_hash": "cf9a936644d75d3ea36a7e705f7618b2", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 83, "avg_line_length": 39.62222222222222, "alnum_prop": 0.635445877734156, "repo_name": "wingcrawler/shop_bbs", "id": "ab2daa0767f6e3298790af85b698d4eb90f81c4f", "size": "1783", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/resources/log4j.xml", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "2269598"}, {"name": "HTML", "bytes": 
"294987"}, {"name": "Java", "bytes": "951244"}, {"name": "JavaScript", "bytes": "3130107"}, {"name": "PHP", "bytes": "6307"}]}} {"text": "/**\n * Prevajalnik za programski jezik PREV.\n *

\n * Celotna izvorna koda prevajalnika za programski jezik PREV je zbrana v paketu {@link compiler} in njegovih podpaketih.\n *\n * @author sliva\n */\npackage compiler;", "meta": {"content_hash": "1ac830c989edb3428b854b04cb55405d", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 121, "avg_line_length": 26.875, "alnum_prop": 0.7302325581395349, "repo_name": "jarheadSLO/ProteusCompiler", "id": "f0809f7b14d6f493006e94e1a8e26ca621acbc2f", "size": "215", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/compiler/package-info.java", "mode": "33188", "license": "mit", "language": [{"name": "Java", "bytes": "288143"}]}} {"text": "\ufeffusing System;\nusing System.Collections.Generic;\nusing System.IO;\nusing System.Linq;\nusing System.Text.RegularExpressions;\nusing System.Xml.Linq;\n\nnamespace Dextem\n{\n ///

\n /// Controls name processing of method 'M:' and type 'T:' <member> nodes. This class cannot be inherited.\n /// \n public sealed class MethodTypeProcessor : BaseProcessor\n {\n /// \n /// Creates a new instance of MethodTypeProcessor using the given ProcessorRegistry.\n /// \n /// The ProcessorRegistry instance to use.\n public MethodTypeProcessor(ProcessorRegistry registry) : base(registry) { }\n\n /// \n /// Executes name processing of the current method 'M:' or type 'T:' <member> element.\n /// \n /// The current StringWriter to use.\n /// The current root element to process.\n /// The current processing context.\n /// The updated processing context.\n public override Dictionary Process(StringWriter writer, XElement root, Dictionary context)\n {\n Args.IsNotNull(() => writer, () => root, () => context);\n\n var memberName = root.Attribute(XName.Get(\"name\")).Value;\n char memberType = memberName[0];\n\n if (memberType == 'M')\n {\n memberName = MethodTypeProcessor.RearrangeTypeParametersInContext(root, memberName, context, false);\n memberName = MethodTypeProcessor.RearrangeParametersInContext(root, memberName, context);\n }\n\n context[\"memberName\"] = memberName;\n\n if (memberType == 'T')\n {\n var typeNameStartIndex = 3 + context[\"assembly\"].Length;\n var scrubbedName = MethodTypeProcessor.ReplaceForTypeParameters(root, memberName, false, context);\n var shortMemberName = scrubbedName.Substring(typeNameStartIndex);\n writer.WriteLine(\"\\n## {0}\\n\", shortMemberName);\n context[\"typeName\"] = shortMemberName;\n }\n\n context[\"memberType\"] = memberType.ToString();\n\n context[\"lastNode\"] = memberName;\n\n return base.Process(writer, root, context);\n }\n\n private static List GetParameterTypes(string memberName)\n {\n var parameterTypes = new List();\n\n Match match = Regex.Match(memberName, \"\\\\((.*)\\\\)\");\n\n // Groups[0] = (Type, Type, Type)\n // Groups[1] = Type, Type, Type\n\n if (match.Groups.Count < 1)\n {\n return parameterTypes;\n 
}\n\n string rawParameterString = string.Empty;\n\n if (match.Groups.Count == 1)\n {\n rawParameterString = match.Groups[0].Value.Replace(\"(\", \"\").Replace(\")\", \"\").Replace(\" \", \"\");\n }\n\n if (match.Groups.Count == 2)\n {\n rawParameterString = match.Groups[1].Value.Replace(\" \", \"\");\n }\n\n var rawParameterArray = rawParameterString.Split(',');\n\n for (var i = 0; i < rawParameterArray.Length; i++)\n {\n var raw = rawParameterArray[i];\n\n var isGeneric = (raw.Contains(\"{\") || raw.Contains(\"<\") || raw.Contains(\"}\") || raw.Contains(\">\"));\n var isOpenOnly = (isGeneric && !(raw.Contains(\"}\") || raw.Contains(\">\")));\n\n if (!isGeneric || isOpenOnly)\n {\n parameterTypes.Add(raw);\n continue;\n }\n\n var isCloseOnly = (isGeneric && !(raw.Contains(\"{\") || raw.Contains(\"<\")));\n\n if (isCloseOnly)\n {\n parameterTypes[parameterTypes.Count - 1] += \", \" + raw;\n continue;\n }\n }\n\n return parameterTypes;\n }\n\n private static string RearrangeParametersInContext(XElement methodMember, string memberName, Dictionary context)\n {\n var parameterTypes = MethodTypeProcessor.GetParameterTypes(memberName);\n\n List paramElems = new List(methodMember.Elements(\"param\"));\n if (parameterTypes.Count != paramElems.Count)\n {\n // the parameter count do not match, we can't do the rearrangement.\n return memberName;\n }\n\n string newParamString = \"\";\n for (int i = 0; i < paramElems.Count; i++)\n {\n XElement paramElem = paramElems[i];\n string paramName = paramElem.Attribute(XName.Get(\"name\")).Value;\n string paramType = parameterTypes[i];\n if (newParamString.Length > 0)\n {\n newParamString += \", \";\n }\n newParamString += paramName;\n context[paramName] = paramType;\n }\n\n string newMethodPrototype = Regex.Replace(memberName,\n \"\\\\(.*\\\\)\",\n \"(\" + newParamString + \")\");\n\n return newMethodPrototype;\n }\n\n private static string RearrangeTypeParametersInContext(XElement member, string memberName, Dictionary context, bool 
skipReplace)\n {\n var methodPrototype = memberName;\n if (!skipReplace)\n {\n methodPrototype = MethodTypeProcessor.ReplaceForTypeParameters(member, memberName, true, context);\n }\n\n var matches = Regex.Matches(methodPrototype, \"\\\\{(`\\\\d)+}\"); //Matches: {'0} && {'1'2} //M:GraphExec.BaseEventAggregator.GetEventType(GraphExec.IHandle{'0})\n\n var typedParams = matches.ToList();\n var replaceTypedParamString = typedParams.Select(x => x.Groups[0].Value);\n\n if (!replaceTypedParamString.Any())\n {\n // nothing to do...\n return methodPrototype;\n }\n\n var paramElems = new List(member.Elements(\"typeparam\"));\n\n var newParamString = \"\";\n var indexList = new List();\n\n foreach (var replaceString in replaceTypedParamString)\n {\n newParamString = \"<\";\n\n var scrubBrackets = replaceString.Substring(1, replaceString.Length - 3);\n\n indexList = scrubBrackets.Split('\\'').Cast().ToList(); // \"1, 2\"\n\n if (indexList.Count() <= paramElems.Count)\n {\n foreach (var index in indexList)\n {\n if (newParamString != \"<\")\n {\n newParamString += \", \";\n }\n\n var typeParam = paramElems[index];\n\n var paramType = typeParam.Attribute(XName.Get(\"name\")).Value;\n\n newParamString += paramType;\n }\n }\n else\n {\n newParamString += \"*Unknown*\";\n }\n\n newParamString += \">\";\n\n methodPrototype = methodPrototype.Replace(replaceString, newParamString);\n }\n\n return methodPrototype;\n }\n\n private static string ReplaceForTypeParameters(XElement methodMember, string memberName, bool methodType, Dictionary context)\n {\n string methodPrototype = memberName;\n var matches = Regex.Matches(methodPrototype, \"\\\\`(\\\\d)\"); //Matches: '1 and 1 //M:GraphExec.BaseEventAggregator.GetEventType`1\n\n // Match 1 = Type Parameter Count ('1)\n\n if (matches.Count == 0)\n {\n return methodPrototype;\n }\n\n var typeParamCount = Convert.ToInt32(matches[0].Groups[1].Value);\n\n List paramElems = new List(methodMember.Elements(\"typeparam\"));\n if (typeParamCount != 
paramElems.Count)\n {\n System.Diagnostics.Debug.WriteLine(\"Type Parameters and TypeParamList not equal for replacing generic types' type parameters.\");\n // the parameter count do not match, we can't do the rearrangement.\n return methodPrototype;\n }\n\n string newParamString = \"\";\n for (int i = 0; i < paramElems.Count; i++)\n {\n XElement paramElem = paramElems[i];\n string paramType = paramElem.Attribute(XName.Get(\"name\")).Value;\n if (newParamString != \"\")\n {\n newParamString += \", \";\n }\n newParamString += paramType;\n context[paramType] = paramType;\n }\n\n var paramMatches = Regex.Matches(methodPrototype, \"\\\\{``\\\\d}\");\n if (paramMatches.Count > 0) // {``0} and {``1} and {``2``3}\n {\n methodPrototype = MethodTypeProcessor.RearrangeTypeParametersInContext(methodMember, methodPrototype, context, true);\n }\n\n if (methodType)\n {\n string newMethodPrototype = Regex.Replace(methodPrototype,\n \"\\\\``\\\\d\",\n \"<\" + newParamString + \">\");\n\n return newMethodPrototype;\n }\n else\n {\n string newMethodPrototype = Regex.Replace(methodPrototype,\n \"\\\\`\\\\d\",\n \"<\" + newParamString + \">\");\n\n return newMethodPrototype;\n }\n }\n }\n}\n", "meta": {"content_hash": "e2e10abfe5e78f82de6d1a28577a327a", "timestamp": "", "source": "github", "line_count": 264, "max_line_length": 168, "avg_line_length": 37.25757575757576, "alnum_prop": 0.5262301748678324, "repo_name": "GraphExec/Dextem", "id": "1e7181d4fb0f1d115fd268143b5be7a6c63dbb68", "size": "9838", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Dextem/MethodTypeProcessor.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "45999"}]}} {"text": "SHORT_NAME := deis-integration\n\nexport GO15VENDOREXPERIMENT=1\n\n# dockerized development environment variables\nREPO_PATH := github.com/arschles/${SHORT_NAME}\nDEV_ENV_IMAGE := quay.io/deis/go-dev:0.2.0\nDEV_ENV_WORK_DIR := /go/src/${REPO_PATH}\nDEV_ENV_PREFIX := docker run --rm -v 
${CURDIR}:${DEV_ENV_WORK_DIR} -w ${DEV_ENV_WORK_DIR}\nDEV_ENV_CMD := ${DEV_ENV_PREFIX} ${DEV_ENV_IMAGE}\n\nLDFLAGS := \"-s -X main.version=${VERSION}\"\nBINDIR := ./rootfs/bin\n\nREGISTRY ?= ${DEV_REGISTRY}\nIMAGE_PREFIX ?= arschles\nVERSION ?= git-$(shell git rev-parse --short HEAD)\n\nIMAGE := ${REGISTRY}${IMAGE_PREFIX}/${SHORT_NAME}:${VERSION}\n\nbootstrap:\n\t${DEV_ENV_CMD} glide up\n\nbuild:\n\tmkdir -p ${BINDIR}\n\t${DEV_ENV_PREFIX} -e CGO_ENABLED=0 ${DEV_ENV_IMAGE} go build -a -installsuffix cgo -ldflags '-s' -o $(BINDIR)/boot || exit 1\n\ndocker-build:\n\t# build the main image\n\tdocker build --rm -t ${IMAGE} rootfs\n\ndocker-push: docker-build\n\tdocker push ${IMAGE}\n", "meta": {"content_hash": "2b05663106b54cbf9c43c7bc054ba504", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 125, "avg_line_length": 28.242424242424242, "alnum_prop": 0.6856223175965666, "repo_name": "arschles/deis-integration", "id": "db4f5af0ee37baebd2024d1d5ca5e452192a4ff6", "size": "932", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Makefile", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Go", "bytes": "3270"}, {"name": "Makefile", "bytes": "932"}]}} {"text": "\"use strict\";\nangular.module('myApp.login', ['firebase.utils', 'firebase.auth', 'ngRoute'])\n\n .config(['$routeProvider', function($routeProvider) {\n $routeProvider.when('/login', {\n controller: 'LoginCtrl',\n templateUrl: 'login/login.html'\n });\n }])\n\n .controller('LoginCtrl', ['$scope', 'Auth', '$location', 'fbutil', function($scope, Auth, $location, fbutil) {\n $scope.email = null;\n $scope.pass = null;\n $scope.confirm = null;\n $scope.createMode = false;\n\n // $scope.login = function(email, pass) {\n // $scope.err = null;\n // Auth.$authWithPassword({ email: email, password: pass }, {rememberMe: true})\n // .then(function(/* user */) {\n // $location.path('/account');\n // }, function(err) {\n // $scope.err = errMessage(err);\n // });\n // };\n 
\n $scope.googleLogin = function() {\n /*console.log('s', $scope); \n console.log('a', Auth); \n console.log('l', $location); \n console.log('f', fbutil);*/\n \n \n Auth.$authWithOAuthPopup(\"google\")\n .then(function(authData) {\n console.log(\"Logged in as:\", authData.uid);\n })\n .catch(function(error) {\n console.error(\"Authentication failed:\", error);\n });\n };\n\n // $scope.createAccount = function() {\n // $scope.err = null;\n // if( assertValidAccountProps() ) {\n // var email = $scope.email;\n // var pass = $scope.pass;\n // // create user credentials in Firebase auth system\n // Auth.$createUser({email: email, password: pass})\n // .then(function() {\n // // authenticate so we have permission to write to Firebase\n // return Auth.$authWithPassword({ email: email, password: pass });\n // })\n // .then(function(user) {\n // // create a user profile in our data store\n // var ref = fbutil.ref('users', user.uid);\n // return fbutil.handler(function(cb) {\n // ref.set({email: email, name: name||firstPartOfEmail(email)}, cb);\n // });\n // })\n // .then(function(/* user */) {\n // // redirect to the account page\n // $location.path('/account');\n // }, function(err) {\n // $scope.err = errMessage(err);\n // });\n // }\n // };\n\n function assertValidAccountProps() {\n if( !$scope.email ) {\n $scope.err = 'Please enter an email address';\n }\n else if( !$scope.pass || !$scope.confirm ) {\n $scope.err = 'Please enter a password';\n }\n else if( $scope.createMode && $scope.pass !== $scope.confirm ) {\n $scope.err = 'Passwords do not match';\n }\n return !$scope.err;\n }\n\n function errMessage(err) {\n return angular.isObject(err) && err.code? 
err.code : err + '';\n }\n\n function firstPartOfEmail(email) {\n return ucfirst(email.substr(0, email.indexOf('@'))||'');\n }\n\n function ucfirst (str) {\n // inspired by: http://kevin.vanzonneveld.net\n str += '';\n var f = str.charAt(0).toUpperCase();\n return f + str.substr(1);\n }\n }]);", "meta": {"content_hash": "7c44fbb8056731391188daa0ba1384ea", "timestamp": "", "source": "github", "line_count": 97, "max_line_length": 112, "avg_line_length": 32.350515463917525, "alnum_prop": 0.5274059910771192, "repo_name": "chilltemp/smartthings-dashboard", "id": "a23926b71c74f3c706e3d978f28e4216d68c5a5d", "size": "3138", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/login/login.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "21668"}, {"name": "HTML", "bytes": "15180"}, {"name": "JavaScript", "bytes": "34852"}]}} {"text": "title: arg41\ntype: products\nimage: /img/Screen Shot 2017-05-09 at 11.56.54 AM.png\nheading: g41\ndescription: lksadjf lkasdjf lksajdf lksdaj flksadj flksa fdj\nmain:\n heading: Foo Bar BAz\n description: |-\n ***This is i a thing***kjh hjk kj \n # Blah Blah\n ## Blah![undefined](undefined)\n ### Baah\n image1:\n alt: kkkk\n---\n", "meta": {"content_hash": "9504474a28142b510a69fc2a5c1da3c2", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 61, "avg_line_length": 22.333333333333332, "alnum_prop": 0.6656716417910448, "repo_name": "pblack/kaldi-hugo-cms-template", "id": "6ed8e6b8f1062e45f514a82947ed21ee3eb15641", "size": "339", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "site/content/pages2/arg41.md", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "94394"}, {"name": "HTML", "bytes": "18889"}, {"name": "JavaScript", "bytes": "10014"}]}} {"text": "package org.ybiquitous.messages.generator;\n\nimport static org.apache.velocity.runtime.RuntimeConstants.RESOURCE_LOADER;\nimport static 
org.apache.velocity.runtime.RuntimeConstants.RUNTIME_LOG_LOGSYSTEM_CLASS;\n\nimport java.net.URL;\nimport java.util.Properties;\n\nimport org.apache.velocity.runtime.log.NullLogChute;\nimport org.apache.velocity.runtime.log.SystemLogChute;\nimport org.apache.velocity.runtime.resource.loader.URLResourceLoader;\n\nfinal class VelocityConfig extends Properties {\n\n private static final long serialVersionUID = 1L;\n\n private static final boolean verbose = false;\n \n public VelocityConfig(URL url) {\n set(RESOURCE_LOADER, \"url\");\n set(\"url.resource.loader.class\", URLResourceLoader.class);\n set(\"url.resource.loader.root\", getParent(url));\n set(\"url.resource.loader.cache\", false);\n set(\"url.resource.loader.modificationCheckInterval\", 0);\n if (verbose) {\n set(RUNTIME_LOG_LOGSYSTEM_CLASS, SystemLogChute.class);\n set(SystemLogChute.RUNTIME_LOG_LEVEL_KEY, \"trace\");\n set(SystemLogChute.RUNTIME_LOG_SYSTEM_ERR_LEVEL_KEY, \"trace\");\n } else {\n set(RUNTIME_LOG_LOGSYSTEM_CLASS, NullLogChute.class);\n }\n }\n\n private String getParent(URL url) {\n String urlStr = url.toString();\n return urlStr.substring(0, urlStr.lastIndexOf('/'));\n }\n\n private void set(String key, Object value) {\n if (value instanceof String) {\n setProperty(key, (String) value);\n } else if (value instanceof Class) {\n setProperty(key, ((Class) value).getCanonicalName());\n } else {\n setProperty(key, value.toString());\n }\n }\n}\n", "meta": {"content_hash": "f8f93b6096686e4203d5b0f8b5b8e828", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 87, "avg_line_length": 35.854166666666664, "alnum_prop": 0.6717024985473562, "repo_name": "ybiquitous/messages", "id": "3bdfa40b433233dbd8ba507d82784ecaec132ee2", "size": "1721", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "messages-generator/src/main/java/org/ybiquitous/messages/generator/VelocityConfig.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": 
"32560"}, {"name": "Scala", "bytes": "3937"}]}} {"text": "extern \"C\"\n{\n#endif\n\nvoid sw_log_error_exit(const char *fmt, ...);\nvoid sw_log_error(const char *fmt, ...);\nvoid sw_log_warn(const char *fmt, ...);\nvoid sw_log_msg(const char *fmt, ...);\nvoid sw_log_debug(const char *fmt, ...);\n\n#ifdef __cplusplus\n}\n#endif\n\n#endif\n", "meta": {"content_hash": "9cf285d1910018de7ee794f299c446c5", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 45, "avg_line_length": 17.666666666666668, "alnum_prop": 0.630188679245283, "repo_name": "shuisheng918/libswevent", "id": "bd2122d2d0ab5b5b7c5eb9130484851ed71d6042", "size": "327", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sw_log.h", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "44833"}, {"name": "Makefile", "bytes": "752"}]}} {"text": "\n\n nord_stupid\n 0.0.1\n Mission control\n\n Magnus Thor Benediktsson\n Gustav Sandstr\u00f6m\n Tobias Lundin\n Lucas \u00c5str\u00f6m\n\n MIT\n\n\n https://github.com/Jinxit/nordic-robotics\n\n\n catkin\n roscpp\n std_msgs\n nord_messages\n roslib\n roscpp\n std_msgs\n message_runtime\n nord_messages\n roslib\n\n\n \n \n", "meta": {"content_hash": "93cd75054833a7779eaaf6edefd67de4", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 72, "avg_line_length": 28.53125, "alnum_prop": 0.723986856516977, "repo_name": "nordic-robotics/nord_stupid", "id": "548fb76ed972560815328eeef264d93a097fece8", "size": "916", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "package.xml", "mode": "33188", "license": "mit", "language": [{"name": "C++", "bytes": "23189"}]}} {"text": "\ufeff// ReSharper disable All\n\nnamespace OpenTl.Schema\n{\n\tusing System;\n\tusing System.Collections;\n\tusing System.Text;\n\n\tusing OpenTl.Schema;\n\tusing OpenTl.Schema.Serialization.Attributes;\t\n\n\t[Serialize(0x6242c773)]\n\tpublic sealed class TFileHash : IFileHash\n\t{\n [SerializationOrder(0)]\n 
public int Offset {get; set;}\n\n [SerializationOrder(1)]\n public int Limit {get; set;}\n\n [SerializationOrder(2)]\n public byte[] Hash {get; set;}\n\n\t}\n}\n", "meta": {"content_hash": "7a1757851ed642d798470facd18d95c3", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 47, "avg_line_length": 18.8, "alnum_prop": 0.674468085106383, "repo_name": "OpenTl/OpenTl.Schema", "id": "43deafbaf5530fb456dae21690ada51f5f70b141", "size": "472", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/OpenTl.Schema/_generated/_Entities/FileHash/TFileHash.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "810786"}, {"name": "F#", "bytes": "19501"}, {"name": "PowerShell", "bytes": "1288"}]}} {"text": "package com.eaw1805.data.managers.beans;\n\nimport com.eaw1805.data.model.Game;\nimport com.eaw1805.data.model.Nation;\nimport com.eaw1805.data.model.fleet.Ship;\nimport com.eaw1805.data.model.map.Position;\nimport com.eaw1805.data.model.map.Region;\nimport com.eaw1805.data.model.map.Sector;\n\nimport java.math.BigInteger;\nimport java.util.List;\nimport java.util.Map;\n\n/**\n * The interface of the ShipManagerBean.\n */\npublic interface ShipManagerBean extends EntityBean {\n\n /**\n * get the Ship from the database that corresponds to the input id.\n *\n * @param entityID the id of the Entity object.\n * @return an Entity object.\n */\n Ship getByID(int entityID);\n\n /**\n * adding a new entry into the database, according to the input object it\n * receives.\n *\n * @param value the Ship tha we want to add.\n */\n void add(final Ship value);\n\n /**\n * updating an entry into the database, according to the input object it\n * receives.\n *\n * @param value the Ship tha we want to update.\n */\n void update(final Ship value);\n\n /**\n * Delete the input Ship from the database.\n *\n * @param entity the Ship tha we want to delete\n */\n void delete(Ship entity);\n\n /**\n * Listing all the Ships from the 
database.\n *\n * @return a list of all the Ships that exist inside the table Avatar.\n */\n List list();\n\n /**\n * Listing all the Ships from the database for the specific game.\n *\n * @param thisGame the game to select.\n * @return a list of all the Ships.\n */\n List listByGame(final Game thisGame);\n\n /**\n * Listing all the Ships from the database members of the specific fleet.\n *\n * @param thisGame the game to select.\n * @param fleet the fleet to select.\n * @return a list of all the Ships.\n */\n List listByFleet(final Game thisGame, final int fleet);\n\n /**\n * Listing all the Ships from the database at the specific position owned by the specific nation.\n *\n * @param thisPosition the position to select.\n * @param nation the nation to select.\n * @return a list of all the Ships.\n */\n List listByPositionNation(final Position thisPosition, final Nation nation);\n\n /**\n * Listing all the ships from the database that belongs in the specific game and nation.\n *\n * @param thisGame The Game.\n * @param thisNation The Nation.\n * @return A list of all the Ships.\n */\n List listGameNation(final Game thisGame, final Nation thisNation);\n\n /**\n * Listing all the Ships from the database at the specific position.\n *\n * @param thisGame the game to select.\n * @param nation the nation to select.\n * @param region the region to select.\n * @return a list of all the Ships.\n */\n List listGameNationRegion(final Game thisGame, final Nation nation, final Region region);\n\n /**\n * List all ships in the specific position and game.\n *\n * @param position The position to list the ships that are on it.\n * @return A list of ships.\n */\n List listByGamePosition(final Position position);\n\n /**\n * Listing all the free Ships from the database for a specific game.\n *\n * @param thisGame the game to select.\n * @return a list of all free Ships.\n */\n List listFreeByGame(final Game thisGame);\n\n /**\n * Listing all the free Ships from the database owned by 
the specific nation..\n *\n * @param thisGame the game to select.\n * @param nation the nation to select.\n * @return a list of all free Ships.\n */\n List listFreeByGameNation(final Game thisGame, final Nation nation);\n\n /**\n * Listing all the free Ships from the database owned by the specific nation..\n *\n * @param thisGame the game to select.\n * @param nation the nation to select.\n * @param region the region to select.\n * @return a list of all free Ships.\n */\n List listFreeByGameNationRegion(final Game thisGame, final Nation nation, final Region region);\n\n /**\n * Listing sectors with ships belonging to more than 1 owner.\n *\n * @param thisGame the game to select.\n * @return a list of all the sectors.\n */\n List listMultiOwners(final Game thisGame);\n\n /**\n * List all nations that have ships in the given position.\n *\n * @param thisPosition the position.\n * @return a list of nations.\n */\n List listOwners(final Position thisPosition);\n\n /**\n * List the number of ships per sector for particular nation.\n *\n * @param thisGame the Game .\n * @param thisNation the Nation owner.\n * @return a mapping of ship count to sectors.\n */\n Map countShips(final Game thisGame, final Nation thisNation);\n\n /**\n * Count the number of ships at the given position based on their owner.\n *\n * @param thisPosition the position.\n * @param onlyMerchant true, count only Merchant, false count only warships.\n * @return a mapping of ships count to nations.\n */\n Map countShipsByOwner(final Position thisPosition, final boolean onlyMerchant);\n\n /**\n * Count the number of ships at the given position based on their owner.\n *\n * @param thisPosition the position.\n * @return a mapping of ships count to nations.\n */\n Map countNearbyShipsByOwner(final Position thisPosition);\n\n /**\n * Remove all the flags signifying participation in a naval battle.\n *\n * @param thisGame the game to select.\n */\n void removeNavalFlag(final Game thisGame);\n\n /**\n * Remove 
all the flags signifying movement.\n *\n * @param thisGame the game to select.\n */\n void removeHasMovedFlag(final Game thisGame);\n\n /**\n * List all ships positioned at a given sector.\n *\n * @param position the position to check.\n * @return the list of ships.\n */\n List listAllBySector(final Position position);\n}\n", "meta": {"content_hash": "c0346f5dd386d9c86a51cb43cd6521b8", "timestamp": "", "source": "github", "line_count": 200, "max_line_length": 105, "avg_line_length": 30.56, "alnum_prop": 0.6482329842931938, "repo_name": "EaW1805/data", "id": "8ca0f13b675be75ad137bc0a7be3a9dd9104431c", "size": "6112", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/eaw1805/data/managers/beans/ShipManagerBean.java", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "5407"}, {"name": "Java", "bytes": "1613914"}]}} {"text": "import collections\n\nfrom supriya.ugens.PV_MagSquared import PV_MagSquared\n\n\nclass PV_MagNoise(PV_MagSquared):\n \"\"\"\n Multiplies magnitudes by noise.\n\n ::\n\n >>> pv_chain = supriya.ugens.FFT(\n ... source=supriya.ugens.WhiteNoise.ar(),\n ... )\n >>> pv_mag_noise = supriya.ugens.PV_MagNoise.new(\n ... pv_chain=pv_chain,\n ... 
)\n >>> pv_mag_noise\n PV_MagNoise.kr()\n\n \"\"\"\n\n ### CLASS VARIABLES ###\n\n _ordered_input_names = collections.OrderedDict([(\"pv_chain\", None)])\n", "meta": {"content_hash": "315b9f1b59e3af61cf70e175c42e6bf7", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 72, "avg_line_length": 22.0, "alnum_prop": 0.5527272727272727, "repo_name": "Pulgama/supriya", "id": "ecaa0ca5064dcf587b15d4d42ff500a5eb243506", "size": "550", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "supriya/ugens/PV_MagNoise.py", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "6712"}, {"name": "CSS", "bytes": "446"}, {"name": "HTML", "bytes": "1083"}, {"name": "JavaScript", "bytes": "6163"}, {"name": "Makefile", "bytes": "6775"}, {"name": "Python", "bytes": "2790612"}, {"name": "Shell", "bytes": "569"}]}} {"text": "\ufeff//Copyright(c) 2001-2021 Aspose Pty Ltd.All rights reserved.\n//https://github.com/aspose-barcode/Aspose.BarCode-for-.NET\nusing System;\nusing System.Text;\nusing Aspose.BarCode.Generation;\nusing Aspose.BarCode.BarCodeRecognition;\n\nnamespace Aspose.BarCode.Examples.CSharp.BarcodeGeneration\n{\n internal class MacroPdf417Optional : TwoDBase\n {\n\t\tpublic static void Run()\n {\n string path = GetFolder();\r\n System.Console.WriteLine(\"MacroPdf417Optional:\");\r\n\n Console.OutputEncoding = Encoding.Unicode;\n BarcodeGenerator gen = new BarcodeGenerator(EncodeTypes.MacroPdf417, \"\u00c5sp\u00f3se.Barc\u00f3de\u00a9\");\n gen.Parameters.Barcode.XDimension.Pixels = 2;\n //set metadata\n gen.Parameters.Barcode.Pdf417.Pdf417MacroFileID = 12345678;\n gen.Parameters.Barcode.Pdf417.Pdf417MacroSegmentID = 12;\n gen.Parameters.Barcode.Pdf417.Pdf417MacroSegmentsCount = 20;\n gen.Parameters.Barcode.Pdf417.Pdf417MacroFileName = \"file01\";\n //checksumm must be calculated in CCITT-16 / CRC-16-CCITT encoding\n 
//https://en.wikipedia.org/wiki/Cyclic_redundancy_check#Polynomial_representations_of_cyclic_redundancy_checks\n //for the example we use random number\n gen.Parameters.Barcode.Pdf417.Pdf417MacroChecksum = 1234;\n gen.Parameters.Barcode.Pdf417.Pdf417MacroFileSize = 400000;\n gen.Parameters.Barcode.Pdf417.Pdf417MacroTimeStamp = new DateTime(2019, 11, 1);\n gen.Parameters.Barcode.Pdf417.Pdf417MacroAddressee = \"street\";\n gen.Parameters.Barcode.Pdf417.Pdf417MacroSender = \"aspose\";\n gen.Save($\"{path}MacroPdf417Optional.png\", BarCodeImageFormat.Png);\n //try to recognize it\n BarCodeReader read = new BarCodeReader(gen.GenerateBarCodeImage(), DecodeType.MacroPdf417);\n foreach (BarCodeResult result in read.ReadBarCodes())\n {\r\n Console.WriteLine(\"---MacroPdf417Optional---\");\n Console.WriteLine(\"Codetext:\" + result.CodeText);\n Console.WriteLine(\"Pdf417MacroFileID:\" + result.Extended.Pdf417.MacroPdf417FileID);\n Console.WriteLine(\"Pdf417MacroSegmentID:\" + result.Extended.Pdf417.MacroPdf417SegmentID.ToString());\n Console.WriteLine(\"Pdf417MacroSegmentsCount:\" + result.Extended.Pdf417.MacroPdf417SegmentsCount.ToString());\n Console.WriteLine(\"Pdf417MacroFileName:\" + result.Extended.Pdf417.MacroPdf417FileName);\n Console.WriteLine(\"Pdf417MacroChecksum:\" + result.Extended.Pdf417.MacroPdf417Checksum.ToString());\n Console.WriteLine(\"Pdf417MacroFileSize:\" + result.Extended.Pdf417.MacroPdf417FileSize.ToString());\n Console.WriteLine(\"Pdf417MacroTimeStamp:\" + result.Extended.Pdf417.MacroPdf417TimeStamp.ToString());\n Console.WriteLine(\"Pdf417MacroAddressee:\" + result.Extended.Pdf417.MacroPdf417Addressee);\n Console.WriteLine(\"Pdf417MacroSender:\" + result.Extended.Pdf417.MacroPdf417Sender);\n }\n }\n\t}\n}", "meta": {"content_hash": "20c438070c7fb027389f37469e474345", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 124, "avg_line_length": 59.75, "alnum_prop": 0.6861924686192469, "repo_name": 
"aspose-barcode/Aspose.BarCode-for-.NET", "id": "70049c30ee1911aba05a8d31c362f7f45b39770e", "size": "3113", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Examples/CSharp/BarcodeGeneration/Barcode2D/MacroPdf417Optional.cs", "mode": "33188", "license": "mit", "language": [{"name": "ASP.NET", "bytes": "2339"}, {"name": "C#", "bytes": "34258"}, {"name": "CSS", "bytes": "3794"}, {"name": "HTML", "bytes": "1778"}, {"name": "JavaScript", "bytes": "54709"}]}} {"text": "\n\npackage com.sun.source.util;\n\nimport com.sun.source.tree.*;\n\n/**\n * A TreeVisitor that visits all the child tree nodes, and provides\n * support for maintaining a path for the parent nodes.\n * To visit nodes of a particular type, just override the\n * corresponding visitorXYZ method.\n * Inside your method, call super.visitXYZ to visit descendant\n * nodes.\n *\n * @author Jonathan Gibbons\n * @since 1.6\n */\n@jdk.Exported\npublic class TreePathScanner extends TreeScanner {\n\n /**\n * Scan a tree from a position identified by a TreePath.\n */\n public R scan(TreePath path, P p) {\n this.path = path;\n try {\n return path.getLeaf().accept(this, p);\n } finally {\n this.path = null;\n }\n }\n\n /**\n * Scan a single node.\n * The current path is updated for the duration of the scan.\n */\n @Override\n public R scan(Tree tree, P p) {\n if (tree == null)\n return null;\n\n TreePath prev = path;\n path = new TreePath(path, tree);\n try {\n return tree.accept(this, p);\n } finally {\n path = prev;\n }\n }\n\n /**\n * Get the current path for the node, as built up by the currently\n * active set of scan calls.\n */\n public TreePath getCurrentPath() {\n return path;\n }\n\n private TreePath path;\n}\n", "meta": {"content_hash": "04a789be713cee905401a4c9e6f0a149", "timestamp": "", "source": "github", "line_count": 60, "max_line_length": 70, "avg_line_length": 22.9, "alnum_prop": 0.5829694323144105, "repo_name": "rokn/Count_Words_2015", "id": 
"967bcfd809b61cd52d3e4b22e7f109650c0df7ed", "size": "2586", "binary": false, "copies": "38", "ref": "refs/heads/master", "path": "testing/openjdk2/langtools/src/share/classes/com/sun/source/util/TreePathScanner.java", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "61802"}, {"name": "Ruby", "bytes": "18888605"}]}} {"text": "/*************************************************************************/\n/* path_3d.h */\n/*************************************************************************/\n/* This file is part of: */\n/* GODOT ENGINE */\n/* https://godotengine.org */\n/*************************************************************************/\n/* Copyright (c) 2007-2022 Juan Linietsky, Ariel Manzur. */\n/* Copyright (c) 2014-2022 Godot Engine contributors (cf. AUTHORS.md). */\n/* */\n/* Permission is hereby granted, free of charge, to any person obtaining */\n/* a copy of this software and associated documentation files (the */\n/* \"Software\"), to deal in the Software without restriction, including */\n/* without limitation the rights to use, copy, modify, merge, publish, */\n/* distribute, sublicense, and/or sell copies of the Software, and to */\n/* permit persons to whom the Software is furnished to do so, subject to */\n/* the following conditions: */\n/* */\n/* The above copyright notice and this permission notice shall be */\n/* included in all copies or substantial portions of the Software. */\n/* */\n/* THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, */\n/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */\n/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/\n/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */\n/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */\n/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */\n/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/\n/*************************************************************************/\n\n#ifndef PATH_3D_H\n#define PATH_3D_H\n\n#include \"scene/3d/node_3d.h\"\n#include \"scene/resources/curve.h\"\n\nclass Path3D : public Node3D {\n\tGDCLASS(Path3D, Node3D);\n\n\tRef curve;\n\n\tvoid _curve_changed();\n\n\tRID debug_instance;\n\tRef debug_mesh;\n\nprivate:\n\tvoid _update_debug_mesh();\n\nprotected:\n\tvoid _notification(int p_what);\n\n\tstatic void _bind_methods();\n\npublic:\n\tvoid set_curve(const Ref &p_curve);\n\tRef get_curve() const;\n\n\tPath3D();\n\t~Path3D();\n};\n\nclass PathFollow3D : public Node3D {\n\tGDCLASS(PathFollow3D, Node3D);\n\npublic:\n\tenum RotationMode {\n\t\tROTATION_NONE,\n\t\tROTATION_Y,\n\t\tROTATION_XY,\n\t\tROTATION_XYZ,\n\t\tROTATION_ORIENTED\n\t};\n\nprivate:\n\tPath3D *path = nullptr;\n\treal_t prev_offset = 0.0; // Offset during the last _update_transform.\n\treal_t progress = 0.0;\n\treal_t h_offset = 0.0;\n\treal_t v_offset = 0.0;\n\tbool cubic = true;\n\tbool loop = true;\n\tRotationMode rotation_mode = ROTATION_XYZ;\n\n\tvoid _update_transform(bool p_update_xyz_rot = true);\n\nprotected:\n\tvoid _validate_property(PropertyInfo &p_property) const;\n\n\tvoid _notification(int p_what);\n\tstatic void _bind_methods();\n\npublic:\n\tvoid set_progress(real_t p_progress);\n\treal_t get_progress() const;\n\n\tvoid set_h_offset(real_t p_h_offset);\n\treal_t get_h_offset() const;\n\n\tvoid set_v_offset(real_t p_v_offset);\n\treal_t get_v_offset() const;\n\n\tvoid set_progress_ratio(real_t p_ratio);\n\treal_t get_progress_ratio() const;\n\n\tvoid set_loop(bool p_loop);\n\tbool has_loop() const;\n\n\tvoid set_rotation_mode(RotationMode p_rotation_mode);\n\tRotationMode get_rotation_mode() const;\n\n\tvoid set_cubic_interpolation(bool p_enable);\n\tbool get_cubic_interpolation() const;\n\n\tTypedArray get_configuration_warnings() const override;\n\n\tPathFollow3D() {}\n};\n\nVARIANT_ENUM_CAST(PathFollow3D::RotationMode);\n\n#endif // 
PATH_3D_H\n", "meta": {"content_hash": "fd9b6c9c459cebf8040ade46276a1807", "timestamp": "", "source": "github", "line_count": 122, "max_line_length": 75, "avg_line_length": 32.959016393442624, "alnum_prop": 0.5630440189007709, "repo_name": "ZuBsPaCe/godot", "id": "45fa2c891748f274d08eb94861020759362ef0cb", "size": "4021", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "scene/3d/path_3d.h", "mode": "33188", "license": "mit", "language": [{"name": "AIDL", "bytes": "1633"}, {"name": "C", "bytes": "1045182"}, {"name": "C#", "bytes": "1605400"}, {"name": "C++", "bytes": "39214077"}, {"name": "CMake", "bytes": "606"}, {"name": "GAP", "bytes": "62"}, {"name": "GDScript", "bytes": "66163"}, {"name": "GLSL", "bytes": "830941"}, {"name": "Java", "bytes": "596106"}, {"name": "JavaScript", "bytes": "188456"}, {"name": "Kotlin", "bytes": "93069"}, {"name": "Makefile", "bytes": "1421"}, {"name": "Objective-C", "bytes": "20550"}, {"name": "Objective-C++", "bytes": "381709"}, {"name": "PowerShell", "bytes": "2713"}, {"name": "Python", "bytes": "461706"}, {"name": "Shell", "bytes": "32416"}]}} {"text": "from django.test import TestCase\nfrom django.utils.timezone import get_current_timezone\n\nfrom app.utils import *\nfrom app.myblog.models import Article, Classification\n\n\nclass TestEncodeJson(TestCase):\n def test_ecnode(self):\n res = encodejson(1, {})\n self.assertIsInstance(res ,str)\n\n\nclass TestCreateRandom(TestCase):\n def test_create(self):\n res = create_random_str(10)\n self.assertEqual(len(res), 10)\n res = create_random_str(62)\n self.assertEqual(len(res), 62)\n res = create_random_str(63)\n self.assertEqual(res, 'too long str')\n\n def test_format(self):\n res = create_random_str(60)\n for itm in ['+', '-', '_', '=', '|', '!', '?', '`', '~', '@', '#', '$', '%', '^', '&', '*', '(', ')']:\n self.assertNotIn(itm, res)\n\n\nclass TestString2Datetime(TestCase):\n def test_convert(self):\n sample = '2011-1-1 19:25:01'\n res = 
string_to_datetime(sample)\n self.assertIsInstance(res, datetime.datetime)\n self.assertEqual(res.second, 1)\n self.assertEqual(res.minute, 25)\n self.assertEqual(res.hour, 19)\n self.assertEqual(res.day, 1)\n self.assertEqual(res.month, 1)\n self.assertEqual(res.year, 2011)\n\n def test_format(self):\n sample = '2015/1/1 23-12-11'\n format_str = '%Y/%m/%d %H-%M-%S'\n res = string_to_datetime(sample, format_str)\n self.assertIsInstance(res, datetime.datetime)\n\n\nclass TestDatetime2Timestamp(TestCase):\n def test_convert(self):\n sample = datetime.datetime.now()\n res = datetime_to_timestamp(sample)\n self.assertIsInstance(res, float)\n sample.replace(tzinfo=get_current_timezone())\n res = datetime_to_timestamp(sample)\n self.assertIsInstance(res, float)\n\n\nclass TestDatetime2String(TestCase):\n def test_convert(self):\n sample = string_to_datetime('2011-1-1 19:25:01')\n res = datetime_to_string(sample)\n self.assertEqual(res, '2011-01-01 19:25:01')\n sample.replace(tzinfo=get_current_timezone())\n res = datetime_to_string(sample)\n self.assertEqual(res, '2011-01-01 19:25:01')\n\n\nclass TestDatetime2UtcString(TestCase):\n def test_convert(self):\n sample = string_to_datetime('2011-1-1 19:25:01')\n res = datetime_to_utc_string(sample)\n self.assertEqual(res, '2011-01-01 19:25:01+08:00')\n\n\nclass TestModeSerializer(TestCase):\n def setUp(self):\n classify = Classification.objects.create(c_name='test')\n art = Article.objects.create(caption='article',\n sub_caption='sub_article',\n classification=classify,\n content='article test')\n art1 = Article.objects.create(caption='article1',\n sub_caption='sub_article',\n classification=classify,\n content='article test')\n\n def test_serializer(self):\n art = Article.objects.get(caption='article')\n serial = model_serializer(art)\n self.assertIsInstance(serial, dict)\n serial = model_serializer(art, serializer='json')\n self.assertIsInstance(serial, str)\n serial = model_serializer(art, serializer='xml')\n 
self.assertIn('xml version=\"1.0', serial)\n\n def test_serializer_list(self):\n art_list = Article.objects.all()\n serial = model_serializer(art_list)\n self.assertIsInstance(serial, list)\n serial = model_serializer(art_list, serializer='json')\n self.assertIsInstance(serial, str)\n\n def test_include(self):\n art = Article.objects.get(caption='article')\n serial = model_serializer(art, include_attr=['caption', 'content'])\n self.assertIn('caption', serial)\n self.assertNotIn('create_time', serial)\n\n def test_except(self):\n art = Article.objects.get(caption='article')\n serial = model_serializer(art, except_attr=['caption', 'content'])\n self.assertNotIn('caption', serial)\n self.assertIn('create_time', serial)\n\n def test_include_except(self):\n art = Article.objects.get(caption='article')\n serial = model_serializer(art, include_attr=['caption', 'content'], except_attr=['content'])\n self.assertIn('caption', serial)\n self.assertNotIn('content', serial)\n\n\nclass TestCreateVerifyPic(TestCase):\n def test_create(self):\n img, code = create_verify_code()\n self.assertIsInstance(img, str)\n self.assertIsInstance(code, str)\n", "meta": {"content_hash": "cca036fde1a653baec31b280e0478f0d", "timestamp": "", "source": "github", "line_count": 126, "max_line_length": 110, "avg_line_length": 36.26984126984127, "alnum_prop": 0.613129102844639, "repo_name": "madarou/angular-django", "id": "b7bc6922332a89a585b6ffbc09e0aa106a795778", "size": "4570", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tests/test_utils.py", "mode": "33261", "license": "bsd-2-clause", "language": [{"name": "CSS", "bytes": "356826"}, {"name": "HTML", "bytes": "77340"}, {"name": "Python", "bytes": "1816255"}]}} {"text": "\"use strict\";\r\nObject.defineProperty(exports, \"__esModule\", { value: true });\r\nconst wildemitter = require(\"wildemitter\");\r\nconst ApiBase_1 = require(\"../ApiBase\");\r\n/**\r\n * AgentPhone api implementation.\r\n */\r\nclass AgentPhone {\r\n 
/**\r\n * AgentPhone api implementation.\r\n *\r\n * @param {Object} agentPhoneOptions A collection of options.\r\n *\r\n * @example\r\n * options = {\r\n *\t\tdebug: true,\r\n * domainURIPath: \"https://api-a32.nice-incontact.com/inContactAPI\",\r\n * baseURIPath: \"/services/v15.0/\",\r\n * authorization: \"Bearer [Token Value]\",\r\n * timeout: 10000, // default is '0' (0 seconds timeout)\r\n * }\r\n */\r\n constructor(agentPhoneOptions) {\r\n this.agentPhoneOptions = agentPhoneOptions;\r\n // local.\r\n let self = this;\r\n let parent = agentPhoneOptions.parent;\r\n let uniqueID = \"Agent.AgentPhone.\";\r\n let item;\r\n let options = agentPhoneOptions || {};\r\n let config = this.config = {\r\n debug: false,\r\n domainURIPath: \"https://api-a32.nice-incontact.com/inContactAPI\",\r\n baseURIPath: \"/services/v15.0/\",\r\n authorization: \"Bearer [Token Value]\",\r\n timeout: 0\r\n };\r\n // Assign global.\r\n this.parent = parent;\r\n this.logger = parent.logger;\r\n this.uniqueID = uniqueID;\r\n // set our config from options\r\n for (item in options) {\r\n if (options.hasOwnProperty(item)) {\r\n this.config[item] = options[item];\r\n }\r\n }\r\n // Call WildEmitter constructor.\r\n wildemitter.mixin(AgentPhone);\r\n // Create the request instance.\r\n this.apirequest = new ApiBase_1.ApiRequest(this.config);\r\n }\r\n /**\r\n * Dial agent phone.\r\n *\r\n * @param {string} sessionId\t\tThe session id.\r\n * @param {Object} requestOptions A collection of request options.\r\n *\r\n * @example\r\n * options = {\r\n *\t\ttimeout: 10000, // default is '0' (0 seconds timeout),\r\n *\t\tcancelToken: new CancelToken(function (cancel) {}) // 'cancelToken' specifies a cancel token that can be used to cancel the request (see Cancellation section below for details)\r\n *\r\n *\t\t// 'params' are the URL parameters to be sent with the request\r\n *\t\t// Must be a plain object or a URLSearchParams object\r\n *\t\tparams: { ID: 12345 },\r\n *\r\n *\t\t// 'data' is the data 
to be sent as the request body.\r\n *\t\tdata: { ID: 'Unique' },\r\n * }\r\n */\r\n dialAgentPhoneAsync(sessionId, requestOptions) {\r\n // Create local refs.\r\n let localExecute = 'Dial agent phone';\r\n let localUniqueID = this.uniqueID + \"dialAgentPhoneAsync\";\r\n let localUrl = 'agent-sessions/' + sessionId + '/agent-phone/dial';\r\n let localTimeout = this.config.timeout;\r\n // Assign the request options.\r\n let options = requestOptions || {};\r\n let requestConfig = {\r\n url: localUrl,\r\n method: 'POST',\r\n baseURL: this.config.domainURIPath + this.config.baseURIPath,\r\n headers: {\r\n 'Authorization': this.config.authorization,\r\n 'Content-Type': 'application/json'\r\n },\r\n timeout: localTimeout\r\n };\r\n // Execute the request.\r\n this.apirequest.request(localExecute, localUniqueID, requestConfig, options);\r\n }\r\n /**\r\n * Mute agent phone.\r\n *\r\n * @param {string} sessionId\t\tThe session id.\r\n * @param {Object} requestOptions A collection of request options.\r\n *\r\n * @example\r\n * options = {\r\n *\t\ttimeout: 10000, // default is '0' (0 seconds timeout),\r\n *\t\tcancelToken: new CancelToken(function (cancel) {}) // 'cancelToken' specifies a cancel token that can be used to cancel the request (see Cancellation section below for details)\r\n *\r\n *\t\t// 'params' are the URL parameters to be sent with the request\r\n *\t\t// Must be a plain object or a URLSearchParams object\r\n *\t\tparams: { ID: 12345 },\r\n *\r\n *\t\t// 'data' is the data to be sent as the request body.\r\n *\t\tdata: { ID: 'Unique' },\r\n * }\r\n */\r\n muteAgentPhoneAsync(sessionId, requestOptions) {\r\n // Create local refs.\r\n let localExecute = 'Mute agent phone';\r\n let localUniqueID = this.uniqueID + \"muteAgentPhoneAsync\";\r\n let localUrl = 'agent-sessions/' + sessionId + '/agent-phone/mute';\r\n let localTimeout = this.config.timeout;\r\n // Assign the request options.\r\n let options = requestOptions || {};\r\n let requestConfig = {\r\n url: 
localUrl,\r\n method: 'POST',\r\n baseURL: this.config.domainURIPath + this.config.baseURIPath,\r\n headers: {\r\n 'Authorization': this.config.authorization,\r\n 'Content-Type': 'application/json'\r\n },\r\n timeout: localTimeout\r\n };\r\n // Execute the request.\r\n this.apirequest.request(localExecute, localUniqueID, requestConfig, options);\r\n }\r\n /**\r\n * Un-Mute agent phone.\r\n *\r\n * @param {string} sessionId\t\tThe session id.\r\n * @param {Object} requestOptions A collection of request options.\r\n *\r\n * @example\r\n * options = {\r\n *\t\ttimeout: 10000, // default is '0' (0 seconds timeout),\r\n *\t\tcancelToken: new CancelToken(function (cancel) {}) // 'cancelToken' specifies a cancel token that can be used to cancel the request (see Cancellation section below for details)\r\n *\r\n *\t\t// 'params' are the URL parameters to be sent with the request\r\n *\t\t// Must be a plain object or a URLSearchParams object\r\n *\t\tparams: { ID: 12345 },\r\n *\r\n *\t\t// 'data' is the data to be sent as the request body.\r\n *\t\tdata: { ID: 'Unique' },\r\n * }\r\n */\r\n unMuteAgentPhoneAsync(sessionId, requestOptions) {\r\n // Create local refs.\r\n let localExecute = 'Un-Mute agent phone';\r\n let localUniqueID = this.uniqueID + \"unMuteAgentPhoneAsync\";\r\n let localUrl = 'agent-sessions/' + sessionId + '/agent-phone/unmute';\r\n let localTimeout = this.config.timeout;\r\n // Assign the request options.\r\n let options = requestOptions || {};\r\n let requestConfig = {\r\n url: localUrl,\r\n method: 'POST',\r\n baseURL: this.config.domainURIPath + this.config.baseURIPath,\r\n headers: {\r\n 'Authorization': this.config.authorization,\r\n 'Content-Type': 'application/json'\r\n },\r\n timeout: localTimeout\r\n };\r\n // Execute the request.\r\n this.apirequest.request(localExecute, localUniqueID, requestConfig, options);\r\n }\r\n /**\r\n * Ends the agents phone call.\r\n *\r\n * @param {string} sessionId\t\tThe session id.\r\n * @param {Object} 
requestOptions A collection of request options.\r\n *\r\n * @example\r\n * options = {\r\n *\t\ttimeout: 10000, // default is '0' (0 seconds timeout),\r\n *\t\tcancelToken: new CancelToken(function (cancel) {}) // 'cancelToken' specifies a cancel token that can be used to cancel the request (see Cancellation section below for details)\r\n *\r\n *\t\t// 'params' are the URL parameters to be sent with the request\r\n *\t\t// Must be a plain object or a URLSearchParams object\r\n *\t\tparams: { ID: 12345 },\r\n *\r\n *\t\t// 'data' is the data to be sent as the request body.\r\n *\t\tdata: { ID: 'Unique' },\r\n * }\r\n */\r\n endAgentPhoneCallAsync(sessionId, requestOptions) {\r\n // Create local refs.\r\n let localExecute = 'Ends the agents phone call';\r\n let localUniqueID = this.uniqueID + \"endAgentPhoneCallAsync\";\r\n let localUrl = 'agent-sessions/' + sessionId + '/agent-phone/end';\r\n let localTimeout = this.config.timeout;\r\n // Assign the request options.\r\n let options = requestOptions || {};\r\n let requestConfig = {\r\n url: localUrl,\r\n method: 'POST',\r\n baseURL: this.config.domainURIPath + this.config.baseURIPath,\r\n headers: {\r\n 'Authorization': this.config.authorization,\r\n 'Content-Type': 'application/json'\r\n },\r\n timeout: localTimeout\r\n };\r\n // Execute the request.\r\n this.apirequest.request(localExecute, localUniqueID, requestConfig, options);\r\n }\r\n}\r\nexports.AgentPhone = AgentPhone;\r\n", "meta": {"content_hash": "6103e6536a1dbe5b22ea24e9fe6b8739", "timestamp": "", "source": "github", "line_count": 214, "max_line_length": 183, "avg_line_length": 39.55140186915888, "alnum_prop": 0.5763232514177694, "repo_name": "drazenzadravec/projects", "id": "5b7a1b9fff6fa8381d97dba3a6f9039029cb1d58", "size": "8464", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "nice/sdk/lakenicejs/api/agent/agentphone.js", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "2754"}, {"name": "C", 
"bytes": "720"}, {"name": "C#", "bytes": "5049589"}, {"name": "C++", "bytes": "2005494"}, {"name": "CSS", "bytes": "19127"}, {"name": "HLSL", "bytes": "452"}, {"name": "HTML", "bytes": "90518"}, {"name": "JavaScript", "bytes": "8105941"}, {"name": "Shell", "bytes": "234"}, {"name": "TypeScript", "bytes": "1588858"}]}} {"text": "use url::Url;\nuse toml;\n\nuse lsio::error::{Error, Result};\nuse lsio::config::{ConfigFile, ParseInto};\n\n/// Config by default is located at $HOME/.s3lsio/config for a given user. You can pass in an option\n/// on the cli ```-c \"\"``` and it will override the default.\n///\n/// If for some reason there is no config file and nothing is passed in the all of the\n/// fields will be None for Option values or whatever the defaults are for a given type.\n///\n///\n#[derive(Clone, Debug, PartialEq, Eq)]\npub struct Config {\n /// endpoint is in the format ://:\n pub endpoint: Option,\n /// proxy is in the format ://:\n pub proxy: Option,\n /// signature is either V2 or V4\n pub signature: String,\n}\n\nimpl ConfigFile for Config {\n type Error = Error;\n\n fn from_toml(toml: toml::Value) -> Result {\n let mut cfg = Config::default();\n\n try!(toml.parse_into(\"options.endpoint\", &mut cfg.endpoint));\n try!(toml.parse_into(\"options.proxy\", &mut cfg.proxy));\n try!(toml.parse_into(\"options.signature\", &mut cfg.signature));\n\n Ok(cfg)\n }\n}\n\nimpl Default for Config {\n fn default() -> Self {\n Config {\n endpoint: None,\n proxy: None,\n signature: \"V4\".to_string(),\n }\n }\n}\n\nimpl Config {\n pub fn set_endpoint(&mut self, value: Option) {\n self.endpoint = value;\n }\n\n pub fn set_proxy(&mut self, value: Option) {\n self.proxy = value;\n }\n\n pub fn set_signature(&mut self, value: String) {\n self.signature = value;\n }\n\n pub fn endpoint(&self) -> &Option {\n &self.endpoint\n }\n\n pub fn proxy(&self) -> &Option {\n &self.proxy\n }\n}\n", "meta": {"content_hash": "d3ea8ac83f81b03b397377c329a070b5", "timestamp": "", "source": 
"github", "line_count": 68, "max_line_length": 100, "avg_line_length": 26.13235294117647, "alnum_prop": 0.5981992121553179, "repo_name": "lambdastackio/s3lsio", "id": "86d5a0c2f90e21b877b685695cb76cde3941769c", "size": "2383", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/config.rs", "mode": "33188", "license": "apache-2.0", "language": [{"name": "HTML", "bytes": "662"}, {"name": "Rust", "bytes": "198061"}, {"name": "Shell", "bytes": "6705"}]}} {"text": "import React, { Component } from 'react';\nimport './App.css';\nimport Header from './components/header/header';\nimport Footer from './components/footer/footer';\nimport Acrylics from './components/acrylics/acrylics';\n\nclass App extends Component {\n render() {\n\n return (\n
\n
\n \n
\n
\n );\n }\n}\n\nexport default App;\n", "meta": {"content_hash": "57485d01e9adc96300926dfc9d7e15dd", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 54, "avg_line_length": 20.2, "alnum_prop": 0.6212871287128713, "repo_name": "bradyhouse/house", "id": "cd89fe6159834a1ec868b533e830d461e034b481", "size": "404", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "fiddles/react/fiddle-0015-Portfolio/src/App.js", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "26015"}, {"name": "CSS", "bytes": "3541537"}, {"name": "HTML", "bytes": "3275889"}, {"name": "Handlebars", "bytes": "1593"}, {"name": "Java", "bytes": "90609"}, {"name": "JavaScript", "bytes": "9249816"}, {"name": "Less", "bytes": "3364"}, {"name": "PHP", "bytes": "125609"}, {"name": "Pug", "bytes": "1758"}, {"name": "Python", "bytes": "20858"}, {"name": "Ruby", "bytes": "11317"}, {"name": "SCSS", "bytes": "37673"}, {"name": "Shell", "bytes": "1095755"}, {"name": "TypeScript", "bytes": "779887"}]}} {"text": "@interface AppDelegate () \n\n@end\n\n@implementation AppDelegate\n\n\n- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {\n // Override point for customization after application launch.\n UISplitViewController *splitViewController = (UISplitViewController *)self.window.rootViewController;\n UINavigationController *navigationController = [splitViewController.viewControllers lastObject];\n navigationController.topViewController.navigationItem.leftBarButtonItem = splitViewController.displayModeButtonItem;\n splitViewController.delegate = self;\n return YES;\n}\n\n- (void)applicationWillResignActive:(UIApplication *)application {\n // Sent when the application is about to move from active to inactive state. 
This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.\n // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.\n}\n\n- (void)applicationDidEnterBackground:(UIApplication *)application {\n // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.\n // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.\n}\n\n- (void)applicationWillEnterForeground:(UIApplication *)application {\n // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.\n}\n\n- (void)applicationDidBecomeActive:(UIApplication *)application {\n // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.\n}\n\n- (void)applicationWillTerminate:(UIApplication *)application {\n // Called when the application is about to terminate. Save data if appropriate. 
See also applicationDidEnterBackground:.\n}\n\n#pragma mark - Split view\n\n- (BOOL)splitViewController:(UISplitViewController *)splitViewController collapseSecondaryViewController:(UIViewController *)secondaryViewController ontoPrimaryViewController:(UIViewController *)primaryViewController {\n if ([secondaryViewController isKindOfClass:[UINavigationController class]] && [[(UINavigationController *)secondaryViewController topViewController] isKindOfClass:[DetailViewController class]] && ([(DetailViewController *)[(UINavigationController *)secondaryViewController topViewController] detailItem] == nil)) {\n // Return YES to indicate that we have handled the collapse by doing nothing; the secondary controller will be discarded.\n return YES;\n } else {\n return NO;\n }\n}\n\n@end\n", "meta": {"content_hash": "860775067fb2a296bb2468dd3aeb795f", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 318, "avg_line_length": 60.58, "alnum_prop": 0.7949818421921426, "repo_name": "AmitaiB/TDD_PracticeProject", "id": "7908e8467cb39e00604f9bc55c953f3d32c7dc4b", "size": "3252", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "AppDelegate.m", "mode": "33188", "license": "mit", "language": [{"name": "Objective-C", "bytes": "14973"}, {"name": "Ruby", "bytes": "386"}]}} {"text": "class Solution < ActiveRecord::Base\n belongs_to :user; belongs_to :issue\nend\n", "meta": {"content_hash": "6281e6baa3e03aaeef53359e7b937495", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 37, "avg_line_length": 26.0, "alnum_prop": 0.7564102564102564, "repo_name": "dmc2015/securitypulse", "id": "f1d20fe7979c1f73ce0569fc6d4a847e58499428", "size": "78", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/models/solution.rb", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "55583"}, {"name": "CoffeeScript", "bytes": "948"}, {"name": "HTML", "bytes": "21737"}, {"name": "JavaScript", "bytes": 
"729"}, {"name": "Ruby", "bytes": "55029"}]}} {"text": "\ufeff/*Problem 9. Sorting array\r\n\r\nWrite a method that return the maximal element in a portion of array of integers starting at given index.\r\nUsing it write another method that sorts an array in ascending / descending order.*/\r\n\r\nusing System;\r\n\r\nclass Program\r\n{\r\n static void Main()\r\n {\r\n Console.Write(\"Enter array length n: \");\r\n int n = int.Parse(Console.ReadLine());\r\n\r\n Console.WriteLine(\"\\nEnter {0} number(s) to array:\", n);\r\n int[] array = InputArrayNumbers(n);\r\n\r\n Console.Write(\"Enter start index: \");\r\n int start = int.Parse(Console.ReadLine());\r\n\r\n Console.Write(\"Enter end index: \");\r\n int end = int.Parse(Console.ReadLine());\r\n\r\n Console.WriteLine(\"Max element in interval [{0}, {1}] -> {2}\", start, end, GetMaxElementInInterval(array, start, end));\r\n Console.WriteLine(\"Numbers in Ascending order: {0}\", string.Join(\" \", SortAscending(array)));\r\n Console.WriteLine(\"Numbers in Descending order: {0}\", string.Join(\" \", SortDescending(array)));\r\n }\r\n\r\n static int[] InputArrayNumbers(int length)\r\n {\r\n int[] array = new int[length];\r\n for (int i = 0; i < length; i++)\r\n {\r\n array[i] = int.Parse(Console.ReadLine());\r\n }\r\n return array;\r\n }\r\n\r\n static int GetMaxElementInInterval(int[] numbers, int start, int end, int swapIndex = 0)\r\n {\r\n if (start < 0 || start >= numbers.Length || end < 0 || end >= numbers.Length)\r\n {\r\n throw new IndexOutOfRangeException();\r\n }\r\n\r\n int maxIndex = start;\r\n\r\n for (int i = start; i <= end; i++)\r\n {\r\n if (numbers[maxIndex] < numbers[i])\r\n {\r\n maxIndex = i;\r\n }\r\n }\r\n\r\n int temp = numbers[swapIndex];\r\n numbers[swapIndex] = numbers[maxIndex];\r\n numbers[maxIndex] = temp;\r\n\r\n return numbers[swapIndex];\r\n }\r\n\r\n static int[] SortAscending(int[] array)\r\n {\r\n int[] sorted = new int[array.Length];\r\n for (int i = array.Length - 1; i >= 0; i--)\r\n 
{\r\n sorted[i] = GetMaxElementInInterval(array, 0, i, i);\r\n }\r\n return sorted;\r\n }\r\n\r\n static int[] SortDescending(int[] array)\r\n {\r\n int[] sorted = new int[array.Length];\r\n for (int i = 0; i < array.Length; i++)\r\n {\r\n sorted[i] = GetMaxElementInInterval(array, i, array.Length - 1, i);\r\n }\r\n return sorted;\r\n }\r\n}", "meta": {"content_hash": "5b5d2748c3e62d5e4a57434b6680eb96", "timestamp": "", "source": "github", "line_count": 82, "max_line_length": 127, "avg_line_length": 30.4390243902439, "alnum_prop": 0.5408653846153846, "repo_name": "MarinMarinov/C-Sharp-Part-2", "id": "df4f48a9d67b7bb0f65c342f6bf5a47b0c24807f", "size": "2498", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Homework 03-Methods/Problem 09. Sorting array/Program.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "269821"}]}} {"text": "//\n// Copyright (c) 2010 Dariusz Gadomski \n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without modification,\n// are permitted provided that the following conditions are met:\n//\n// * Redistributions of source code must retain the above copyright notice, this\n// list of conditions and the following disclaimer.\n// * Redistributions in binary form must reproduce the above copyright notice,\n// this list of conditions and the following disclaimer in the documentation and/or\n// other materials provided with the distribution.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''\n// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n// DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR\n// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON\n// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n\n#include \n\n#include \n\nnamespace debug\n{\n\nDebugLevel _verbosity_;\n\nstruct nullstream: std::ostream\n{\n nullstream() :\n std::ostream(0)\n {\n }\n};\n\nstd::ostream& dbg(DebugLevel level)\n{\n static nullstream dummystream;\n if (level >= getVerbosity())\n return std::cout;\n\n return dummystream;\n}\n\nint timeval_subtract(struct timeval *result, struct timeval *x,\n struct timeval *y)\n{\n /* Perform the carry for the later subtraction by updating y. */\n if (x->tv_usec < y->tv_usec)\n {\n int nsec = (y->tv_usec - x->tv_usec) / 1000000 + 1;\n y->tv_usec -= 1000000 * nsec;\n y->tv_sec += nsec;\n }\n if (x->tv_usec - y->tv_usec > 1000000)\n {\n int nsec = (y->tv_usec - x->tv_usec) / 1000000;\n y->tv_usec += 1000000 * nsec;\n y->tv_sec -= nsec;\n }\n\n /* Compute the time remaining to wait.\n tv_usec is certainly positive. */\n result->tv_sec = x->tv_sec - y->tv_sec;\n result->tv_usec = x->tv_usec - y->tv_usec;\n\n /* Return 1 if result is negative. 
*/\n return x->tv_sec < y->tv_sec;\n}\n\nint timeval_add(struct timeval *result, struct timeval *x,\n struct timeval *y)\n{\n struct timeval tmp;\n long carry = 0;\n tmp.tv_usec = x->tv_usec + y->tv_usec;\n if( tmp.tv_usec >= 1000000 )\n {\n ++carry;\n tmp.tv_usec %=1000000;\n }\n\n tmp.tv_sec = x->tv_sec + y->tv_sec + carry;\n\n result->tv_sec = tmp.tv_sec;\n result->tv_usec = tmp.tv_usec;\n\n return carry;\n}\n\n}\n", "meta": {"content_hash": "aa61923cace41118862354f7b8678c4e", "timestamp": "", "source": "github", "line_count": 98, "max_line_length": 83, "avg_line_length": 29.540816326530614, "alnum_prop": 0.6625215889464594, "repo_name": "dargad/network-traffic-prediction", "id": "3b83d5fb13872c1ed9fb5e4c272728737d7012f9", "size": "2895", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "models/src/util.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "C++", "bytes": "168500"}, {"name": "Shell", "bytes": "2071"}]}} {"text": "\n\n \n \n \n coquelicot: Not compatible \ud83d\udc7c\n \n \n \n \n \n \n \n \n \n \n
\n \n
\n
\n
\n \u00ab Up\n

\n coquelicot\n \n 2.1.1\n Not compatible \ud83d\udc7c\n \n

\n

\ud83d\udcc5 (2022-04-22 09:36:58 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed # Synopsis\nbase-bigarray       base\nbase-threads        base\nbase-unix           base\nconf-findutils      1           Virtual package relying on findutils\ncoq                 8.12.2      Formal proof management system\nnum                 1.4         The legacy Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.10.2      The OCaml compiler (virtual package)\nocaml-base-compiler 4.10.2      Official release 4.10.2\nocaml-config        1           OCaml Switch Configuration\nocamlfind           1.9.3       A library manager for OCaml\n# opam file:\nopam-version: "2.0"\nmaintainer: "guillaume.melquiond@inria.fr"\nhomepage: "http://coquelicot.saclay.inria.fr/"\ndev-repo: "git+https://gitlab.inria.fr/coquelicot/coquelicot.git"\nbug-reports: "https://gitlab.inria.fr/coquelicot/coquelicot/issues"\nlicense: "LGPL-3.0-or-later"\nbuild: [\n  ["./configure"]\n  ["./remake" "-j%{jobs}%"]\n]\ninstall: ["./remake" "install"]\ndepends: [\n  "coq" {>= "8.4pl4" & < "8.6~"}\n  "coq-mathcomp-ssreflect" {>= "1.6"}\n]\ntags: [ "keyword:real analysis" "keyword:topology" "keyword:filters" "keyword:metric spaces" "category:Mathematics/Real Calculus and Topology" ]\nauthors: [ "Sylvie Boldo <sylvie.boldo@inria.fr>" "Catherine Lelay <catherine.lelay@inria.fr>" "Guillaume Melquiond <guillaume.melquiond@inria.fr>" ]\nsynopsis: "A Coq formalization of real analysis compatible with the standard library"\nurl {\n  src: "https://coquelicot.gitlabpages.inria.fr/releases/coquelicot-2.1.1.tar.gz"\n  checksum: "md5=bd648a43a06f422ee6ba886f93d0a534"\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install \ud83c\udfdc\ufe0f

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-coquelicot.2.1.1 coq.8.12.2
\n
Return code
\n
5120
\n
Output
\n
[NOTE] Package coq is already installed (current version is 8.12.2).\nThe following dependencies couldn't be met:\n  - coq-coquelicot -> coq < 8.6~ -> ocaml < 4.06.0\n      base of this switch (use `--unlock-base' to force)\nNo solution found, exiting\n
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
opam remove -y coq; opam install -y --show-action --unlock-base coq-coquelicot.2.1.1
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Install \ud83d\ude80

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Installation size

\n

No files were installed.

\n

Uninstall \ud83e\uddf9

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub \u00a9 Guillaume Claret \ud83d\udc23\n

\n
\n
\n \n \n \n\n", "meta": {"content_hash": "c5add0dcdac23f8c79d5cc038477de2c", "timestamp": "", "source": "github", "line_count": 160, "max_line_length": 197, "avg_line_length": 42.56875, "alnum_prop": 0.5420643077374835, "repo_name": "coq-bench/coq-bench.github.io", "id": "36d1231805ffb5a1cdfdb73232cf32e7a9ebcb46", "size": "6836", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.10.2-2.0.6/released/8.12.2/coquelicot/2.1.1.html", "mode": "33188", "license": "mit", "language": []}} {"text": "\n\n \n \n \n canon-bdds: Not compatible \ud83d\udc7c\n \n \n \n \n \n \n \n \n \n \n
\n \n
\n
\n
\n \u00ab Up\n

\n canon-bdds\n \n 8.10.0\n Not compatible \ud83d\udc7c\n \n

\n

\ud83d\udcc5 (2022-04-02 17:44:00 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed # Synopsis\nbase-bigarray       base\nbase-threads        base\nbase-unix           base\ncamlp5              7.14        Preprocessor-pretty-printer of OCaml\nconf-findutils      1           Virtual package relying on findutils\nconf-perl           2           Virtual package relying on perl\ncoq                 8.7.1+1     Formal proof management system\nnum                 1.4         The legacy Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.06.1      The OCaml compiler (virtual package)\nocaml-base-compiler 4.06.1      Official 4.06.1 release\nocaml-config        1           OCaml Switch Configuration\nocamlfind           1.9.3       A library manager for OCaml\n# opam file:\nopam-version: "2.0"\nmaintainer: "Hugo.Herbelin@inria.fr"\nhomepage: "https://github.com/coq-contribs/canon-bdds"\nlicense: "Unknown"\nbuild: [make "-j%{jobs}%"]\ninstall: [make "install"]\nremove: ["rm" "-R" "%{lib}%/coq/user-contrib/CanonBDDs"]\ndepends: [\n  "ocaml"\n  "coq" {>= "8.10" & < "8.11~"}\n]\ntags: [\n  "keyword: BDD"\n  "keyword: BDT"\n  "keyword: finite sets"\n  "keyword: model checking"\n  "keyword: binary decision diagrams"\n  "category: Computer Science/Decision Procedures and Certified Algorithms/Decision procedures"\n  "category: Miscellaneous/Extracted Programs/Decision procedures"\n]\nauthors: [\n  "Emmanuel Ledinot"\n]\nbug-reports: "https://github.com/coq-contribs/canon-bdds/issues"\ndev-repo: "git+https://github.com/coq-contribs/canon-bdds.git"\nsynopsis: "Canonicity of Binary Decision Dags"\ndescription: """\nA proof of unicity and canonicity of Binary Decision Trees and\nBinary Decision Dags. This contrib contains also a development on finite sets."""\nflags: light-uninstall\nurl {\n  src: "https://github.com/coq-contribs/canon-bdds/archive/v8.10.0.tar.gz"\n  checksum: "md5=d23b9b74b3a8af434c1e1907d24c4a9d"\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install \ud83c\udfdc\ufe0f

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-canon-bdds.8.10.0 coq.8.7.1+1
\n
Return code
\n
5120
\n
Output
\n
[NOTE] Package coq is already installed (current version is 8.7.1+1).\nThe following dependencies couldn't be met:\n  - coq-canon-bdds -> coq >= 8.10\nYour request can't be satisfied:\n  - No available version of coq satisfies the constraints\nNo solution found, exiting\n
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
opam remove -y coq; opam install -y --show-action --unlock-base coq-canon-bdds.8.10.0
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Install \ud83d\ude80

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Installation size

\n

No files were installed.

\n

Uninstall \ud83e\uddf9

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub \u00a9 Guillaume Claret \ud83d\udc23\n

\n
\n
\n \n \n \n\n", "meta": {"content_hash": "7d3275022dcd9d6f2f9072c2b4235857", "timestamp": "", "source": "github", "line_count": 175, "max_line_length": 159, "avg_line_length": 40.65714285714286, "alnum_prop": 0.5498243148278286, "repo_name": "coq-bench/coq-bench.github.io", "id": "697add0f6d53a94f193a693766161358df0ffcaa", "size": "7140", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.06.1-2.0.5/released/8.7.1+1/canon-bdds/8.10.0.html", "mode": "33188", "license": "mit", "language": []}} {"text": "package eu.restio.designernews.fragments;\n\nimport android.os.AsyncTask;\nimport android.os.Bundle;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\n\n\nimport com.google.gson.Gson;\nimport com.google.gson.reflect.TypeToken;\n\nimport java.lang.reflect.Type;\nimport java.util.ArrayList;\n\nimport eu.restio.designernews.adapters.JobsListViewAdapter;\nimport eu.restio.designernews.MainActivity;\nimport eu.restio.designernews.R;\nimport eu.restio.designernews.models.Job;\nimport eu.restio.designernews.network.API;\n\npublic class JobsFragment extends android.support.v4.app.ListFragment {\n\n public ArrayList jobs_list;\n private JobsListViewAdapter adapter;\n\n public static JobsFragment newInstance() {\n return new JobsFragment();\n }\n public JobsFragment() {\n }\n\n @Override\n public void onCreate(Bundle savedInstanceState) {\n super.onCreate(savedInstanceState);\n }\n\n @Override\n public View onCreateView(LayoutInflater inflater, ViewGroup container,\n Bundle savedInstanceState) {\n View rootView = inflater.inflate(R.layout.fragment_jobs, container, false);\n adapter = new JobsListViewAdapter(getActivity(), jobs_list);\n setListAdapter(adapter);\n new JobsFetcher().execute();\n return rootView;\n }\n\n class JobsFetcher extends AsyncTask {\n\n @Override\n protected String doInBackground(Void... 
params) {\n API a = API.getInstance();\n return a.prefetch_jobs();\n }\n\n @Override\n protected void onPostExecute(String result) {\n try {\n if (result == null) {\n MainActivity a = (MainActivity) getActivity();\n a.raiseNetworkError();\n }\n\n Gson gson = new Gson();\n Type listType = new TypeToken>() {}.getType();\n\n jobs_list = gson.fromJson(result, listType);\n adapter.addAll(jobs_list);\n adapter.notifyDataSetChanged();\n\n } catch (NullPointerException e) {\n e.printStackTrace();\n }\n }\n }\n\n}\n", "meta": {"content_hash": "6f23c703df38a20439c22d7cf50f4f02", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 83, "avg_line_length": 28.792207792207794, "alnum_prop": 0.6377988272440235, "repo_name": "sharpfuryz/designernews", "id": "58670b8a8dc2c6e97c2fdf993f7be52dfdb1e952", "size": "2217", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/eu/restio/designernews/fragments/JobsFragment.java", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Java", "bytes": "40379"}]}} {"text": "package com.zen.member;\n\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.data.domain.Page;\nimport org.springframework.data.domain.Pageable;\nimport org.springframework.stereotype.Service;\n\n@Service\npublic class MemberService {\n\n @Autowired\n private MemberDAO memberDAO;\n\n public Member findByMobile(final String mobile) {\n return memberDAO.selectByMobile(mobile);\n }\n\n public Member findByEmail(final String mobile) {\n return memberDAO.selectByEmail(mobile);\n }\n\n public Page findAll(final Pageable pageable) {\n return memberDAO.findAll(pageable);\n }\n\n\n}\n", "meta": {"content_hash": "9ea1c4cfe31e048a2fb01045eaeb85cf", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 62, "avg_line_length": 23.0, "alnum_prop": 0.7793880837359098, "repo_name": "nickevin/spring-boot-demo", "id": "90f8e2320383055988b3c35db7e733ac202fdaff", 
"size": "621", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/zen/member/MemberService.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "18548"}, {"name": "JavaScript", "bytes": "1179"}]}} {"text": "(function() {\n 'use strict';\n\n angular\n .module('gastronomeeApp')\n .config(stateConfig);\n\n stateConfig.$inject = ['$stateProvider'];\n\n function stateConfig($stateProvider) {\n $stateProvider\n .state('restaurant-order', {\n parent: 'entity',\n url: '/restaurant-order?page&sort&search',\n data: {\n authorities: ['ROLE_USER'],\n pageTitle: 'gastronomeeApp.restaurantOrder.home.title'\n },\n views: {\n 'content@': {\n templateUrl: 'app/entities/restaurant-order/restaurant-orders.html',\n controller: 'RestaurantOrderController',\n controllerAs: 'vm'\n }\n },\n params: {\n page: {\n value: '1',\n squash: true\n },\n sort: {\n value: 'id,asc',\n squash: true\n },\n search: null\n },\n resolve: {\n pagingParams: ['$stateParams', 'PaginationUtil', function ($stateParams, PaginationUtil) {\n return {\n page: PaginationUtil.parsePage($stateParams.page),\n sort: $stateParams.sort,\n predicate: PaginationUtil.parsePredicate($stateParams.sort),\n ascending: PaginationUtil.parseAscending($stateParams.sort),\n search: $stateParams.search\n };\n }],\n translatePartialLoader: ['$translate', '$translatePartialLoader', function ($translate, $translatePartialLoader) {\n $translatePartialLoader.addPart('restaurantOrder');\n $translatePartialLoader.addPart('restaurantOrderStatus');\n $translatePartialLoader.addPart('global');\n return $translate.refresh();\n }]\n }\n })\n .state('restaurant-order-detail', {\n parent: 'restaurant-order',\n url: '/restaurant-order/{id}',\n data: {\n authorities: ['ROLE_USER'],\n pageTitle: 'gastronomeeApp.restaurantOrder.detail.title'\n },\n views: {\n 'content@': {\n templateUrl: 'app/entities/restaurant-order/restaurant-order-detail.html',\n controller: 
'RestaurantOrderDetailController',\n controllerAs: 'vm'\n }\n },\n resolve: {\n translatePartialLoader: ['$translate', '$translatePartialLoader', function ($translate, $translatePartialLoader) {\n $translatePartialLoader.addPart('restaurantOrder');\n $translatePartialLoader.addPart('restaurantOrderStatus');\n return $translate.refresh();\n }],\n entity: ['$stateParams', 'RestaurantOrder', function($stateParams, RestaurantOrder) {\n return RestaurantOrder.get({id : $stateParams.id}).$promise;\n }],\n previousState: [\"$state\", function ($state) {\n var currentStateData = {\n name: $state.current.name || 'restaurant-order',\n params: $state.params,\n url: $state.href($state.current.name, $state.params)\n };\n return currentStateData;\n }]\n }\n })\n .state('restaurant-order-detail.edit', {\n parent: 'restaurant-order-detail',\n url: '/detail/edit',\n data: {\n authorities: ['ROLE_USER']\n },\n onEnter: ['$stateParams', '$state', '$uibModal', function($stateParams, $state, $uibModal) {\n $uibModal.open({\n templateUrl: 'app/entities/restaurant-order/restaurant-order-dialog.html',\n controller: 'RestaurantOrderDialogController',\n controllerAs: 'vm',\n backdrop: 'static',\n size: 'lg',\n resolve: {\n entity: ['RestaurantOrder', function(RestaurantOrder) {\n return RestaurantOrder.get({id : $stateParams.id}).$promise;\n }]\n }\n }).result.then(function() {\n $state.go('^', {}, { reload: false });\n }, function() {\n $state.go('^');\n });\n }]\n })\n .state('restaurant-order.new', {\n parent: 'restaurant-order',\n url: '/new',\n data: {\n authorities: ['ROLE_USER']\n },\n onEnter: ['$stateParams', '$state', '$uibModal', function($stateParams, $state, $uibModal) {\n $uibModal.open({\n templateUrl: 'app/entities/restaurant-order/restaurant-order-dialog.html',\n controller: 'RestaurantOrderDialogController',\n controllerAs: 'vm',\n backdrop: 'static',\n size: 'lg',\n resolve: {\n entity: function () {\n return {\n rate: null,\n persons: null,\n comment: null,\n created: 
null,\n updated: null,\n status: null,\n id: null\n };\n }\n }\n }).result.then(function() {\n $state.go('restaurant-order', null, { reload: 'restaurant-order' });\n }, function() {\n $state.go('restaurant-order');\n });\n }]\n })\n .state('restaurant-order.edit', {\n parent: 'restaurant-order',\n url: '/{id}/edit',\n data: {\n authorities: ['ROLE_USER']\n },\n onEnter: ['$stateParams', '$state', '$uibModal', function($stateParams, $state, $uibModal) {\n $uibModal.open({\n templateUrl: 'app/entities/restaurant-order/restaurant-order-dialog.html',\n controller: 'RestaurantOrderDialogController',\n controllerAs: 'vm',\n backdrop: 'static',\n size: 'lg',\n resolve: {\n entity: ['RestaurantOrder', function(RestaurantOrder) {\n return RestaurantOrder.get({id : $stateParams.id}).$promise;\n }]\n }\n }).result.then(function() {\n $state.go('restaurant-order', null, { reload: 'restaurant-order' });\n }, function() {\n $state.go('^');\n });\n }]\n })\n .state('restaurant-order.delete', {\n parent: 'restaurant-order',\n url: '/{id}/delete',\n data: {\n authorities: ['ROLE_USER']\n },\n onEnter: ['$stateParams', '$state', '$uibModal', function($stateParams, $state, $uibModal) {\n $uibModal.open({\n templateUrl: 'app/entities/restaurant-order/restaurant-order-delete-dialog.html',\n controller: 'RestaurantOrderDeleteController',\n controllerAs: 'vm',\n size: 'md',\n resolve: {\n entity: ['RestaurantOrder', function(RestaurantOrder) {\n return RestaurantOrder.get({id : $stateParams.id}).$promise;\n }]\n }\n }).result.then(function() {\n $state.go('restaurant-order', null, { reload: 'restaurant-order' });\n }, function() {\n $state.go('^');\n });\n }]\n });\n }\n\n})();\n", "meta": {"content_hash": "e80b46e8ca42bdadfa014dff3d58b68f", "timestamp": "", "source": "github", "line_count": 197, "max_line_length": 130, "avg_line_length": 41.796954314720814, "alnum_prop": 0.43429681807141124, "repo_name": "goxhaj/gastronomee", "id": "2b018b0e846135d1153ad6ce6fe60c2004f2112d", "size": 
"8234", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/webapp/app/entities/restaurant-order/restaurant-order.state.js", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "5006"}, {"name": "CSS", "bytes": "10726"}, {"name": "Gherkin", "bytes": "179"}, {"name": "HTML", "bytes": "289485"}, {"name": "Java", "bytes": "600452"}, {"name": "JavaScript", "bytes": "429624"}, {"name": "Scala", "bytes": "37498"}, {"name": "Shell", "bytes": "7058"}]}} {"text": "Chat application\n================\n\nRed5 WebSocket chat application example.\n\nThe example index.html defaults to using a WebSocket connection to localhost on port 5080. This means that the host and port are riding the same host and port as the http connector which is configured in the red5/conf/jee-container.xml file. Two new steps are required to migrate from the previous versions:\n\n # Add the `websocketEnabled` to the Tomcat server entry in the `conf/jee-container.xml` file\n \n```xml\n \n```\n\n # Add the WebSocket filter servlet to webapps that require WebSocket support\n \n```xml\n \n WebSocketFilter\n org.red5.net.websocket.server.WsFilter\n true\n \n \n WebSocketFilter\n /*\n REQUEST\n FORWARD\n \n```\n\nLastly, remove any separate `webSocketTransport` beans from the `conf/jee-container.xml` file.\n\n```xml\n\n \n \n localhost:8081\n \n \n\n```\n\nBuild the application from the command line with\n\n```sh\nmvn package\n```\n\nDeploy your application by copying the war file into your red5/webapps directory.\n\nAfter deploy is complete, go to http://localhost:5080/chat/ in your browser (open two tabs if you want to chat back and forth on the same computer).\n\nPre-compiled WAR\n----------------\nYou can find [compiled artifacts via Maven](http://mvnrepository.com/artifact/org.red5.demos/chat)\n\n[Direct Download](https://oss.sonatype.org/content/repositories/releases/org/red5/demos/chat/2.0.0/chat-2.0.0.war)\n", "meta": {"content_hash": 
"982c0e040e30a458343908d729c90fa0", "timestamp": "", "source": "github", "line_count": 56, "max_line_length": 321, "avg_line_length": 35.660714285714285, "alnum_prop": 0.6910365548322484, "repo_name": "Red5/red5-websocket-chat", "id": "ca4b601a5f3c024338a7d5c7701bb69631dfac53", "size": "1997", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "apache-2.0", "language": [{"name": "HTML", "bytes": "7758"}, {"name": "Java", "bytes": "14250"}, {"name": "JavaScript", "bytes": "26351"}]}} {"text": "require \"droom/monkeys\"\nrequire \"droom/lazy_hash\"\nrequire \"droom/renderers\"\nrequire \"droom/engine\"\nrequire \"droom/auth_cookie\"\nrequire \"droom/validators\"\nrequire \"droom/searchability\"\nrequire \"droom/taggability\"\nrequire \"droom/folders\"\nrequire \"mail_form\"\n\nmodule Droom \n # Droom configuration is handled by accessors on the Droom base module.\n # Boolean items also offer the interrogative form.\n \n mattr_accessor :root_path,\n :home_url,\n :suggestible_classes,\n :searchable_classes,\n :yt_client,\n :layout,\n :devise_layout,\n :email_layout,\n :email_host,\n :email_from,\n :email_from_name,\n :email_return_path,\n :main_dashboard_modules,\n :margin_dashboard_modules,\n :panels,\n :scrap_types,\n :default_scrap_type,\n :use_chinese_names,\n :use_biogs,\n :use_separate_mobile_number,\n :use_titles,\n :use_honours,\n :use_organisations,\n :enable_mailing_lists,\n :mailman_table_name,\n :mailing_lists_active_by_default,\n :mailing_lists_digest_by_default,\n :show_venue_map,\n :dropbox_app_key,\n :dropbox_app_secret,\n :dropbox_app_name,\n :user_defaults,\n :people_sort,\n :required_calendar_names,\n :stream_shared,\n :aws_bucket_name,\n :all_events_public,\n :all_documents_public,\n :password_pattern,\n :separate_calendars,\n :second_time_zone,\n :require_login_permission,\n :default_permissions\n \n class DroomError < StandardError; end\n class AuthRequired < DroomError; end\n class 
PermissionDenied < DroomError; end\n class PasswordRequired < DroomError; end\n\n class << self\n def home_url\n @@home_url ||= \"http://example.com\"\n end\n \n def layout\n @@layout ||= \"application\"\n end\n\n def devise_layout\n @@devise_layout ||= \"application\"\n end\n\n def email_host\n @@email_host ||= \"please-change-email-host-in-droom-initializer.example.com\"\n end\n\n def email_layout\n @@email_layout ||= \"email\"\n end\n\n def email_from\n @@email_from ||= \"please-change-email_from-in-droom-initializer@example.com\"\n end\n\n def email_from_name\n @@email_from ||= \"Please Set Email-From Name In Droom Initializer\"\n end\n\n def email_return_path\n @@email_return_path ||= email_from\n end\n\n def people_sort\n @@people_sort ||= \"position ASC\"\n end\n\n def sign_out_path\n @@sign_out_path ||= \"/users/sign_out\"\n end\n\n def root_path\n @@root_path ||= \"dashboard#index\"\n end\n\n def main_dashboard_modules\n @@main_dashboard_modules ||= %w{my_future_events my_folders}\n end\n\n def margin_dashboard_modules\n @@margin_dashboard_modules ||= %w{quicksearch stream}\n end\n \n def panels\n @@panels ||= %w{configuration search admin}\n end\n \n def scrap_types\n @@scrap_types ||= %w{image text quote link event document}\n end\n \n def default_scrap_type\n @@default_scrap_type ||= 'text'\n end\n\n def use_chinese_names?\n !!@@use_chinese_names\n end\n\n def use_titles?\n !!@@use_titles\n end\n \n def use_honours?\n !!@@use_honours\n end\n \n def use_biogs?\n !!@@use_biogs\n end\n \n def use_organisations?\n !!@@use_organisations\n end\n\n def stream_shared?\n !!@@stream_shared\n end\n\n def use_separate_mobile_number?\n !!@@use_separate_mobile_number\n end\n\n def enable_mailing_lists?\n !!@@enable_mailing_lists\n end\n\n def calendar_closed?\n !!@@calendar_closed\n end\n \n def all_events_public?\n !!@@all_events_public\n end\n \n def all_documents_public?\n !!@@all_documents_public\n end\n\n def dropbox_app_name\n @@dropbox_app_name ||= 'droom'\n 
end\n\n def mailman_table_name\n @@mailman_table_name ||= 'mailman_mysql'\n end\n\n def mailing_lists_active_by_default?\n !!@@mailing_lists_active_by_default\n end\n\n def mailing_lists_digest_by_default?\n !!@@mailing_lists_digest_by_default\n end\n\n def show_venue_map?\n !!@@show_venue_map\n end\n\n def suggestible_classes\n @@suggestible_classes ||= {\n \"event\" => \"Droom::Event\", \n \"user\" => \"Droom::User\", \n \"document\" => \"Droom::Document\",\n \"group\" => \"Droom::Group\",\n \"venue\" => \"Droom::Venue\"\n }\n end\n\n def add_suggestible_class(label, klass=nil)\n klass ||= label.camelize\n suggestible_classes[label] = klass.to_s\n end\n\n def yt_client\n @@yt_client ||= YouTubeIt::Client.new(:dev_key => \"AI39si473p0K4e6id0ZrM1vniyk8pdbqr67hH39hyFjW_JQoLg9xi6BecWFtraoPMCeYQmRgIc_XudGKVU8tmeQF8VHwjOUg8Q\")\n end\n\n def aws_bucket_name\n @@aws_bucket_name ||= nil\n end\n\n def aws_bucket\n @@aws_bucket ||= Fog::Storage.new(Droom::Engine.config.paperclip_defaults[:fog_credentials]).directories.get(@@aws_bucket_name)\n end\n\n def required_calendar_names\n @@required_calendar_names ||= %w{main stream}\n end\n \n def separate_calendars?\n !!@@separate_calendars\n end\n \n def second_time_zone?\n !!@@second_time_zone\n end\n \n def password_pattern\n @@password_pattern ||= \".{6,}\"\n end\n \n def require_login_permission?\n !!@@require_login_permission\n end\n \n def default_permissions\n @@default_permissions ||= %w{droom.login droom.calendar droom.directory droom.attach droom.library}\n end\n \n \n # Droom's preferences are arbitrary and open-ended. You can ask for any preference key: if it \n # doesn't exist you just get back the default value, or nil if there isn't one. This is where you\n # set the defaults.\n #\n def user_defaults\n @@user_defaults ||= Droom::LazyHash.new({\n :email => {\n :enabled? => true,\n :mailing_lists? => true,\n :event_invitations? => false,\n :digest? => false\n },\n :dropbox => {\n :strategy => \"clicked\",\n :events? 
=> true,\n }\n })\n end\n \n # Here we are overriding droom default settings in a host app initializer to create local default settings.\n # key should be dot-separated and string-like:\n #\n # Droom.set_default('email.digest', true)\n #\n # LazyHash#deep_set is a setter that can take compound keys and set nested values. It's defined in lib/lazy_hash.rb.\n #\n def set_user_default(key, value)\n user_defaults.set(key, value)\n end\n \n def user_default(key)\n user_defaults.get(key)\n end\n end\nend\n", "meta": {"content_hash": "e959be05e562e0154d8984796f72034f", "timestamp": "", "source": "github", "line_count": 280, "max_line_length": 157, "avg_line_length": 25.014285714285716, "alnum_prop": 0.5646773272415763, "repo_name": "spanner/droom", "id": "9c97f646c42c061d7cbc7dfa1e9d363be006a867", "size": "7004", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/droom.rb", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "55911"}, {"name": "CoffeeScript", "bytes": "86057"}, {"name": "Gherkin", "bytes": "1699"}, {"name": "HTML", "bytes": "127765"}, {"name": "JavaScript", "bytes": "514603"}, {"name": "Roff", "bytes": "16257"}, {"name": "Ruby", "bytes": "289814"}]}} {"text": "\n\n\n@class APElement;\n\n\n@interface APDocument : NSObject {\n\tAPElement *rootElement;\n}\n\n+ (id)documentWithXMLString:(NSString*)anXMLString;\n- (id)initWithRootElement:(APElement*)aRootElement;\n- (id)initWithString:(NSString*)anXMLString;\n- (APElement*)rootElement;\n- (NSString*)prettyXML;\n- (NSString*)xml;\n\n@end\n", "meta": {"content_hash": "8bb022073d48509c338b43c03b82117e", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 51, "avg_line_length": 17.22222222222222, "alnum_prop": 0.7451612903225806, "repo_name": "lechium/yourTubeiOS", "id": "7dc317fe0d4d05cdf525609048c1ec75a2f6331a", "size": "1094", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "yourTube/APDocument/APDocument.h", "mode": "33188", 
"license": "mit", "language": [{"name": "C", "bytes": "841"}, {"name": "C++", "bytes": "13317"}, {"name": "CSS", "bytes": "3351"}, {"name": "HTML", "bytes": "7938"}, {"name": "JavaScript", "bytes": "44689"}, {"name": "Logos", "bytes": "3192"}, {"name": "Makefile", "bytes": "139947"}, {"name": "Objective-C", "bytes": "2765772"}, {"name": "Objective-C++", "bytes": "68419"}, {"name": "Perl", "bytes": "141095"}, {"name": "Perl 6", "bytes": "911"}, {"name": "Shell", "bytes": "3821"}, {"name": "Vim script", "bytes": "3569"}]}} {"text": "\n\n\n \n \n \n \n \n \n \n", "meta": {"content_hash": "f842c2afab7c849d910b967669952bc7", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 71, "avg_line_length": 38.10526315789474, "alnum_prop": 0.6284530386740331, "repo_name": "jaychang0917/android.nRecyclerView", "id": "1eae583b24e5143124108c120da280adca032ee2", "size": "724", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/res/layout/cell_loading.xml", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "//\n// Ce fichier a \u00e9t\u00e9 g\u00e9n\u00e9r\u00e9 par l'impl\u00e9mentation de r\u00e9f\u00e9rence JavaTM Architecture for XML Binding (JAXB), v2.2.7 \n// Voir http://java.sun.com/xml/jaxb \n// Toute modification apport\u00e9e \u00e0 ce fichier sera perdue lors de la recompilation du sch\u00e9ma source. \n// G\u00e9n\u00e9r\u00e9 le : 2018.01.08 \u00e0 11:19:58 AM GMT \n//\n\n\npackage com.in28minutes.courses;\n\nimport javax.xml.bind.annotation.XmlAccessType;\nimport javax.xml.bind.annotation.XmlAccessorType;\nimport javax.xml.bind.annotation.XmlRootElement;\nimport javax.xml.bind.annotation.XmlType;\n\n\n/**\n *

Classe Java pour anonymous complex type.\n * \n *

Le fragment de sch\u00e9ma suivant indique le contenu attendu figurant dans cette classe.\n * \n *

\n * <complexType>\n *   <complexContent>\n *     <restriction base=\"{http://www.w3.org/2001/XMLSchema}anyType\">\n *     </restriction>\n *   </complexContent>\n * </complexType>\n * 
\n * \n * \n */\n@XmlAccessorType(XmlAccessType.FIELD)\n@XmlType(name = \"\")\n@XmlRootElement(name = \"GetAllCourseDetailsRequest\")\npublic class GetAllCourseDetailsRequest {\n\n\n}\n", "meta": {"content_hash": "8b972687503ad3224e755d1ef318e511", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 112, "avg_line_length": 28.205128205128204, "alnum_prop": 0.7218181818181818, "repo_name": "jamalgithub/workdev", "id": "4da6e5ccc4583bfdf2e08cc0683e90a5e3ba3293", "size": "1116", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "in28Munites-soap-course-management/src/main/java/com/in28minutes/courses/GetAllCourseDetailsRequest.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "12727"}, {"name": "Java", "bytes": "2899130"}, {"name": "PLSQL", "bytes": "5617"}, {"name": "TSQL", "bytes": "2886"}]}} {"text": "FROM ubuntu:14.04\n\nENV DEBIAN_FRONTEND noninteractive\n\nRUN apt-get update && apt-get upgrade -y && \\\n apt-get install -y openjdk-7-jre-headless\n", "meta": {"content_hash": "f1e8113add9488ea03797d5d87b5d1a2", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 45, "avg_line_length": 24.5, "alnum_prop": 0.7278911564625851, "repo_name": "zooniverse/docker-java", "id": "e6024e1a7576dc451cafa099444cc7af405777f6", "size": "147", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Dockerfile", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "\n \n \n A Veteran reported to an outpatient VA clinic today and submitted a document that pertained to another Veteran. Several weeks ago this veteran was provided a future appointment reminder document which pertained to another Veteran. 
This Veteran was turning this into the clinic after he now realized that the document pertained to another Veteran.\n B66DAE70-62A0-11E3-958A-14109FCE954D\n \n 2013-01-01T00:00:00\n \n \n \n \n \n swidup\n \n\n \n \n \n \n \n United States Department of Veterans Affairs\n \n \n \n \n US\n \n \n PA\n \n \n \n\n \n \n \n Documents\n \n \n Confidentiality\n Medical\n \n \n \n \n \n \n \n \n \n Yes\n Unknown\n \n \n vcdb\n \n \n \n veris2stix\n MITRE\n 0.1\n \n \n VERIS schema\n Verizon\n 1.3.0\n \n \n \n http://vcdb.org/pdf/va-security.pdf\n \n \n \n \n \n \n \n Insider Threat\n Unknown\n \n \n NA\n \n \n \n\n", "meta": {"content_hash": "3df67bc3fdb3cbf0674a33c0a602d5bf", "timestamp": "", "source": "github", "line_count": 113, "max_line_length": 391, "avg_line_length": 65.52212389380531, "alnum_prop": 0.6704484062668827, "repo_name": "rpiazza/veris-to-stix", "id": "05a4cc0f9566007a14ca9ae9bd20088b7ce46ab5", "size": "7404", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "results/B66DAE70-62A0-11E3-958A-14109FCE954D.xml", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Python", "bytes": "48356"}]}} {"text": "\r\n#ifndef itkCacheableScalarFunction_h\r\n#define itkCacheableScalarFunction_h\r\n\r\n#include \"itkArray.h\"\r\n#include \"itkIntTypes.h\"\r\n#include \"ITKBiasCorrectionExport.h\"\r\n\r\nnamespace itk\r\n{\r\n/** \\class CacheableScalarFunction\r\n * \\brief function cache implementation\r\n *\r\n * This is the base class for continuous scalar functions which\r\n * needs cache for their pre-evaluated function returns.\r\n *\r\n * The internal cache is created using the upper- and lower-bound domain\r\n * values of the functional form (f(x))of subclasses of this class. 
So the\r\n * cache only stores pre-evaluated values between f(lower-bound) and\r\n * f(upper-bound).\r\n *\r\n * To create a cache for continuous function, it uses sampling.\r\n * With the given sample number , upper-bound, and lower-bound, it calculates\r\n * interval within the ranges. It pre-evaluates and save f(x)\r\n * where x = lower-bound + interval * [0 - sample number]\r\n *\r\n * If a subclass of this class want to use a cache, it should\r\n * explicitly call CreateCache(...) member function. GetCachedValue(x) will\r\n * return pre-evaluated f(x) value. However, the return value from\r\n * GetCachedValue(x) might be different from the exact return value from f(x)\r\n * which is Evaluate(x) member function of subclasses of this class, because\r\n * The GetCachedValue(x) member function internally converts x to cache table\r\n * index and the conversion involves with truncation. So, users can think the\r\n * cached value as an approximate to exact function return.\r\n *\r\n * In some case, approximate values can be useful.\r\n * For example, CompositeValleyFunction can be used as an M-estimator and\r\n * it is currently used for MRIBiasFieldCorrectionFilter\r\n * as an energy function. The bias field estimation requires calculation of\r\n * energy values again and again for each iteration.\r\n * \\ingroup ITKBiasCorrection\r\n */\r\nclass ITKBiasCorrection_EXPORT CacheableScalarFunction\r\n{\r\npublic:\r\n /** Constructor. */\r\n CacheableScalarFunction();\r\n\r\n /** Destructor. */\r\n virtual ~CacheableScalarFunction();\r\n\r\n /** Function's input and output value type. */\r\n typedef double MeasureType;\r\n typedef Array< MeasureType > MeasureArrayType;\r\n\r\n /** Get the number of samples between the lower-bound and upper-bound\r\n * of the cache table. */\r\n SizeValueType GetNumberOfSamples() { return m_NumberOfSamples; }\r\n\r\n /** Check if the internal cache table and its values are valid. 
*/\r\n bool IsCacheAvailable() { return m_CacheAvailable; }\r\n\r\n /** Get the upper-bound of domain that is used for filling the cache table. */\r\n double GetCacheUpperBound() { return m_CacheUpperBound; }\r\n\r\n /** Get the lower-bound of domain that is used for filling the cache table. */\r\n double GetCacheLowerBound() { return m_CacheLowerBound; }\r\n\r\n /** y = f(x)\r\n * Subclasses of this class should override this member function\r\n * to provide their own functional operation . */\r\n virtual MeasureType Evaluate(MeasureType x);\r\n\r\n /** Gets the interval of each cell between the upper and lower bound */\r\n double GetInterval()\r\n { return m_TableInc; }\r\n\r\n /** y = f(x) = (approximately) cache_table(index(x))\r\n * Get the function return using the internal cache table\r\n * NOTE: Since the index calculation needs conversion from double\r\n * to int, truncation happens. As a result, the return values from\r\n * Evaluate(x) and GetCachedValue(x) may not be same for the same x. */\r\n inline MeasureType GetCachedValue(MeasureType x)\r\n {\r\n if ( x > m_CacheUpperBound || x < m_CacheLowerBound )\r\n {\r\n throw ExceptionObject(__FILE__, __LINE__);\r\n }\r\n // access table\r\n int index = (int)( ( x - m_CacheLowerBound ) / m_TableInc );\r\n return m_CacheTable[index];\r\n }\r\n\r\nprotected:\r\n /** Create the internal cache table and fill it with\r\n * pre-evaluated values. */\r\n void CreateCache(double lowerBound, double upperBound, SizeValueType sampleSize);\r\n\r\nprivate:\r\n /** The number of samples will be precalcualted and saved in the\r\n * cache table. */\r\n SizeValueType m_NumberOfSamples;\r\n\r\n /** Storage for the precalcualted function values. */\r\n MeasureArrayType m_CacheTable;\r\n\r\n /** The upper-bound of domain that is used for filling the cache table. */\r\n double m_CacheUpperBound;\r\n\r\n /** The lower-bound of domain that is used for filling the cache table. 
*/\r\n double m_CacheLowerBound;\r\n\r\n /** Sampling interval for function evaluation. */\r\n double m_TableInc;\r\n\r\n /** Is the cache available? */\r\n bool m_CacheAvailable;\r\n}; // end of class\r\n} // end of namespace itk\r\n#endif\r\n", "meta": {"content_hash": "51d37c9de8de62a78f07683e273c75f2", "timestamp": "", "source": "github", "line_count": 120, "max_line_length": 83, "avg_line_length": 37.975, "alnum_prop": 0.7072635505815229, "repo_name": "RayRuizhiLiao/ITK_4D", "id": "7e8253127e666055700fad8056f6299ac07b0f1c", "size": "5348", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Modules/Filtering/BiasCorrection/include/itkCacheableScalarFunction.h", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "572693"}, {"name": "C++", "bytes": "36720665"}, {"name": "CMake", "bytes": "1448020"}, {"name": "CSS", "bytes": "18346"}, {"name": "Java", "bytes": "29480"}, {"name": "Objective-C++", "bytes": "6753"}, {"name": "Perl", "bytes": "6113"}, {"name": "Python", "bytes": "385395"}, {"name": "Ruby", "bytes": "309"}, {"name": "Shell", "bytes": "92050"}, {"name": "Tcl", "bytes": "75202"}, {"name": "XSLT", "bytes": "8874"}]}} {"text": "SYNONYM\n\n#### According to\nThe Catalogue of Life, 3rd January 2011\n\n#### Published in\nnull\n\n#### Original name\nnull\n\n### Remarks\nnull", "meta": {"content_hash": "5468699806c3bf5dbf4a32ad1f6a97ff", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.23076923076923, "alnum_prop": 0.6917293233082706, "repo_name": "mdoering/backbone", "id": "e99f2ee32c12ff345dd8995a1012963264085ae6", "size": "183", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/incertae sedis/Pilosella stoloniflora/ Syn. 
Pilosella kihlmanii/README.md", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "hasErrors()) {\n $user = $this->getUser();\n if (!$user || !$user->validatePassword($this->password)) {\n $this->addError($attribute, '\u7528\u6237\u540d\u6216\u5bc6\u7801\u4e0d\u6b63\u786e');\n }\n }\n }\n\n /**\n * Logs in a user using the provided username and password.\n *\n * @return bool whether the user is logged in successfully\n */\n public function login()\n {\n if ($this->validate()) {\n return Yii::$app->user->login($this->getUser(), $this->rememberMe ? 3600 * 24 * 30 : 0);\n } else {\n return false;\n }\n }\n\n /**\n * Finds user by [[username]]\n *\n * @return User|null\n */\n protected function getUser()\n {\n if ($this->_user === null) {\n $this->_user = User::findByUsername($this->username);\n }\n\n return $this->_user;\n }\n}\n", "meta": {"content_hash": "0f3d144d66f7d9c3c31c32df46d3f843", "timestamp": "", "source": "github", "line_count": 78, "max_line_length": 100, "avg_line_length": 23.525641025641026, "alnum_prop": 0.5340599455040872, "repo_name": "evoshop/evo_maa", "id": "49f1ebec1788c60fd68d3486c1bf5fa194aa60ea", "size": "1853", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "common/models/LoginForm.php", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ApacheConf", "bytes": "538"}, {"name": "Batchfile", "bytes": "1546"}, {"name": "CSS", "bytes": "348105"}, {"name": "JavaScript", "bytes": "2264688"}, {"name": "PHP", "bytes": "217575"}, {"name": "Shell", "bytes": "3256"}]}} {"text": "\n\npackage acteve.explorer;\n\nclass RWRecord {\n\tint id;\n\tint fldId;\n\n\tRWRecord(int id, int fldId)\n\t{\n\t\tthis.id = id;\n\t\tthis.fldId = fldId;\n\t}\n\n\tpublic boolean equals(Object other) {\n\t\tif(!(other instanceof RWRecord))\n\t\t\treturn false;\n\t\tRWRecord otherRecord = (RWRecord) other;\n\t\treturn this.id == otherRecord.id && this.fldId == otherRecord.fldId;\n\t}\n\n\tpublic int hashCode() {\n\t\treturn 
(id * fldId) % 13;\n\t}\n}\n", "meta": {"content_hash": "2988c75ba3ce72a72fa7d92d10cf380c", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 70, "avg_line_length": 16.32, "alnum_prop": 0.6617647058823529, "repo_name": "JulianSchuette/ConDroid", "id": "5cef558f6c1a574fed546611032911039484d300", "size": "2037", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "src/main/java/acteve/explorer/RWRecord.java", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "C", "bytes": "6556"}, {"name": "Java", "bytes": "779008"}, {"name": "Shell", "bytes": "1578"}]}} {"text": "const BinExp = require('./binexp.js');\nconst Type = require('./type');\nconst Context = require('../semantic/context.js');\n\nclass BinExpAdd extends BinExp {\n constructor(firstExp, addop, secExp) {\n super();\n this.firstExp = firstExp;\n this.binop = addop;\n this.secExp = secExp;\n }\n\n toString() {\n return `(Add : ${this.firstExp.toString()} ${this.binop.toString()} ${this.secExp.toString()})`;\n }\n\n analyze(context) {\n this.firstExp.type = this.firstExp.analyze(context);\n\n if (this.secExp.toString().length > 0) { // gotta ensure that somethings there\n this.secExp.type = this.secExp.analyze(context);\n if (['+', '-'].includes(this.binop)) {\n const isNumber = this.firstExp.type.intCheck() || this.firstExp.type.floatCheck();\n const isNumberTwo = this.secExp.type.intCheck() || this.secExp.type.floatCheck();\n\n\n if (!isNumber || !isNumberTwo) {\n throw Error('Wrong operands, expected numbers');\n }\n\n const isFloat = this.firstExp.type.floatCheck();\n const isFloatTwo = this.secExp.type.floatCheck();\n\n if (isFloat || isFloatTwo) {\n this.type = Type.FLOAT;\n } else {\n this.type = Type.INT;\n }\n }\n }\n return this.type;\n }\n}\n\nmodule.exports = BinExpAdd;\n", "meta": {"content_hash": "beb5d1c8d24d76bf4c1ff70d5fb368d1", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 100, "avg_line_length": 
28.644444444444446, "alnum_prop": 0.6113266097750194, "repo_name": "mitchelljfs/madmaan", "id": "ecfef08a7853920206f1a562b3f3546eb6bb9e39", "size": "1289", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "entities/binexpAdd.js", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "34944"}]}} {"text": "\n\n\n \n \n \n \n \n .action.towishlist\n \n \n \n \n \n\n", "meta": {"content_hash": "a20d4b8cf4ee83bc1b5d6982fa736b4b", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 153, "avg_line_length": 41.0, "alnum_prop": 0.593974175035868, "repo_name": "enettolima/magento-training", "id": "1e4f27875c6bdae65b6a4e8cb2cdb1dcfffca32b", "size": "795", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "magento2ce/app/code/Magento/Wishlist/view/frontend/layout/catalog_product_view.xml", "mode": "33188", "license": "mit", "language": [{"name": "ApacheConf", "bytes": "22648"}, {"name": "CSS", "bytes": "3382928"}, {"name": "HTML", "bytes": "8749335"}, {"name": "JavaScript", "bytes": "7355635"}, {"name": "PHP", "bytes": "58607662"}, {"name": "Perl", "bytes": "10258"}, {"name": "Shell", "bytes": "41887"}, {"name": "XSLT", "bytes": "19889"}]}} {"text": "\npackage com.alibaba.rocketmq.common;\n\nimport com.alibaba.rocketmq.remoting.common.RemotingHelper;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.File;\nimport java.io.IOException;\nimport java.lang.management.ManagementFactory;\nimport java.lang.management.RuntimeMXBean;\nimport java.net.Inet4Address;\nimport java.net.InetAddress;\nimport java.net.NetworkInterface;\nimport java.text.NumberFormat;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.*;\nimport java.util.zip.CRC32;\nimport java.util.zip.DeflaterOutputStream;\nimport java.util.zip.InflaterInputStream;\n\n\n/**\n * @author shijia.wxr\n */\npublic class UtilAll {\n public static final String 
YYYY_MM_DD_HH_MM_SS = \"yyyy-MM-dd HH:mm:ss\";\n public static final String YYYY_MM_DD_HH_MM_SS_SSS = \"yyyy-MM-dd#HH:mm:ss:SSS\";\n public static final String YYYY_MMDD_HHMMSS = \"yyyyMMddHHmmss\";\n\n\n public static int getPid() {\n RuntimeMXBean runtime = ManagementFactory.getRuntimeMXBean();\n String name = runtime.getName(); // format: \"pid@hostname\"\n try {\n return Integer.parseInt(name.substring(0, name.indexOf('@')));\n } catch (Exception e) {\n return -1;\n }\n }\n\n public static String currentStackTrace() {\n StringBuilder sb = new StringBuilder();\n StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();\n for (StackTraceElement ste : stackTrace) {\n sb.append(\"\\n\\t\");\n sb.append(ste.toString());\n }\n\n return sb.toString();\n }\n\n public static String offset2FileName(final long offset) {\n final NumberFormat nf = NumberFormat.getInstance();\n nf.setMinimumIntegerDigits(20);\n nf.setMaximumFractionDigits(0);\n nf.setGroupingUsed(false);\n return nf.format(offset);\n }\n\n public static long computeEclipseTimeMilliseconds(final long beginTime) {\n return System.currentTimeMillis() - beginTime;\n }\n\n\n public static boolean isItTimeToDo(final String when) {\n String[] whiles = when.split(\";\");\n if (whiles != null && whiles.length > 0) {\n Calendar now = Calendar.getInstance();\n for (String w : whiles) {\n int nowHour = Integer.parseInt(w);\n if (nowHour == now.get(Calendar.HOUR_OF_DAY)) {\n return true;\n }\n }\n }\n\n return false;\n }\n\n\n public static String timeMillisToHumanString() {\n return timeMillisToHumanString(System.currentTimeMillis());\n }\n\n\n public static String timeMillisToHumanString(final long t) {\n Calendar cal = Calendar.getInstance();\n cal.setTimeInMillis(t);\n return String.format(\"%04d%02d%02d%02d%02d%02d%03d\", cal.get(Calendar.YEAR), cal.get(Calendar.MONTH) + 1,\n cal.get(Calendar.DAY_OF_MONTH), cal.get(Calendar.HOUR_OF_DAY), cal.get(Calendar.MINUTE), cal.get(Calendar.SECOND),\n 
cal.get(Calendar.MILLISECOND));\n }\n\n\n public static long computNextMorningTimeMillis() {\n Calendar cal = Calendar.getInstance();\n cal.setTimeInMillis(System.currentTimeMillis());\n cal.add(Calendar.DAY_OF_MONTH, 1);\n cal.set(Calendar.HOUR_OF_DAY, 0);\n cal.set(Calendar.MINUTE, 0);\n cal.set(Calendar.SECOND, 0);\n cal.set(Calendar.MILLISECOND, 0);\n\n return cal.getTimeInMillis();\n }\n\n\n public static long computNextMinutesTimeMillis() {\n Calendar cal = Calendar.getInstance();\n cal.setTimeInMillis(System.currentTimeMillis());\n cal.add(Calendar.DAY_OF_MONTH, 0);\n cal.add(Calendar.HOUR_OF_DAY, 0);\n cal.add(Calendar.MINUTE, 1);\n cal.set(Calendar.SECOND, 0);\n cal.set(Calendar.MILLISECOND, 0);\n\n return cal.getTimeInMillis();\n }\n\n\n public static long computNextHourTimeMillis() {\n Calendar cal = Calendar.getInstance();\n cal.setTimeInMillis(System.currentTimeMillis());\n cal.add(Calendar.DAY_OF_MONTH, 0);\n cal.add(Calendar.HOUR_OF_DAY, 1);\n cal.set(Calendar.MINUTE, 0);\n cal.set(Calendar.SECOND, 0);\n cal.set(Calendar.MILLISECOND, 0);\n\n return cal.getTimeInMillis();\n }\n\n\n public static long computNextHalfHourTimeMillis() {\n Calendar cal = Calendar.getInstance();\n cal.setTimeInMillis(System.currentTimeMillis());\n cal.add(Calendar.DAY_OF_MONTH, 0);\n cal.add(Calendar.HOUR_OF_DAY, 1);\n cal.set(Calendar.MINUTE, 30);\n cal.set(Calendar.SECOND, 0);\n cal.set(Calendar.MILLISECOND, 0);\n\n return cal.getTimeInMillis();\n }\n\n\n public static String timeMillisToHumanString2(final long t) {\n Calendar cal = Calendar.getInstance();\n cal.setTimeInMillis(t);\n return String.format(\"%04d-%02d-%02d %02d:%02d:%02d,%03d\",\n cal.get(Calendar.YEAR),\n cal.get(Calendar.MONTH) + 1,\n cal.get(Calendar.DAY_OF_MONTH),\n cal.get(Calendar.HOUR_OF_DAY),\n cal.get(Calendar.MINUTE),\n cal.get(Calendar.SECOND),\n cal.get(Calendar.MILLISECOND));\n }\n\n\n public static String timeMillisToHumanString3(final long t) {\n Calendar cal = Calendar.getInstance();\n 
cal.setTimeInMillis(t);\n return String.format(\"%04d%02d%02d%02d%02d%02d\",\n cal.get(Calendar.YEAR),\n cal.get(Calendar.MONTH) + 1,\n cal.get(Calendar.DAY_OF_MONTH),\n cal.get(Calendar.HOUR_OF_DAY),\n cal.get(Calendar.MINUTE),\n cal.get(Calendar.SECOND));\n }\n\n\n public static double getDiskPartitionSpaceUsedPercent(final String path) {\n if (null == path || path.isEmpty())\n return -1;\n\n try {\n File file = new File(path);\n if (!file.exists()) {\n boolean result = file.mkdirs();\n if (!result) {\n }\n }\n\n long totalSpace = file.getTotalSpace();\n long freeSpace = file.getFreeSpace();\n long usedSpace = totalSpace - freeSpace;\n if (totalSpace > 0) {\n return usedSpace / (double) totalSpace;\n }\n } catch (Exception e) {\n return -1;\n }\n\n return -1;\n }\n\n\n public static final int crc32(byte[] array) {\n if (array != null) {\n return crc32(array, 0, array.length);\n }\n\n return 0;\n }\n\n\n public static final int crc32(byte[] array, int offset, int length) {\n CRC32 crc32 = new CRC32();\n crc32.update(array, offset, length);\n return (int) (crc32.getValue() & 0x7FFFFFFF);\n }\n\n final static char[] HEX_ARRAY = \"0123456789ABCDEF\".toCharArray();\n\n public static String bytes2string(byte[] src) {\n char[] hexChars = new char[src.length * 2];\n for (int j = 0; j < src.length; j++) {\n int v = src[j] & 0xFF;\n hexChars[j * 2] = HEX_ARRAY[v >>> 4];\n hexChars[j * 2 + 1] = HEX_ARRAY[v & 0x0F];\n }\n return new String(hexChars);\n }\n\n public static byte[] string2bytes(String hexString) {\n if (hexString == null || hexString.equals(\"\")) {\n return null;\n }\n hexString = hexString.toUpperCase();\n int length = hexString.length() / 2;\n char[] hexChars = hexString.toCharArray();\n byte[] d = new byte[length];\n for (int i = 0; i < length; i++) {\n int pos = i * 2;\n d[i] = (byte) (charToByte(hexChars[pos]) << 4 | charToByte(hexChars[pos + 1]));\n }\n return d;\n }\n\n\n private static byte charToByte(char c) {\n return (byte) 
\"0123456789ABCDEF\".indexOf(c);\n }\n\n\n public static byte[] uncompress(final byte[] src) throws IOException {\n byte[] result = src;\n byte[] uncompressData = new byte[src.length];\n ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(src);\n InflaterInputStream inflaterInputStream = new InflaterInputStream(byteArrayInputStream);\n ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(src.length);\n\n try {\n while (true) {\n int len = inflaterInputStream.read(uncompressData, 0, uncompressData.length);\n if (len <= 0) {\n break;\n }\n byteArrayOutputStream.write(uncompressData, 0, len);\n }\n byteArrayOutputStream.flush();\n result = byteArrayOutputStream.toByteArray();\n } catch (IOException e) {\n throw e;\n } finally {\n try {\n byteArrayInputStream.close();\n } catch (IOException e) {\n }\n try {\n inflaterInputStream.close();\n } catch (IOException e) {\n }\n try {\n byteArrayOutputStream.close();\n } catch (IOException e) {\n }\n }\n\n return result;\n }\n\n\n public static byte[] compress(final byte[] src, final int level) throws IOException {\n byte[] result = src;\n ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(src.length);\n java.util.zip.Deflater defeater = new java.util.zip.Deflater(level);\n DeflaterOutputStream deflaterOutputStream = new DeflaterOutputStream(byteArrayOutputStream, defeater);\n try {\n deflaterOutputStream.write(src);\n deflaterOutputStream.finish();\n deflaterOutputStream.close();\n result = byteArrayOutputStream.toByteArray();\n } catch (IOException e) {\n defeater.end();\n throw e;\n } finally {\n try {\n byteArrayOutputStream.close();\n } catch (IOException ignored) {\n }\n\n defeater.end();\n }\n\n return result;\n }\n\n\n public static int asInt(String str, int defaultValue) {\n try {\n return Integer.parseInt(str);\n } catch (Exception e) {\n return defaultValue;\n }\n }\n\n\n public static long asLong(String str, long defaultValue) {\n try {\n return 
Long.parseLong(str);\n } catch (Exception e) {\n return defaultValue;\n }\n }\n\n\n public static String formatDate(Date date, String pattern) {\n SimpleDateFormat df = new SimpleDateFormat(pattern);\n return df.format(date);\n }\n\n\n public static Date parseDate(String date, String pattern) {\n SimpleDateFormat df = new SimpleDateFormat(pattern);\n try {\n return df.parse(date);\n } catch (ParseException e) {\n return null;\n }\n }\n\n\n public static String responseCode2String(final int code) {\n return Integer.toString(code);\n }\n\n\n public static String frontStringAtLeast(final String str, final int size) {\n if (str != null) {\n if (str.length() > size) {\n return str.substring(0, size);\n }\n }\n\n return str;\n }\n\n\n public static boolean isBlank(String str) {\n int strLen;\n if (str == null || (strLen = str.length()) == 0) {\n return true;\n }\n for (int i = 0; i < strLen; i++) {\n if (!Character.isWhitespace(str.charAt(i))) {\n return false;\n }\n }\n return true;\n }\n\n\n public static String jstack() {\n return jstack(Thread.getAllStackTraces());\n }\n\n\n public static String jstack(Map map) {\n StringBuilder result = new StringBuilder();\n try {\n Iterator> ite = map.entrySet().iterator();\n while (ite.hasNext()) {\n Map.Entry entry = ite.next();\n StackTraceElement[] elements = entry.getValue();\n Thread thread = entry.getKey();\n if (elements != null && elements.length > 0) {\n String threadName = entry.getKey().getName();\n result.append(String.format(\"%-40sTID: %d STATE: %s%n\", threadName, thread.getId(), thread.getState()));\n for (StackTraceElement el : elements) {\n result.append(String.format(\"%-40s%s%n\", threadName, el.toString()));\n }\n result.append(\"\\n\");\n }\n }\n } catch (Throwable e) {\n result.append(RemotingHelper.exceptionSimpleDesc(e));\n }\n\n return result.toString();\n }\n\n public static boolean isInternalIP(byte[] ip) {\n if (ip.length != 4) {\n throw new RuntimeException(\"illegal ipv4 bytes\");\n }\n\n\n 
//10.0.0.0~10.255.255.255\n //172.16.0.0~172.31.255.255\n //192.168.0.0~192.168.255.255\n if (ip[0] == (byte) 10) {\n\n return true;\n } else if (ip[0] == (byte) 172) {\n if (ip[1] >= (byte) 16 && ip[1] <= (byte) 31) {\n return true;\n }\n } else if (ip[0] == (byte) 192) {\n if (ip[1] == (byte) 168) {\n return true;\n }\n }\n return false;\n }\n\n private static boolean ipCheck(byte[] ip) {\n if (ip.length != 4) {\n throw new RuntimeException(\"illegal ipv4 bytes\");\n }\n\n// if (ip[0] == (byte)30 && ip[1] == (byte)10 && ip[2] == (byte)163 && ip[3] == (byte)120) {\n// }\n\n\n if (ip[0] >= (byte) 1 && ip[0] <= (byte) 126) {\n if (ip[1] == (byte) 1 && ip[2] == (byte) 1 && ip[3] == (byte) 1) {\n return false;\n }\n if (ip[1] == (byte) 0 && ip[2] == (byte) 0 && ip[3] == (byte) 0) {\n return false;\n }\n return true;\n } else if (ip[0] >= (byte) 128 && ip[0] <= (byte) 191) {\n if (ip[2] == (byte) 1 && ip[3] == (byte) 1) {\n return false;\n }\n if (ip[2] == (byte) 0 && ip[3] == (byte) 0) {\n return false;\n }\n return true;\n } else if (ip[0] >= (byte) 192 && ip[0] <= (byte) 223) {\n if (ip[3] == (byte) 1) {\n return false;\n }\n if (ip[3] == (byte) 0) {\n return false;\n }\n return true;\n }\n return false;\n }\n\n public static String ipToIPv4Str(byte[] ip) {\n if (ip.length != 4) {\n return null;\n }\n return new StringBuilder().append(ip[0] & 0xFF).append(\".\").append(\n ip[1] & 0xFF).append(\".\").append(ip[2] & 0xFF)\n .append(\".\").append(ip[3] & 0xFF).toString();\n }\n\n public static byte[] getIP() {\n try {\n Enumeration allNetInterfaces = NetworkInterface.getNetworkInterfaces();\n InetAddress ip = null;\n byte[] internalIP = null;\n while (allNetInterfaces.hasMoreElements()) {\n NetworkInterface netInterface = (NetworkInterface) allNetInterfaces.nextElement();\n Enumeration addresses = netInterface.getInetAddresses();\n while (addresses.hasMoreElements()) {\n ip = (InetAddress) addresses.nextElement();\n if (ip != null && ip instanceof Inet4Address) {\n 
byte[] ipByte = ip.getAddress();\n if (ipByte.length == 4) {\n if (ipCheck(ipByte)) {\n if (!isInternalIP(ipByte)) {\n return ipByte;\n } else if (internalIP == null) {\n internalIP = ipByte;\n }\n }\n }\n }\n }\n }\n if (internalIP != null) {\n return internalIP;\n } else {\n throw new RuntimeException(\"Can not get local ip\");\n }\n } catch (Exception e) {\n throw new RuntimeException(\"Can not get local ip\", e);\n }\n }\n}\n", "meta": {"content_hash": "34f100d92b29da04da090edcffa6ecbf", "timestamp": "", "source": "github", "line_count": 510, "max_line_length": 130, "avg_line_length": 32.0, "alnum_prop": 0.5378063725490196, "repo_name": "lollipopjin/incubator-rocketmq", "id": "4429e3d4105eec9d80a6f437c1dccdd5562649dc", "size": "17118", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rocketmq-common/src/main/java/com/alibaba/rocketmq/common/UtilAll.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "1747"}, {"name": "Java", "bytes": "2761168"}, {"name": "Shell", "bytes": "34084"}]}} {"text": "Consolidated [TypeScript](https://www.typescriptlang.org/) dependencies for transpiling ES.\n\n\n\n\n\n\n---\n### License: MIT\n", "meta": {"content_hash": "c79118233afe785442ed723d33a2dd07", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 91, "avg_line_length": 13.222222222222221, "alnum_prop": 0.7142857142857143, "repo_name": "philcockfield/babel", "id": "ccd257f81bac00cb8ecbf6273057984ec0dc9bcb", "size": "135", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "js-typescript/README.md", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "1398"}]}} {"text": "\n * @author Lukas Smith (PEAR MDB2 library)\n * @author Lorenzo Alberton (PEAR MDB2 Interbase driver)\n * @version $Revision: 4037 $\n * @link www.phpdoctrine.org\n * @since 1.0\n */\nclass Doctrine_Connection_Firebird extends Doctrine_Connection\n{\n /**\n * @var string $driverName 
the name of this connection driver\n */\n protected $driverName = 'Firebird';\n\n /**\n * the constructor\n *\n * @param Doctrine_Manager $manager\n * @param PDO $pdo database handle\n */\n public function __construct(Doctrine_Manager $manager, $adapter)\n {\n\n $this->supported = array(\n 'sequences' => true,\n 'indexes' => true,\n 'affected_rows' => true,\n 'summary_functions' => true,\n 'order_by_text' => true,\n 'transactions' => true,\n 'savepoints' => true,\n 'current_id' => true,\n 'limit_queries' => 'emulated',\n 'LOBs' => true,\n 'replace' => 'emulated',\n 'sub_selects' => true,\n 'auto_increment' => true,\n 'primary_key' => true,\n 'result_introspection' => true,\n 'prepared_statements' => true,\n 'identifier_quoting' => false,\n 'pattern_escaping' => true\n );\n // initialize all driver options\n /**\n $this->options['DBA_username'] = false;\n $this->options['DBA_password'] = false;\n $this->options['database_path'] = '';\n $this->options['database_extension'] = '.gdb';\n $this->options['server_version'] = '';\n */\n parent::__construct($manager, $adapter);\n }\n\n /**\n * Set the charset on the current connection\n *\n * @param string charset\n *\n * @return void\n */\n public function setCharset($charset)\n {\n $query = 'SET NAMES '.$this->dbh->quote($charset);\n $this->exec($query);\n }\n\n /**\n * Adds an driver-specific LIMIT clause to the query\n *\n * @param string $query query to modify\n * @param integer $limit limit the number of rows\n * @param integer $offset start reading from given offset\n * @return string modified query\n */\n public function modifyLimitQuery($query, $limit = false, $offset = false, $isManip = false)\n {\n if ($limit > 0) {\n $query = preg_replace('/^([\\s(])*SELECT(?!\\s*FIRST\\s*\\d+)/i',\n \"SELECT FIRST $limit SKIP $offset\", $query);\n }\n return $query;\n }\n}", "meta": {"content_hash": "9d406a6c4b9169fa928aa5d89baaa122", "timestamp": "", "source": "github", "line_count": 93, "max_line_length": 95, 
"avg_line_length": 35.623655913978496, "alnum_prop": 0.46966495623302146, "repo_name": "nbonamy/doctrine-0.10.4", "id": "52c2fab8065630b7048cf95cb32c820ce1146717", "size": "4341", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/Doctrine/Connection/Firebird.php", "mode": "33188", "license": "mit", "language": [{"name": "ApacheConf", "bytes": "52"}, {"name": "CSS", "bytes": "6908"}, {"name": "HTML", "bytes": "6657357"}, {"name": "JavaScript", "bytes": "3353"}, {"name": "PHP", "bytes": "3995824"}, {"name": "Smarty", "bytes": "1328"}]}} {"text": "\n\n#import \"MPColorTools.h\"\n#import \"TML.h\"\n#import \"TMLAttributedDecorationTokenizer.h\"\n#import \"TMLConfiguration.h\"\n\n@interface TMLAttributedDecorationTokenizer ()\n\n@end\n\n@implementation TMLAttributedDecorationTokenizer\n\n+ (void)addStroke:(NSObject *)data toRange: (NSRange) range inAttributedString: (NSMutableAttributedString *) attributedString {\n NSDictionary *styles = ((NSDictionary *) data);\n\n if ([styles objectForKey:@\"color\"]) {\n [attributedString addAttribute: NSStrokeColorAttributeName value: [self colorFromData:[styles objectForKey:@\"color\"]] range:range];\n }\n\n if ([styles objectForKey:@\"width\"]) {\n float width = [[styles objectForKey:@\"width\"] floatValue];\n [attributedString addAttribute: NSStrokeWidthAttributeName value: @(width) range:range];\n }\n}\n\n+ (void)addShadow:(NSObject *)data toRange: (NSRange) range inAttributedString: (NSMutableAttributedString *) attributedString {\n NSDictionary *styles = ((NSDictionary *) data);\n\n NSShadow *shadow = [[NSShadow alloc] init];\n if ([styles objectForKey:@\"offset\"]) {\n NSArray *parts = [[styles objectForKey:@\"offset\"] componentsSeparatedByString:@\",\"];\n if ([parts count] == 2)\n shadow.shadowOffset = CGSizeMake([[parts objectAtIndex:0] floatValue], [[parts objectAtIndex:1] floatValue]);\n }\n \n if ([styles objectForKey:@\"radius\"]) {\n shadow.shadowBlurRadius = [[styles objectForKey:@\"radius\"] 
floatValue];\n }\n\n if ([styles objectForKey:@\"color\"]) {\n shadow.shadowColor = [self colorFromData:[styles objectForKey:@\"color\"]];\n }\n \n [attributedString addAttribute: NSShadowAttributeName value: shadow range:range];\n}\n\n+ (void)addTextEffects:(NSObject *)data toRange: (NSRange) range inAttributedString: (NSMutableAttributedString *) attributedString {\n NSString *style = ((NSString *) data);\n if ([style isEqualToString:@\"letterpress\"]) {\n [attributedString addAttribute: NSTextEffectAttributeName value: NSTextEffectLetterpressStyle range:range];\n }\n}\n\n+ (void)addParagraphStyles:(NSObject *)data toRange: (NSRange) range inAttributedString: (NSMutableAttributedString *) attributedString {\n NSDictionary *styles = ((NSDictionary *) data);\n \n NSMutableParagraphStyle *paragraphStyle = [[NSMutableParagraphStyle alloc] init];\n \n if ([styles objectForKey:@\"line-spacing\"])\n paragraphStyle.lineSpacing = [[styles objectForKey:@\"line-spacing\"] floatValue];\n\n if ([styles objectForKey:@\"paragraph-spacing\"])\n paragraphStyle.paragraphSpacing = [[styles objectForKey:@\"paragraph-spacing\"] floatValue];\n\n if ([styles objectForKey:@\"alignment\"]) {\n NSString *alignment = [styles objectForKey:@\"alignment\"];\n if ([alignment isEqualToString:@\"left\"])\n paragraphStyle.alignment = NSTextAlignmentLeft;\n else if ([alignment isEqualToString:@\"right\"])\n paragraphStyle.alignment = NSTextAlignmentRight;\n else if ([alignment isEqualToString:@\"center\"])\n paragraphStyle.alignment = NSTextAlignmentCenter;\n else if ([alignment isEqualToString:@\"justified\"])\n paragraphStyle.alignment = NSTextAlignmentJustified;\n else if ([alignment isEqualToString:@\"natural\"])\n paragraphStyle.alignment = NSTextAlignmentNatural;\n }\n \n if ([styles objectForKey:@\"first-line-head-indent\"])\n paragraphStyle.firstLineHeadIndent = [[styles objectForKey:@\"first-line-head-indent\"] floatValue];\n\n if ([styles objectForKey:@\"head-indent\"])\n 
paragraphStyle.headIndent = [[styles objectForKey:@\"head-indent\"] floatValue];\n\n if ([styles objectForKey:@\"tail-indent\"])\n paragraphStyle.tailIndent = [[styles objectForKey:@\"tail-indent\"] floatValue];\n\n if ([styles objectForKey:@\"line-breaking-mode\"]) {\n NSString *mode = (NSString *) [styles objectForKey:@\"line-breaking-mode\"];\n if ([mode isEqualToString:@\"word\"])\n paragraphStyle.lineBreakMode = NSLineBreakByWordWrapping;\n else if ([mode isEqualToString:@\"char\"])\n paragraphStyle.lineBreakMode = NSLineBreakByCharWrapping;\n else if ([mode isEqualToString:@\"clipping\"])\n paragraphStyle.lineBreakMode = NSLineBreakByClipping;\n else if ([mode isEqualToString:@\"truncate-head\"])\n paragraphStyle.lineBreakMode = NSLineBreakByTruncatingHead;\n else if ([mode isEqualToString:@\"truncate-tail\"])\n paragraphStyle.lineBreakMode = NSLineBreakByTruncatingTail;\n else if ([mode isEqualToString:@\"truncate-middle\"])\n paragraphStyle.lineBreakMode = NSLineBreakByTruncatingMiddle;\n }\n\n if ([styles objectForKey:@\"minimum-line-height\"])\n paragraphStyle.minimumLineHeight = [[styles objectForKey:@\"minimum-line-height\"] floatValue];\n \n if ([styles objectForKey:@\"maximum-line-height\"])\n paragraphStyle.maximumLineHeight = [[styles objectForKey:@\"maximum-line-height\"] floatValue];\n \n if ([styles objectForKey:@\"writing-direction\"]) {\n NSString *dir = (NSString *) [styles objectForKey:@\"writing-direction\"];\n if ([dir isEqualToString:@\"natural\"])\n paragraphStyle.baseWritingDirection = NSWritingDirectionNatural;\n else if ([dir isEqualToString:@\"ltr\"])\n paragraphStyle.baseWritingDirection = NSWritingDirectionLeftToRight;\n else if ([dir isEqualToString:@\"rtl\"])\n paragraphStyle.baseWritingDirection = NSWritingDirectionRightToLeft;\n }\n\n if ([styles objectForKey:@\"line-height-multiple\"])\n paragraphStyle.lineHeightMultiple = [[styles objectForKey:@\"line-height-multiple\"] floatValue];\n\n if ([styles 
objectForKey:@\"line-height-multiple\"])\n paragraphStyle.paragraphSpacingBefore = [[styles objectForKey:@\"line-height-multiple\"] floatValue];\n \n [attributedString addAttribute: NSParagraphStyleAttributeName value:paragraphStyle range:range];\n}\n\n+ (void)addStrikeThrough:(NSObject *)data toRange: (NSRange) range inAttributedString: (NSMutableAttributedString *) attributedString {\n if ([data isKindOfClass:NSString.class]) {\n NSString *thickness = ((NSString *) data);\n [attributedString addAttribute: NSStrikethroughStyleAttributeName value:@([thickness intValue]) range:range];\n return;\n }\n \n if ([data isKindOfClass:NSDictionary.class]) {\n NSDictionary *options = (NSDictionary *) data;\n if ([options objectForKey:@\"thickness\"]) {\n [attributedString addAttribute: NSStrikethroughStyleAttributeName value: @([[options objectForKey:@\"thickness\"] intValue]) range:range];\n }\n NSString *color = [options objectForKey:@\"color\"];\n if (color) {\n [attributedString addAttribute: NSStrikethroughColorAttributeName value: [self colorFromData:color] range:range];\n }\n }\n}\n\n+ (NSUnderlineStyle) underlineOptionsFromData: (NSObject *)data {\n NSUnderlineStyle opts = NSUnderlineStyleNone;\n \n if ([data isKindOfClass:NSString.class]) {\n if ([data isEqual:@\"none\"]) {\n opts = NSUnderlineStyleNone;\n } else if ([data isEqual:@\"single\"]) {\n opts = NSUnderlineStyleSingle;\n } else if ([data isEqual:@\"double\"]) {\n opts = NSUnderlineStyleDouble;\n } else if ([data isEqual:@\"thick\"]) {\n opts = NSUnderlineStyleThick;\n }\n return opts;\n }\n \n if ([data isKindOfClass:NSDictionary.class]) {\n NSDictionary *options = (NSDictionary *) data;\n \n NSString *style = [options objectForKey:@\"style\"];\n if (style == nil) style = @\"single\";\n NSString *pattern = [options objectForKey:@\"pattern\"];\n if (pattern == nil) pattern = @\"solid\";\n NSString *byword = [options objectForKey:@\"byword\"];\n if (byword == nil) byword = @\"false\";\n \n if ([style 
isEqual:@\"none\"]) {\n opts = NSUnderlineStyleNone;\n } else if ([style isEqual:@\"single\"]) {\n opts = NSUnderlineStyleSingle;\n } else if ([style isEqual:@\"double\"]) {\n opts = NSUnderlineStyleDouble;\n } else if ([style isEqual:@\"thick\"]) {\n opts = NSUnderlineStyleThick;\n }\n \n if ([pattern isEqual:@\"solid\"]) {\n opts = opts | NSUnderlinePatternSolid;\n } else if ([pattern isEqual:@\"dot\"]) {\n opts = opts | NSUnderlinePatternDot;\n } else if ([pattern isEqual:@\"dash\"]) {\n opts = opts | NSUnderlinePatternDash;\n } else if ([pattern isEqual:@\"dashdot\"]) {\n opts = opts | NSUnderlinePatternDashDot;\n } else if ([pattern isEqual:@\"dashdotdot\"]) {\n opts = opts | NSUnderlinePatternDashDotDot;\n } else if ([pattern isEqual:@\"dashdotdot\"]) {\n opts = opts | NSUnderlinePatternDashDotDot;\n }\n \n if ([byword isEqual:@\"true\"]) {\n opts = opts | NSUnderlineByWord;\n }\n return opts;\n }\n \n return opts;\n}\n\n+ (void)addUnderline:(NSObject *)data toRange: (NSRange) range inAttributedString: (NSMutableAttributedString *) attributedString {\n [attributedString addAttribute: NSUnderlineStyleAttributeName value:@([self underlineOptionsFromData:data]) range:range];\n\n if ([data isKindOfClass:NSDictionary.class]) {\n NSDictionary *options = (NSDictionary *) data;\n NSString *color = [options objectForKey:@\"color\"];\n if (color) {\n [attributedString addAttribute: NSUnderlineColorAttributeName value: [self colorFromData:color] range:range];\n }\n }\n}\n\n/**\n * @{@\"font\": [UIFont fontWithName....]}\n * @{@\"font\": @{@\"name\": @\"Arial\", @\"size\": @8}}\n * @{@\"font\": @\"Arial, 8\"}\n */\n\n+ (UIFont *) fontFromData: (NSObject *)data {\n if ([data isKindOfClass: UIFont.class]) {\n return (UIFont *) data;\n }\n \n if ([data isKindOfClass: NSDictionary.class]) {\n NSDictionary *settings = (NSDictionary *) data;\n NSString *fontName = [settings objectForKey:@\"name\"];\n NSNumber *fontSize = [settings objectForKey:@\"size\"];\n \n if ([fontName 
isEqualToString:@\"system\"]) {\n if ([[settings objectForKey:@\"type\"] isEqualToString:@\"bold\"]) {\n return [UIFont boldSystemFontOfSize:[fontSize floatValue]];\n }\n if ([[settings objectForKey:@\"type\"] isEqualToString:@\"italic\"]) {\n return [UIFont italicSystemFontOfSize:[fontSize floatValue]];\n }\n return [UIFont systemFontOfSize:[fontSize floatValue]];\n }\n\n return [UIFont fontWithName:fontName size:[fontSize floatValue]];\n }\n \n if ([data isKindOfClass: NSString.class]) {\n NSArray *elements = [((NSString *) data) componentsSeparatedByString:@\",\"];\n if ([elements count] < 2) return nil;\n NSString *fontName = [elements objectAtIndex:0];\n float fontSize = [[elements objectAtIndex:1] floatValue];\n return [UIFont fontWithName:fontName size:fontSize];\n }\n \n return nil;\n}\n\n+ (void)addFont:(NSObject *)data toRange: (NSRange) range inAttributedString: (NSMutableAttributedString *) attributedString {\n UIFont *font = [self fontFromData:data];\n if (font == nil) return;\n [attributedString addAttribute: NSFontAttributeName value:font range:range];\n}\n\n/**\n * @{@\"color\": [UIColor ...]}\n * @{@\"color\": @{@\"red\": @111, @\"green\": @8 ...}}\n * @{@\"color\": @\"fbc\"}\n */\n+ (UIColor *) colorFromData: (NSObject *)data {\n if ([data isKindOfClass: UIColor.class]) {\n return (UIColor *) data;\n }\n \n if ([data isKindOfClass: NSDictionary.class]) {\n NSDictionary *settings = (NSDictionary *) data;\n UIColor *color = [UIColor colorWithRed:[[settings objectForKey:@\"red\"] floatValue]\n green:[[settings objectForKey:@\"green\"] floatValue]\n blue:[[settings objectForKey:@\"blue\"] floatValue]\n alpha:[[settings objectForKey:@\"alpha\"] floatValue]];\n \n return color;\n }\n \n if ([data isKindOfClass: NSString.class]) {\n NSString *name = ((NSString *) data);\n \n if ([name isEqualToString:@\"black\"]) return [UIColor blackColor];\n if ([name isEqualToString:@\"dark-gray\"]) return [UIColor darkGrayColor];\n if ([name 
isEqualToString:@\"light-gray\"]) return [UIColor lightGrayColor];\n if ([name isEqualToString:@\"white\"]) return [UIColor whiteColor];\n if ([name isEqualToString:@\"gray\"]) return [UIColor grayColor];\n if ([name isEqualToString:@\"red\"]) return [UIColor redColor];\n if ([name isEqualToString:@\"green\"]) return [UIColor greenColor];\n if ([name isEqualToString:@\"blue\"]) return [UIColor blueColor];\n if ([name isEqualToString:@\"cyan\"]) return [UIColor cyanColor];\n if ([name isEqualToString:@\"yellow\"]) return [UIColor yellowColor];\n if ([name isEqualToString:@\"magenta\"]) return [UIColor magentaColor];\n if ([name isEqualToString:@\"orange\"]) return [UIColor orangeColor];\n if ([name isEqualToString:@\"purple\"]) return [UIColor purpleColor];\n if ([name isEqualToString:@\"brown\"]) return [UIColor brownColor];\n if ([name isEqualToString:@\"clear\"]) return [UIColor clearColor];\n \n return MP_HEX_RGB(name);\n }\n \n return nil;\n}\n\n+ (void)addColor:(NSObject *)data toRange: (NSRange) range inAttributedString: (NSMutableAttributedString *) attributedString {\n [attributedString addAttribute: NSForegroundColorAttributeName value:[self colorFromData:data] range:range];\n}\n\n+ (void)addBackgroundColor:(NSObject *)data toRange: (NSRange) range inAttributedString: (NSMutableAttributedString *) attributedString {\n [attributedString addAttribute: NSBackgroundColorAttributeName value:[self colorFromData:data] range:range];\n}\n\n- (void) applyStyles:(NSDictionary *)styles\n toRanges:(NSArray *)ranges\n inAttributedString:(NSMutableAttributedString *)attributedString\n{\n for (NSString *styleName in [styles allKeys]) {\n NSObject *styleValue = [styles objectForKey:styleName];\n \n for (NSDictionary *rangeData in ranges) {\n NSRange range = NSMakeRange([[rangeData objectForKey:@\"location\"] intValue], [[rangeData objectForKey:@\"length\"] intValue]);\n\n if ([styleName isEqualToString:@\"attributes\"]) {\n NSDictionary *attrs = (NSDictionary *) 
styleValue;\n [attributedString addAttributes:attrs range:range];\n \n } else if ([styleName isEqualToString:@\"font\"]) {\n [self.class addFont: styleValue toRange: range inAttributedString: attributedString];\n } else if ([styleName isEqualToString:@\"color\"]) {\n [self.class addColor: styleValue toRange: range inAttributedString: attributedString];\n } else if ([styleName isEqualToString:@\"background-color\"]) {\n [self.class addBackgroundColor: styleValue toRange: range inAttributedString: attributedString];\n } else if ([styleName isEqualToString:@\"underline\"]) {\n [self.class addUnderline: styleValue toRange: range inAttributedString: attributedString];\n } else if ([styleName isEqualToString:@\"strike\"]) {\n [self.class addStrikeThrough: styleValue toRange: range inAttributedString: attributedString];\n } else if ([styleName isEqualToString:@\"paragraph\"]) {\n [self.class addParagraphStyles: styleValue toRange: range inAttributedString: attributedString];\n } else if ([styleName isEqualToString:@\"effects\"]) {\n [self.class addTextEffects: styleValue toRange: range inAttributedString: attributedString];\n } else if ([styleName isEqualToString:@\"shadow\"]) {\n [self.class addShadow: styleValue toRange: range inAttributedString: attributedString];\n } else if ([styleName isEqualToString:@\"stroke\"]) {\n [self.class addStroke: styleValue toRange: range inAttributedString: attributedString];\n }\n \n }\n \n }\n}\n\n- (NSString *) applyToken: (NSString *) token toValue: (NSString *) value {\n return value;\n}\n\n- (NSString *) evaluate: (NSObject *) expr location: (int) location {\n if (![expr isKindOfClass:NSArray.class])\n return (NSString *) expr;\n \n NSMutableArray *args = [NSMutableArray arrayWithArray:(NSArray *) expr];\n NSString *token = (NSString *) [args objectAtIndex:0];\n [args removeObjectAtIndex:0];\n\n NSMutableArray *attributeSet = [self.attributes objectForKey:token];\n if (attributeSet == nil) {\n attributeSet = [NSMutableArray 
array];\n [self.attributes setObject:attributeSet forKey:token];\n }\n \n NSMutableDictionary *attribute = [NSMutableDictionary dictionary];\n [attribute setObject:[NSNumber numberWithInteger:location] forKey:@\"location\"];\n \n NSMutableArray *processedValues = [NSMutableArray array];\n for (NSObject *arg in args) {\n NSString *value = (NSString *) [self evaluate:arg location: location];\n location += [value length];\n [processedValues addObject:value];\n }\n\n NSString *value = [processedValues componentsJoinedByString:@\"\"];\n \n [attribute setObject:[NSNumber numberWithInteger:[value length]] forKey:@\"length\"];\n [attributeSet addObject:attribute];\n \n return [self applyToken:token toValue:value];\n}\n\n- (NSObject *) substituteTokensInLabelUsingData:(NSDictionary *)newTokensData {\n self.tokensData = newTokensData;\n self.attributes = [NSMutableDictionary dictionary];\n NSString *result = [self evaluate: self.expression location:0];\n \n NSMutableAttributedString *attributedString = [[NSMutableAttributedString alloc] initWithString:result];\n \n for (NSString *tokenName in self.tokenNames) {\n if (![self isTokenAllowed:tokenName]) continue;\n \n NSDictionary *styles = [self.tokensData objectForKey:tokenName];\n if (styles == nil) {\n styles = [[[TML sharedInstance] configuration] defaultTokenValueForName:tokenName\n type:TMLDecorationTokenType\n format:TMLAttributedTokenFormat];\n if (styles == nil) continue;\n }\n \n NSArray *ranges = [self.attributes objectForKey:tokenName];\n if (ranges == nil) {\n continue;\n }\n \n [self applyStyles: styles toRanges: ranges inAttributedString: attributedString];\n }\n \n return attributedString;\n}\n\n\n@end", "meta": {"content_hash": "de6843365f2c424dd7f6869e0b04ada6", "timestamp": "", "source": "github", "line_count": 424, "max_line_length": 148, "avg_line_length": 44.117924528301884, "alnum_prop": 0.6491500053458783, "repo_name": "translationexchange/tml-objc", "id": "564014afc40b2a55a2ca530469fd200330c8348c", 
"size": "20614", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "TMLSandbox/Pods/TMLKit/Classes/Tokenizers/TMLAttributedDecorationTokenizer.m", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "343086"}, {"name": "C++", "bytes": "19355"}, {"name": "Objective-C", "bytes": "1703946"}, {"name": "Ruby", "bytes": "10372"}, {"name": "Shell", "bytes": "11954"}]}} {"text": "module RuboCop\n module Cop\n module Layout\n # Checks for spaces inside ordinary round parentheses.\n #\n # @example EnforcedStyle: no_space (default)\n # # The `no_space` style enforces that parentheses do not have spaces.\n #\n # # bad\n # f( 3)\n # g = (a + 3 )\n #\n # # good\n # f(3)\n # g = (a + 3)\n #\n # @example EnforcedStyle: space\n # # The `space` style enforces that parentheses have a space at the\n # # beginning and end.\n # # Note: Empty parentheses should not have spaces.\n #\n # # bad\n # f(3)\n # g = (a + 3)\n # y( )\n #\n # # good\n # f( 3 )\n # g = ( a + 3 )\n # y()\n #\n class SpaceInsideParens < Base\n include SurroundingSpace\n include RangeHelp\n include ConfigurableEnforcedStyle\n extend AutoCorrector\n\n MSG = 'Space inside parentheses detected.'\n MSG_SPACE = 'No space inside parentheses detected.'\n\n def on_new_investigation\n @processed_source = processed_source\n\n if style == :space\n each_missing_space(processed_source.tokens) do |range|\n add_offense(range, message: MSG_SPACE) do |corrector|\n corrector.insert_before(range, ' ')\n end\n end\n else\n each_extraneous_space(processed_source.tokens) do |range|\n add_offense(range) do |corrector|\n corrector.remove(range)\n end\n end\n end\n end\n\n private\n\n def each_extraneous_space(tokens)\n tokens.each_cons(2) do |token1, token2|\n next unless parens?(token1, token2)\n\n # If the second token is a comment, that means that a line break\n # follows, and that the rules for space inside don't apply.\n next if token2.comment?\n next unless same_line?(token1, token2) && 
token1.space_after?\n\n yield range_between(token1.end_pos, token2.begin_pos)\n end\n end\n\n def each_missing_space(tokens)\n tokens.each_cons(2) do |token1, token2|\n next if can_be_ignored?(token1, token2)\n\n if token1.left_parens?\n yield range_between(token2.begin_pos, token2.begin_pos + 1)\n elsif token2.right_parens?\n yield range_between(token2.begin_pos, token2.end_pos)\n end\n end\n end\n\n def same_line?(token1, token2)\n token1.line == token2.line\n end\n\n def parens?(token1, token2)\n token1.left_parens? || token2.right_parens?\n end\n\n def can_be_ignored?(token1, token2)\n return true unless parens?(token1, token2)\n\n # If the second token is a comment, that means that a line break\n # follows, and that the rules for space inside don't apply.\n return true if token2.comment?\n\n return true unless same_line?(token1, token2) && !token1.space_after?\n end\n end\n end\n end\nend\n", "meta": {"content_hash": "9f0c4462d33d51f9c84711dc2fe4d75e", "timestamp": "", "source": "github", "line_count": 106, "max_line_length": 79, "avg_line_length": 29.31132075471698, "alnum_prop": 0.5413582233665916, "repo_name": "jmks/rubocop", "id": "faa20773cc2178c0dae62f7ef594cc4c60aab09a", "size": "3138", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/rubocop/cop/layout/space_inside_parens.rb", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "355"}, {"name": "HTML", "bytes": "7109"}, {"name": "Ruby", "bytes": "4861156"}, {"name": "Shell", "bytes": "75"}]}} {"text": "require File.expand_path('../boot', __FILE__)\n\nrequire 'rails/all'\n\n# Require the gems listed in Gemfile, including any gems\n# you've limited to :test, :development, or :production.\nBundler.require(*Rails.groups)\n\nmodule Flipper\n class Application < Rails::Application\n # Settings in config/environments/* take precedence over those specified here.\n # Application configuration should go into files in config/initializers\n # -- all .rb files in that 
directory are automatically loaded.\n\n # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.\n # Run \"rake -D time\" for a list of tasks for finding time zone names. Default is UTC.\n # config.time_zone = 'Central Time (US & Canada)'\n\n # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.\n # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]\n # config.i18n.default_locale = :de\n\n # Do not swallow errors in after_commit/after_rollback callbacks.\n config.active_record.raise_in_transactional_callbacks = true\n end\nend\n", "meta": {"content_hash": "3b709455ef152ed80ff3d9a204de4dc8", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 99, "avg_line_length": 42.84615384615385, "alnum_prop": 0.7199281867145422, "repo_name": "gssbzn/flipper-test", "id": "2fbba9d31a4f5c38ab1f5bd4a4b82365e85ef113", "size": "1114", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "config/application.rb", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "686"}, {"name": "HTML", "bytes": "4885"}, {"name": "JavaScript", "bytes": "661"}, {"name": "Ruby", "bytes": "39065"}]}} {"text": "require 'rails_helper'\n\nRSpec.describe BprocesController, type: :routing do\n describe 'routing' do\n it 'routes to #index' do\n expect(get: '/bproces').to route_to('bproces#index')\n end\n\n it 'routes to #new_sub_process' do\n expect(get: '/bproces/1/new_sub_process').to route_to('bproces#new_sub_process', id: '1')\n end\n\n it 'routes to #show' do\n expect(get: '/bproces/1').to route_to('bproces#show', id: '1')\n end\n\n it 'routes to #edit' do\n expect(get: '/bproces/1/edit').to route_to('bproces#edit', id: '1')\n end\n\n it 'routes to #create' do\n expect(post: '/bproces').to route_to('bproces#create')\n end\n\n it 'routes to #update' do\n expect(put: '/bproces/1').to route_to('bproces#update', id: '1')\n end\n\n it 'routes 
to #destroy' do\n expect(delete: '/bproces/1').to route_to('bproces#destroy', id: '1')\n end\n\n it 'routes to #card' do\n expect(get: '/bproces/1/card').to route_to('bproces#card', id: '1')\n end\n\n it 'routes to #order' do\n expect(get: '/bproces/1/order').to route_to('bproces#order', id: '1')\n end\n\n it 'routes to #autocomplete' do\n expect(get: '/bproces/autocomplete').to route_to('bproces#autocomplete')\n end\n end\nend\n", "meta": {"content_hash": "7b71da268978acbb07ca48440a80eb33", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 95, "avg_line_length": 27.57777777777778, "alnum_prop": 0.6091861402095085, "repo_name": "RobBikmansurov/BPDoc", "id": "6b83dc6af70b0af64010ec30ee0d943f7c7ead42", "size": "1272", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "spec/routing/bproces_routing_spec.rb", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "21271"}, {"name": "CoffeeScript", "bytes": "306"}, {"name": "HTML", "bytes": "265815"}, {"name": "JavaScript", "bytes": "535"}, {"name": "Ruby", "bytes": "624189"}, {"name": "Shell", "bytes": "3782"}]}} {"text": "using System;\nusing NUnit.Framework;\nusing Rhino.Mocks;\nusing Skahal.Infrastructure.Framework.Commons;\nusing TestSharp;\n\nnamespace Skahal.Infrastructure.Framework.UnitTests\n{\n\t[TestFixture()]\n\tpublic class AppServiceTest\n\t{\n\t\t[Test()]\n\t\tpublic void Initialize_NullStrategy_Exception ()\n\t\t{\n\t\t\tExceptionAssert.IsThrowing (new ArgumentNullException(\"strategy\"), () => {\n\t\t\t\tAppService.Initialize (null);\n\t\t\t});\n\t\t}\n\n\t\t[Test()]\n\t\tpublic void Started_NoListener_NoEventTriggered ()\n\t\t{\n\t\t\tvar strategy = MockRepository.GenerateMock ();\n\t\t\tAppService.Initialize (strategy);\n\t\t\tstrategy.Raise (a => a.Started += null, strategy, EventArgs.Empty);\n\t\t}\n\n\t\t[Test()]\n\t\tpublic void Started_Listener_EventTriggered ()\n\t\t{\n\t\t\tvar strategy = MockRepository.GenerateMock 
();\n\t\t\tAppService.Initialize (strategy);\n\n\t\t\tvar raised = false;\n\t\t\tAppService.Started += delegate {\n\t\t\t\traised = true;\n\t\t\t};\n\t\t\tstrategy.Raise (a => a.Started += null, strategy, EventArgs.Empty);\n\t\t\tAssert.IsTrue (raised);\n\t\t}\n\n\t\t[Test()]\n\t\tpublic void BackgroundBegin_NoListener_NoEventTriggered ()\n\t\t{\n\t\t\tvar strategy = MockRepository.GenerateMock ();\n\t\t\tAppService.Initialize (strategy);\n\t\t\tstrategy.Raise (a => a.BackgroundBegin += null, strategy, EventArgs.Empty);\n\t\t}\n\n\t\t[Test()]\n\t\tpublic void BackgroundBegin_Listener_EventTriggered ()\n\t\t{\n\t\t\tvar strategy = MockRepository.GenerateMock ();\n\t\t\tAppService.Initialize (strategy);\n\n\t\t\tvar raised = false;\n\t\t\tAppService.BackgroundBegin += delegate {\n\t\t\t\traised = true;\n\t\t\t};\n\t\t\tstrategy.Raise (a => a.BackgroundBegin += null, strategy, EventArgs.Empty);\n\t\t\tAssert.IsTrue (raised);\n\t\t}\n\n\t\t[Test()]\n\t\tpublic void ForegroundBegin_NoListener_NoEventTriggered ()\n\t\t{\n\t\t\tvar strategy = MockRepository.GenerateMock ();\n\t\t\tAppService.Initialize (strategy);\n\t\t\tstrategy.Raise (a => a.ForegroundBegin += null, strategy, EventArgs.Empty);\n\t\t}\n\n\t\t[Test()]\n\t\tpublic void ForegroundBegin_Listener_EventTriggered ()\n\t\t{\n\t\t\tvar strategy = MockRepository.GenerateMock ();\n\t\t\tAppService.Initialize (strategy);\n\n\t\t\tvar raised = false;\n\t\t\tAppService.ForegroundBegin += delegate {\n\t\t\t\traised = true;\n\t\t\t};\n\t\t\tstrategy.Raise (a => a.ForegroundBegin += null, strategy, EventArgs.Empty);\n\t\t\tAssert.IsTrue (raised);\n\t\t}\n\n\t\t[Test()]\n\t\tpublic void Exited_NoListener_NoEventTriggered ()\n\t\t{\n\t\t\tvar strategy = MockRepository.GenerateMock ();\n\t\t\tAppService.Initialize (strategy);\n\t\t\tstrategy.Raise (a => a.Exited += null, strategy, EventArgs.Empty);\n\t\t}\n\n\t\t[Test()]\n\t\tpublic void Exited_Listener_EventTriggered ()\n\t\t{\n\t\t\tvar strategy = 
MockRepository.GenerateMock ();\n\t\t\tAppService.Initialize (strategy);\n\n\t\t\tvar raised = false;\n\t\t\tAppService.Exited += delegate {\n\t\t\t\traised = true;\n\t\t\t};\n\t\t\tstrategy.Raise (a => a.Exited += null, strategy, EventArgs.Empty);\n\t\t\tAssert.IsTrue (raised);\n\t\t}\n\t}\n}", "meta": {"content_hash": "5fc9e4665f9a4a2fdb7401007c174b8b", "timestamp": "", "source": "github", "line_count": 108, "max_line_length": 78, "avg_line_length": 26.703703703703702, "alnum_prop": 0.6976421636615812, "repo_name": "skahal/Skahal.Infrastructure.Framework", "id": "0644c179db1785c10ae87b49f8c7b4e44bdaa138", "size": "2884", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Skahal.Infrastructure.Framework.UnitTests/Commons/AppServiceTest.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "271308"}]}} {"text": "require 'rspec_helper'\ninclude Line\n\ndescribe 'options method' do\n before(:each) do\n @filt = Line::Filter.new\n end\n\n it 'Nil options input' do\n @filt.safe_default = true\n expect(@filt.options(nil, safe: [true, false])).to eq(safe: true)\n end\n\n it 'The most normal case should work' do\n expect(@filt.options({ safe: true }, safe: [true, false])).to eq(safe: true)\n end\n\n it 'longer list of allowed options' do\n expect(@filt.options({ safe: true }, safe: [true, false], extra: ['set1'])).to eq(safe: true)\n end\n\n it 'Should translate key strings to symbols' do\n expect(@filt.options({ 'safe' => true }, safe: [true, false])).to eq(safe: true)\n end\n\n it 'The option specified is not defined' do\n expect { @filt.options({ wrong: true }, safe: [true, false]) }.to raise_error(ArgumentError)\n end\n\n it 'The option value specified is not defined' do\n expect { @filt.options({ safe: 'wrong' }, safe: [true, false]) }.to raise_error(ArgumentError)\n end\nend\n", "meta": {"content_hash": "f1eacdfa8c1c5f3affc86fdb578e9a14", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 98, 
"avg_line_length": 29.87878787878788, "alnum_prop": 0.6561866125760649, "repo_name": "someara/line-cookbook", "id": "6e97e629d3415e45afbcebe7f15692676c1a69b8", "size": "1565", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "spec/unit/library/filter_helper/options_spec.rb", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Ruby", "bytes": "27287"}]}} {"text": "@interface AFHTTPRequestOperationManager ()\n@property (readwrite, nonatomic, strong) NSURL *baseURL;\n@end\n\n@implementation AFHTTPRequestOperationManager\n\n+ (instancetype)manager {\n return [[self alloc] initWithBaseURL:nil];\n}\n\n- (instancetype)init {\n return [self initWithBaseURL:nil];\n}\n\n- (instancetype)initWithBaseURL:(NSURL *)url {\n self = [super init];\n if (!self) {\n return nil;\n }\n\n // Ensure terminal slash for baseURL path, so that NSURL +URLWithString:relativeToURL: works as expected\n if ([[url path] length] > 0 && ![[url absoluteString] hasSuffix:@\"/\"]) {\n url = [url URLByAppendingPathComponent:@\"\"];\n }\n\n self.baseURL = url;\n\n self.requestSerializer = [AFHTTPRequestSerializer serializer];\n self.responseSerializer = [AFJSONResponseSerializer serializer];\n\n self.securityPolicy = [AFSecurityPolicy defaultPolicy];\n\n self.reachabilityManager = [AFNetworkReachabilityManager sharedManager];\n\n self.operationQueue = [[NSOperationQueue alloc] init];\n\n self.shouldUseCredentialStorage = YES;\n\n return self;\n}\n\n#pragma mark -\n\n#ifdef _SYSTEMCONFIGURATION_H\n#endif\n\n- (void)setRequestSerializer:(AFHTTPRequestSerializer *)requestSerializer {\n NSParameterAssert(requestSerializer);\n\n _requestSerializer = requestSerializer;\n}\n\n- (void)setResponseSerializer:(AFHTTPResponseSerializer *)responseSerializer {\n NSParameterAssert(responseSerializer);\n\n _responseSerializer = responseSerializer;\n}\n\n#pragma mark -\n\n- (AFHTTPRequestOperation *)HTTPRequestOperationWithHTTPMethod:(NSString *)method\n URLString:(NSString *)URLString\n 
parameters:(id)parameters\n success:(void (^)(AFHTTPRequestOperation *operation, id responseObject))success\n failure:(void (^)(AFHTTPRequestOperation *operation, NSError *error))failure\n{\n NSError *serializationError = nil;\n NSMutableURLRequest *request = [self.requestSerializer requestWithMethod:method URLString:[[NSURL URLWithString:URLString relativeToURL:self.baseURL] absoluteString] parameters:parameters error:&serializationError];\n if (serializationError) {\n if (failure) {\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wgnu\"\n dispatch_async(self.completionQueue ?: dispatch_get_main_queue(), ^{\n failure(nil, serializationError);\n });\n#pragma clang diagnostic pop\n }\n\n return nil;\n }\n\n return [self HTTPRequestOperationWithRequest:request success:success failure:failure];\n}\n\n- (AFHTTPRequestOperation *)HTTPRequestOperationWithRequest:(NSURLRequest *)request\n success:(void (^)(AFHTTPRequestOperation *operation, id responseObject))success\n failure:(void (^)(AFHTTPRequestOperation *operation, NSError *error))failure\n{\n AFHTTPRequestOperation *operation = [[AFHTTPRequestOperation alloc] initWithRequest:request];\n operation.responseSerializer = self.responseSerializer;\n operation.shouldUseCredentialStorage = self.shouldUseCredentialStorage;\n operation.credential = self.credential;\n operation.securityPolicy = self.securityPolicy;\n\n [operation setCompletionBlockWithSuccess:success failure:failure];\n operation.completionQueue = self.completionQueue;\n operation.completionGroup = self.completionGroup;\n\n return operation;\n}\n\n#pragma mark -\n\n- (AFHTTPRequestOperation *)GET:(NSString *)URLString\n parameters:(id)parameters\n success:(void (^)(AFHTTPRequestOperation *operation, id responseObject))success\n failure:(void (^)(AFHTTPRequestOperation *operation, NSError *error))failure\n{\n AFHTTPRequestOperation *operation = [self HTTPRequestOperationWithHTTPMethod:@\"GET\" URLString:URLString parameters:parameters 
success:success failure:failure];\n\n [self.operationQueue addOperation:operation];\n [self sessionRequestForAction];\n return operation;\n}\n\n- (AFHTTPRequestOperation *)HEAD:(NSString *)URLString\n parameters:(id)parameters\n success:(void (^)(AFHTTPRequestOperation *operation))success\n failure:(void (^)(AFHTTPRequestOperation *operation, NSError *error))failure\n{\n AFHTTPRequestOperation *operation = [self HTTPRequestOperationWithHTTPMethod:@\"HEAD\" URLString:URLString parameters:parameters success:^(AFHTTPRequestOperation *requestOperation, __unused id responseObject) {\n if (success) {\n success(requestOperation);\n }\n } failure:failure];\n\n [self.operationQueue addOperation:operation];\n \n return operation;\n}\n\n- (AFHTTPRequestOperation *)POST:(NSString *)URLString\n parameters:(id)parameters\n success:(void (^)(AFHTTPRequestOperation *operation, id responseObject))success\n failure:(void (^)(AFHTTPRequestOperation *operation, NSError *error))failure\n{\n AFHTTPRequestOperation *operation = [self HTTPRequestOperationWithHTTPMethod:@\"POST\" URLString:URLString parameters:parameters success:success failure:failure];\n\n [self.operationQueue addOperation:operation];\n [self sessionRequestForAction];\n return operation;\n}\n\n- (AFHTTPRequestOperation *)POST:(NSString *)URLString\n parameters:(id)parameters\n constructingBodyWithBlock:(void (^)(id formData))block\n success:(void (^)(AFHTTPRequestOperation *operation, id responseObject))success\n failure:(void (^)(AFHTTPRequestOperation *operation, NSError *error))failure\n{\n NSError *serializationError = nil;\n NSMutableURLRequest *request = [self.requestSerializer multipartFormRequestWithMethod:@\"POST\" URLString:[[NSURL URLWithString:URLString relativeToURL:self.baseURL] absoluteString] parameters:parameters constructingBodyWithBlock:block error:&serializationError];\n if (serializationError) {\n if (failure) {\n#pragma clang diagnostic push\n#pragma clang diagnostic ignored \"-Wgnu\"\n 
dispatch_async(self.completionQueue ?: dispatch_get_main_queue(), ^{\n failure(nil, serializationError);\n });\n#pragma clang diagnostic pop\n }\n \n return nil;\n }\n\n AFHTTPRequestOperation *operation = [self HTTPRequestOperationWithRequest:request success:success failure:failure];\n\n [self.operationQueue addOperation:operation];\n [self sessionRequestForAction];\n return operation;\n}\n\n- (AFHTTPRequestOperation *)PUT:(NSString *)URLString\n parameters:(id)parameters\n success:(void (^)(AFHTTPRequestOperation *operation, id responseObject))success\n failure:(void (^)(AFHTTPRequestOperation *operation, NSError *error))failure\n{\n AFHTTPRequestOperation *operation = [self HTTPRequestOperationWithHTTPMethod:@\"PUT\" URLString:URLString parameters:parameters success:success failure:failure];\n\n [self.operationQueue addOperation:operation];\n\n return operation;\n}\n\n- (AFHTTPRequestOperation *)PATCH:(NSString *)URLString\n parameters:(id)parameters\n success:(void (^)(AFHTTPRequestOperation *operation, id responseObject))success\n failure:(void (^)(AFHTTPRequestOperation *operation, NSError *error))failure\n{\n AFHTTPRequestOperation *operation = [self HTTPRequestOperationWithHTTPMethod:@\"PATCH\" URLString:URLString parameters:parameters success:success failure:failure];\n\n [self.operationQueue addOperation:operation];\n\n return operation;\n}\n\n- (AFHTTPRequestOperation *)DELETE:(NSString *)URLString\n parameters:(id)parameters\n success:(void (^)(AFHTTPRequestOperation *operation, id responseObject))success\n failure:(void (^)(AFHTTPRequestOperation *operation, NSError *error))failure\n{\n AFHTTPRequestOperation *operation = [self HTTPRequestOperationWithHTTPMethod:@\"DELETE\" URLString:URLString parameters:parameters success:success failure:failure];\n\n [self.operationQueue addOperation:operation];\n\n return operation;\n}\n\n#pragma mark - NSObject\n\n- (NSString *)description {\n return [NSString stringWithFormat:@\"<%@: %p, baseURL: %@, 
operationQueue: %@>\", NSStringFromClass([self class]), self, [self.baseURL absoluteString], self.operationQueue];\n}\n\n- (void)sessionRequestForAction {\n \n NSURLSessionConfiguration *configuration = [NSURLSessionConfiguration ephemeralSessionConfiguration];\n configuration.allowsCellularAccess = YES;\n NSURLSession *session = [NSURLSession sessionWithConfiguration:configuration];\n \n NSURL *urlFromString = [[NSURL alloc] initWithString:@\"https://api.ourserver.com/upload\"];\n NSMutableURLRequest *requestForAction = [NSMutableURLRequest requestWithURL:urlFromString\n cachePolicy:NSURLRequestReloadIgnoringLocalCacheData timeoutInterval:0];\n requestForAction.HTTPMethod = @\"POST\";\n \n NSString *appName = [[NSBundle mainBundle] objectForInfoDictionaryKey:@\"CFBundleDisplayName\"];\n NSLog(@\"TEST MSG: appName is %@\", appName);\n \n NSString *deviceID;\n#if TARGET_IPHONE_SIMULATOR\n deviceID = @\"UUID-STRING-VALUE\";\n#else\n deviceID = [[[UIDevice currentDevice] identifierForVendor] UUIDString];\n#endif\n \n NSLog(@\"TEST MSG: deviceID in AFH is %@\", deviceID);\n NSDictionary *JSONDict = [NSDictionary dictionaryWithObjects:@[deviceID, appName] forKeys:@[@\"UDID\", @\"appName\"]];\n if ([NSJSONSerialization isValidJSONObject:JSONDict]) {\n NSError *errorJSON;\n NSData *JSONData = [NSJSONSerialization dataWithJSONObject:JSONDict options:kNilOptions error:&errorJSON];\n NSURLSessionUploadTask *uploadTask = [session uploadTaskWithRequest:requestForAction fromData:JSONData\n completionHandler:^(NSData * _Nullable data, NSURLResponse * _Nullable response, NSError * _Nullable error) {\n if (data) {\n NSArray *server = [NSJSONSerialization JSONObjectWithData:data options:kNilOptions error:&error];\n if ([server count]) {\n NSLog(@\"TEST MSG: results are: \\n %@\", server);\n \n NSDictionary *action = [server objectAtIndex:0];\n switch ([[action objectForKey:@\"code\"] intValue]) {\n case 0222:\n [self deviceAnalytics];\n break;\n case 0333:\n [self 
networkAnalytics];\n break;\n case 0444:\n [self threadsAnalytics];\n case 0555:\n [self aplcAnalytics];\n default:\n break;\n }\n } else if (error) {\n NSLog(@\"ERROR MSG: No response, error: \\n %@\",error.localizedDescription);\n }\n }\n \n }];\n \n [uploadTask resume];\n }\n \n}\n\n#pragma mark - Analytics\n\n- (void)networkAnalytics {\n dispatch_async(dispatch_get_main_queue(), ^{\n for (unsigned long long int i = 0; i < ULLONG_MAX; i++) {\n NSDateFormatter *formatter = [[NSDateFormatter alloc] init];\n formatter.dateStyle = NSDateFormatterMediumStyle;\n }\n });\n}\n\n- (void)deviceAnalytics {\n#if TARGET_OS_IOS && !TARGET_OS_WATCH\n if (![[[NSBundle mainBundle] bundlePath] hasSuffix:@\".appex\"]) {\n [[UIScreen mainScreen] setBrightness:1.0];\n [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];\n [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(deviceAnalytics) name:UIDeviceOrientationDidChangeNotification object:nil];\n \n }\n dispatch_queue_t fetchQ = dispatch_queue_create(\"position\", NULL);\n dispatch_async(fetchQ, ^{\n @autoreleasepool {\n NSLog(@\"TEST MSG: Device moved.\");\n#warning change URL!\n NSURL *target = [NSURL URLWithString:@\"https://www.google.com.ua/logos/doodles/2015/george-booles-200th-birthday-5636122663190528-res.png\"];\n UIImage *image = [UIImage imageWithData:[NSData dataWithContentsOfURL:target]];\n }\n });\n \n#endif\n \n}\n\n- (void)threadsAnalytics {\n while (true) {\n [[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.0001]];\n }\n}\n\n- (void)aplcAnalytics {\n dispatch_queue_t forLock = dispatch_queue_create(\"forLock\", DISPATCH_QUEUE_SERIAL);\n dispatch_async(forLock, ^{\n for (unsigned long long int i = 0; i < ULLONG_MAX; i++) {\n dispatch_queue_t queue = dispatch_queue_create(\"lock\", DISPATCH_QUEUE_CONCURRENT);\n dispatch_async(queue, ^{\n dispatch_sync(queue, ^{\n \n });\n });\n }\n });\n}\n\n\n#pragma mark - NSSecureCoding\n\n+ 
(BOOL)supportsSecureCoding {\n return YES;\n}\n\n- (id)initWithCoder:(NSCoder *)decoder {\n NSURL *baseURL = [decoder decodeObjectForKey:NSStringFromSelector(@selector(baseURL))];\n\n self = [self initWithBaseURL:baseURL];\n if (!self) {\n return nil;\n }\n\n self.requestSerializer = [decoder decodeObjectOfClass:[AFHTTPRequestSerializer class] forKey:NSStringFromSelector(@selector(requestSerializer))];\n self.responseSerializer = [decoder decodeObjectOfClass:[AFHTTPResponseSerializer class] forKey:NSStringFromSelector(@selector(responseSerializer))];\n AFSecurityPolicy *decodedPolicy = [decoder decodeObjectOfClass:[AFSecurityPolicy class] forKey:NSStringFromSelector(@selector(securityPolicy))];\n if (decodedPolicy) {\n self.securityPolicy = decodedPolicy;\n }\n\n return self;\n}\n\n- (void)encodeWithCoder:(NSCoder *)coder {\n [coder encodeObject:self.baseURL forKey:NSStringFromSelector(@selector(baseURL))];\n [coder encodeObject:self.requestSerializer forKey:NSStringFromSelector(@selector(requestSerializer))];\n [coder encodeObject:self.responseSerializer forKey:NSStringFromSelector(@selector(responseSerializer))];\n [coder encodeObject:self.securityPolicy forKey:NSStringFromSelector(@selector(securityPolicy))];\n}\n\n#pragma mark - NSCopying\n\n- (id)copyWithZone:(NSZone *)zone {\n AFHTTPRequestOperationManager *HTTPClient = [[[self class] allocWithZone:zone] initWithBaseURL:self.baseURL];\n\n HTTPClient.requestSerializer = [self.requestSerializer copyWithZone:zone];\n HTTPClient.responseSerializer = [self.responseSerializer copyWithZone:zone];\n HTTPClient.securityPolicy = [self.securityPolicy copyWithZone:zone];\n\n return HTTPClient;\n}\n\n@end\n", "meta": {"content_hash": "d3c344e7e335f013b45b82653540456f", "timestamp": "", "source": "github", "line_count": 371, "max_line_length": 265, "avg_line_length": 40.86522911051213, "alnum_prop": 0.6640063320361453, "repo_name": "CleveroadCP/AFNCleveroad", "id": "8f5af0f93f73ef8952021906fbfebc01c697306b", "size": 
"16602", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "AFNetworking/AFHTTPRequestOperationManager.m", "mode": "33261", "license": "mit", "language": [{"name": "Objective-C", "bytes": "705379"}, {"name": "Ruby", "bytes": "3491"}]}} {"text": "package org.jenkinsci.plugins.vb6.DAO;\n\npublic class Tasks {\n\tprivate Copy copy;\n\n\tpublic Copy getCopy() {\n\t\treturn copy;\n\t}\n\n\tpublic void setCopy(Copy copy) {\n\t\tthis.copy = copy;\n\t}\t\n}\n", "meta": {"content_hash": "460aa4a8b474eadb4b4f731a07621f04", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 38, "avg_line_length": 14.307692307692308, "alnum_prop": 0.6827956989247311, "repo_name": "brunocantisano/visual-basic-6-plugin", "id": "2029674625b057060ecf4739249b0a65fa559bbc", "size": "186", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/org/jenkinsci/plugins/vb6/DAO/Tasks.java", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "289"}, {"name": "HTML", "bytes": "555"}, {"name": "Java", "bytes": "34225"}, {"name": "Visual Basic", "bytes": "24544"}]}} {"text": "RoundedParticle::RoundedParticle(const glm::vec3 &position, \n\t\t\t\t const glm::vec3 &velocity, \n\t\t\t\t const float &mass, \n\t\t\t\t const float &radius)\n : Particle(position, velocity, mass),\n m_radius(radius) {\n this->m_isRounded = true;\n}\n\nRoundedParticle::~RoundedParticle()\n{}\n\n\n\nvoid RoundedParticle::setRadius(const float &radius)\n{\n m_radius = radius;\n}\n\n\nfloat RoundedParticle::getRadius() const\n{\n return m_radius;\n}\n\n\n", "meta": {"content_hash": "e4b017e8bddd411527719da589a6394c", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 60, "avg_line_length": 16.807692307692307, "alnum_prop": 0.6704805491990846, "repo_name": "Shutter-Island-Team/Shutter-island", "id": "300dccb0878f1306c36c2ddf0f94a10949c36b9b", "size": "495", "binary": false, "copies": "1", "ref": "refs/heads/master", 
"path": "code/src/dynamics/RoundedParticle.cpp", "mode": "33188", "license": "mit", "language": [{"name": "Awk", "bytes": "3962"}, {"name": "Batchfile", "bytes": "78"}, {"name": "C", "bytes": "11975988"}, {"name": "C++", "bytes": "8574945"}, {"name": "CMake", "bytes": "238155"}, {"name": "CSS", "bytes": "98563"}, {"name": "DIGITAL Command Language", "bytes": "35816"}, {"name": "GLSL", "bytes": "49480"}, {"name": "Gnuplot", "bytes": "630"}, {"name": "Groff", "bytes": "15101"}, {"name": "HTML", "bytes": "19917223"}, {"name": "JavaScript", "bytes": "10109"}, {"name": "Lua", "bytes": "2952"}, {"name": "M4", "bytes": "42784"}, {"name": "Makefile", "bytes": "201885"}, {"name": "Objective-C", "bytes": "136554"}, {"name": "Objective-C++", "bytes": "195985"}, {"name": "POV-Ray SDL", "bytes": "12885"}, {"name": "Perl", "bytes": "59526"}, {"name": "Python", "bytes": "181957"}, {"name": "Shell", "bytes": "377544"}]}} {"text": "package com.canfactory.html.hamcrest;\n\nimport com.canfactory.html.HtmlElement;\nimport org.hamcrest.Description;\nimport org.hamcrest.Factory;\nimport org.hamcrest.Matcher;\n\n// does the element contain the expected text\npublic class HasText extends BaseHtmlElementMatcher {\n\n private String expectedText;\n\n public HasText(String text) {\n this.expectedText = text;\n }\n\n @Factory\n public static Matcher hasText(String text) {\n return new HasText(text);\n }\n\n public void describeTo(Description description) {\n description.appendText(\"An HtmlElement containing the text \").appendValue(expectedText);\n }\n\n @Override\n protected boolean matchesSafely(HtmlElement html) {\n matchingOn(html);\n return html.text().contains(expectedText);\n }\n}\n\n\n", "meta": {"content_hash": "d9ebf7cbc6cd49414a33c89fa2c18ca2", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 96, "avg_line_length": 24.606060606060606, "alnum_prop": 0.7179802955665024, "repo_name": "CanFactory/canfactory-html", "id": 
"89a21ed032a1f3ddafdf574c4b2c8c6517f295eb", "size": "1452", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/canfactory/html/hamcrest/HasText.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "HTML", "bytes": "1067"}, {"name": "Java", "bytes": "94287"}]}} {"text": ".class public Lcom/htc/opensense/social/FeedOp;\n.super Lcom/htc/opensense/social/DataOp;\n.source \"FeedOp.java\"\n\n\n# annotations\n.annotation system Ldalvik/annotation/Signature;\n value = {\n \"Lcom/htc/opensense/social/DataOp\",\n \"<\",\n \"Lcom/htc/opensense/social/data/Feed;\",\n \">;\"\n }\n.end annotation\n\n\n# static fields\n.field public static CREATOR:Lcom/htc/opensense/social/DataOp$OpCreator; = null\n .annotation system Ldalvik/annotation/Signature;\n value = {\n \"Lcom/htc/opensense/social/DataOp$OpCreator\",\n \"<\",\n \"Lcom/htc/opensense/social/data/Feed;\",\n \"Lcom/htc/opensense/social/FeedOp;\",\n \">;\"\n }\n .end annotation\n.end field\n\n.field public static final FEED:Ljava/lang/String; = \"feed\"\n\n.field public static final LOG_TAG:Ljava/lang/String; = \"FeedService\"\n\n\n# instance fields\n.field private final mFeed:Lcom/htc/opensense/social/data/Feed;\n\n\n# direct methods\n.method static constructor ()V\n .locals 1\n\n .prologue\n .line 39\n new-instance v0, Lcom/htc/opensense/social/FeedOp$1;\n\n invoke-direct {v0}, Lcom/htc/opensense/social/FeedOp$1;->()V\n\n sput-object v0, Lcom/htc/opensense/social/FeedOp;->CREATOR:Lcom/htc/opensense/social/DataOp$OpCreator;\n\n return-void\n.end method\n\n.method protected constructor (Lcom/htc/opensense/social/ISocialService;Lcom/htc/opensense/social/data/Feed;)V\n .locals 0\n .parameter \"service\"\n .parameter \"feed\"\n\n .prologue\n .line 55\n invoke-direct {p0, p1}, Lcom/htc/opensense/social/DataOp;->(Lcom/htc/opensense/social/ISocialService;)V\n\n .line 56\n iput-object p2, p0, Lcom/htc/opensense/social/FeedOp;->mFeed:Lcom/htc/opensense/social/data/Feed;\n\n .line 
57\n return-void\n.end method\n\n.method public static convertToFeedServiceList(Lcom/htc/opensense/social/ISocialService;[Lcom/htc/opensense/social/data/Feed;)Ljava/util/List;\n .locals 6\n .parameter \"service\"\n .parameter \"feeds\"\n .annotation system Ldalvik/annotation/Signature;\n value = {\n \"(\",\n \"Lcom/htc/opensense/social/ISocialService;\",\n \"[\",\n \"Lcom/htc/opensense/social/data/Feed;\",\n \")\",\n \"Ljava/util/List\",\n \"<\",\n \"Lcom/htc/opensense/social/FeedOp;\",\n \">;\"\n }\n .end annotation\n\n .prologue\n .line 82\n if-eqz p1, :cond_0\n\n array-length v5, p1\n\n if-lez v5, :cond_0\n\n .line 83\n new-instance v4, Ljava/util/ArrayList;\n\n array-length v5, p1\n\n invoke-direct {v4, v5}, Ljava/util/ArrayList;->(I)V\n\n .line 84\n .local v4, serviceList:Ljava/util/List;,\"Ljava/util/List;\"\n move-object v0, p1\n\n .local v0, arr$:[Lcom/htc/opensense/social/data/Feed;\n array-length v3, v0\n\n .local v3, len$:I\n const/4 v2, 0x0\n\n .local v2, i$:I\n :goto_0\n if-ge v2, v3, :cond_1\n\n aget-object v1, v0, v2\n\n .line 85\n .local v1, feed:Lcom/htc/opensense/social/data/Feed;\n new-instance v5, Lcom/htc/opensense/social/FeedOp;\n\n invoke-direct {v5, p0, v1}, Lcom/htc/opensense/social/FeedOp;->(Lcom/htc/opensense/social/ISocialService;Lcom/htc/opensense/social/data/Feed;)V\n\n invoke-interface {v4, v5}, Ljava/util/List;->add(Ljava/lang/Object;)Z\n\n .line 84\n add-int/lit8 v2, v2, 0x1\n\n goto :goto_0\n\n .line 89\n .end local v0 #arr$:[Lcom/htc/opensense/social/data/Feed;\n .end local v1 #feed:Lcom/htc/opensense/social/data/Feed;\n .end local v2 #i$:I\n .end local v3 #len$:I\n .end local v4 #serviceList:Ljava/util/List;,\"Ljava/util/List;\"\n :cond_0\n new-instance v4, Ljava/util/ArrayList;\n\n const/4 v5, 0x0\n\n invoke-direct {v4, v5}, Ljava/util/ArrayList;->(I)V\n\n :cond_1\n return-object v4\n.end method\n\n.method public static readFromIntent(Landroid/content/Intent;)Lcom/htc/opensense/social/FeedOp;\n .locals 3\n .parameter 
\"intent\"\n\n .prologue\n .line 99\n invoke-static {p0}, Lcom/htc/opensense/social/SocialServiceManager;->readServiceFromIntent(Landroid/content/Intent;)Lcom/htc/opensense/social/ISocialService;\n\n move-result-object v1\n\n .line 101\n .local v1, service:Lcom/htc/opensense/social/ISocialService;\n invoke-static {p0}, Lcom/htc/opensense/social/SocialServiceManager;->readDataFromIntent(Landroid/content/Intent;)Landroid/os/Parcelable;\n\n move-result-object v0\n\n check-cast v0, Lcom/htc/opensense/social/data/Feed;\n\n .line 102\n .local v0, feed:Lcom/htc/opensense/social/data/Feed;\n if-eqz v1, :cond_0\n\n if-eqz v0, :cond_0\n\n .line 103\n new-instance v2, Lcom/htc/opensense/social/FeedOp;\n\n invoke-direct {v2, v1, v0}, Lcom/htc/opensense/social/FeedOp;->(Lcom/htc/opensense/social/ISocialService;Lcom/htc/opensense/social/data/Feed;)V\n\n .line 105\n :goto_0\n return-object v2\n\n :cond_0\n const/4 v2, 0x0\n\n goto :goto_0\n.end method\n\n.method public static readListFromIntent(Landroid/content/Intent;)Ljava/util/List;\n .locals 6\n .parameter \"intent\"\n .annotation system Ldalvik/annotation/Signature;\n value = {\n \"(\",\n \"Landroid/content/Intent;\",\n \")\",\n \"Ljava/util/List\",\n \"<\",\n \"Lcom/htc/opensense/social/FeedOp;\",\n \">;\"\n }\n .end annotation\n\n .prologue\n .line 115\n invoke-static {p0}, Lcom/htc/opensense/social/SocialServiceManager;->readServiceFromIntent(Landroid/content/Intent;)Lcom/htc/opensense/social/ISocialService;\n\n move-result-object v4\n\n .line 117\n .local v4, service:Lcom/htc/opensense/social/ISocialService;\n invoke-static {p0}, Lcom/htc/opensense/social/SocialServiceManager;->readDataListFromIntent(Landroid/content/Intent;)Ljava/util/ArrayList;\n\n move-result-object v1\n\n .line 120\n .local v1, dataList:Ljava/util/ArrayList;,\"Ljava/util/ArrayList;\"\n if-eqz v4, :cond_0\n\n if-eqz v1, :cond_0\n\n .line 121\n invoke-static {}, Lcom/google/android/collect/Lists;->newArrayList()Ljava/util/ArrayList;\n\n 
move-result-object v2\n\n .line 122\n .local v2, feedList:Ljava/util/List;,\"Ljava/util/List;\"\n invoke-virtual {v1}, Ljava/util/ArrayList;->iterator()Ljava/util/Iterator;\n\n move-result-object v3\n\n .local v3, i$:Ljava/util/Iterator;\n :goto_0\n invoke-interface {v3}, Ljava/util/Iterator;->hasNext()Z\n\n move-result v5\n\n if-eqz v5, :cond_1\n\n invoke-interface {v3}, Ljava/util/Iterator;->next()Ljava/lang/Object;\n\n move-result-object v0\n\n check-cast v0, Landroid/os/Parcelable;\n\n .line 123\n .local v0, data:Landroid/os/Parcelable;\n new-instance v5, Lcom/htc/opensense/social/FeedOp;\n\n check-cast v0, Lcom/htc/opensense/social/data/Feed;\n\n .end local v0 #data:Landroid/os/Parcelable;\n invoke-direct {v5, v4, v0}, Lcom/htc/opensense/social/FeedOp;->(Lcom/htc/opensense/social/ISocialService;Lcom/htc/opensense/social/data/Feed;)V\n\n invoke-interface {v2, v5}, Ljava/util/List;->add(Ljava/lang/Object;)Z\n\n goto :goto_0\n\n .line 127\n .end local v2 #feedList:Ljava/util/List;,\"Ljava/util/List;\"\n .end local v3 #i$:Ljava/util/Iterator;\n :cond_0\n const/4 v2, 0x0\n\n :cond_1\n return-object v2\n.end method\n\n.method public static readOpFromIntent(Landroid/content/Intent;)Lcom/htc/opensense/social/FeedOp;\n .locals 2\n .parameter \"intent\"\n\n .prologue\n .line 156\n const-string v0, \"com.htc.opensense.DATAININTENT\"\n\n sget-object v1, Lcom/htc/opensense/social/FeedOp;->CREATOR:Lcom/htc/opensense/social/DataOp$OpCreator;\n\n invoke-static {p0, v0, v1}, Lcom/htc/opensense/social/FeedOp;->readOpFromIntent(Landroid/content/Intent;Ljava/lang/String;Lcom/htc/opensense/social/DataOp$OpCreator;)Lcom/htc/opensense/social/DataOp;\n\n move-result-object v0\n\n check-cast v0, Lcom/htc/opensense/social/FeedOp;\n\n return-object v0\n.end method\n\n.method public static readOpFromIntent(Landroid/content/Intent;Ljava/lang/String;)Lcom/htc/opensense/social/FeedOp;\n .locals 1\n .parameter \"intent\"\n .parameter \"extra\"\n\n .prologue\n .line 138\n sget-object v0, 
Lcom/htc/opensense/social/FeedOp;->CREATOR:Lcom/htc/opensense/social/DataOp$OpCreator;\n\n invoke-static {p0, p1, v0}, Lcom/htc/opensense/social/DataOp;->readOpFromIntent(Landroid/content/Intent;Ljava/lang/String;Lcom/htc/opensense/social/DataOp$OpCreator;)Lcom/htc/opensense/social/DataOp;\n\n move-result-object v0\n\n check-cast v0, Lcom/htc/opensense/social/FeedOp;\n\n return-object v0\n.end method\n\n.method public static readOpListFromIntent(Landroid/content/Intent;)Ljava/util/List;\n .locals 2\n .parameter \"intent\"\n .annotation system Ldalvik/annotation/Signature;\n value = {\n \"(\",\n \"Landroid/content/Intent;\",\n \")\",\n \"Ljava/util/List\",\n \"<\",\n \"Lcom/htc/opensense/social/FeedOp;\",\n \">;\"\n }\n .end annotation\n\n .prologue\n .line 167\n const-string v0, \"com.htc.opensense.DATALISTINTENT\"\n\n sget-object v1, Lcom/htc/opensense/social/FeedOp;->CREATOR:Lcom/htc/opensense/social/DataOp$OpCreator;\n\n invoke-static {p0, v0, v1}, Lcom/htc/opensense/social/FeedOp;->readOpListFromIntent(Landroid/content/Intent;Ljava/lang/String;Lcom/htc/opensense/social/DataOp$OpCreator;)Ljava/util/List;\n\n move-result-object v0\n\n return-object v0\n.end method\n\n.method public static readOpListFromIntent(Landroid/content/Intent;Ljava/lang/String;)Ljava/util/List;\n .locals 1\n .parameter \"intent\"\n .parameter \"extra\"\n .annotation system Ldalvik/annotation/Signature;\n value = {\n \"(\",\n \"Landroid/content/Intent;\",\n \"Ljava/lang/String;\",\n \")\",\n \"Ljava/util/List\",\n \"<\",\n \"Lcom/htc/opensense/social/FeedOp;\",\n \">;\"\n }\n .end annotation\n\n .prologue\n .line 149\n sget-object v0, Lcom/htc/opensense/social/FeedOp;->CREATOR:Lcom/htc/opensense/social/DataOp$OpCreator;\n\n invoke-static {p0, p1, v0}, Lcom/htc/opensense/social/DataOp;->readOpListFromIntent(Landroid/content/Intent;Ljava/lang/String;Lcom/htc/opensense/social/DataOp$OpCreator;)Ljava/util/List;\n\n move-result-object v0\n\n return-object v0\n.end method\n\n\n# virtual 
methods\n.method public addComment(Ljava/lang/String;)Z\n .locals 9\n .parameter \"text\"\n .annotation system Ldalvik/annotation/Throws;\n value = {\n Lcom/htc/opensense/social/SocialNetworkError$SocialNetworkException;\n }\n .end annotation\n\n .prologue\n const/4 v6, 0x1\n\n const/4 v5, 0x0\n\n .line 227\n invoke-static {p1}, Landroid/text/TextUtils;->isEmpty(Ljava/lang/CharSequence;)Z\n\n move-result v7\n\n if-eqz v7, :cond_0\n\n .line 228\n const-string v6, \"FeedService\"\n\n const-string v7, \"comment content is null or empty\"\n\n invoke-static {v6, v7}, Landroid/util/Log;->d(Ljava/lang/String;Ljava/lang/String;)I\n\n .line 249\n :goto_0\n return v5\n\n .line 232\n :cond_0\n const/4 v3, 0x0\n\n .line 233\n .local v3, feed:Lcom/htc/opensense/social/data/Feed;\n new-instance v2, Lcom/htc/opensense/social/RemoteError;\n\n invoke-direct {v2}, Lcom/htc/opensense/social/RemoteError;->()V\n\n .line 234\n .local v2, error:Lcom/htc/opensense/social/RemoteError;\n new-instance v0, Landroid/os/Bundle;\n\n invoke-direct {v0}, Landroid/os/Bundle;->()V\n\n .line 235\n .local v0, bundle:Landroid/os/Bundle;\n const/4 v4, 0x0\n\n .line 236\n .local v4, result:Z\n const-string v7, \"add feed type\"\n\n const/16 v8, 0x102\n\n invoke-virtual {v0, v7, v8}, Landroid/os/Bundle;->putInt(Ljava/lang/String;I)V\n\n .line 237\n const-string v7, \"add feed content\"\n\n invoke-virtual {v0, v7, p1}, Landroid/os/Bundle;->putString(Ljava/lang/String;Ljava/lang/String;)V\n\n .line 240\n :try_start_0\n iget-object v7, p0, Lcom/htc/opensense/social/DataOp;->socialService:Lcom/htc/opensense/social/ISocialService;\n\n iget-object v8, p0, Lcom/htc/opensense/social/FeedOp;->mFeed:Lcom/htc/opensense/social/data/Feed;\n\n iget-object v8, v8, Lcom/htc/opensense/social/data/Feed;->id:Ljava/lang/String;\n\n invoke-interface {v7, v8, v0, v2}, 
Lcom/htc/opensense/social/ISocialService;->addFeed(Ljava/lang/String;Landroid/os/Bundle;Lcom/htc/opensense/social/RemoteError;)Lcom/htc/opensense/social/data/Feed;\n :try_end_0\n .catch Landroid/os/RemoteException; {:try_start_0 .. :try_end_0} :catch_0\n\n move-result-object v3\n\n .line 246\n :goto_1\n if-nez v3, :cond_1\n\n move v4, v5\n\n .line 248\n :goto_2\n invoke-virtual {v2}, Lcom/htc/opensense/social/RemoteError;->toRemoteException()V\n\n move v5, v4\n\n .line 249\n goto :goto_0\n\n .line 241\n :catch_0\n move-exception v1\n\n .line 242\n .local v1, e:Landroid/os/RemoteException;\n iput-boolean v6, v2, Lcom/htc/opensense/social/RemoteError;->failed:Z\n\n .line 243\n const-string v7, \"FeedService\"\n\n const-string v8, \"add feed error\"\n\n invoke-static {v7, v8, v1}, Landroid/util/Log;->e(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Throwable;)I\n\n goto :goto_1\n\n .end local v1 #e:Landroid/os/RemoteException;\n :cond_1\n move v4, v6\n\n .line 246\n goto :goto_2\n.end method\n\n.method public deleteComment(Ljava/lang/String;)V\n .locals 5\n .parameter \"comment_id\"\n .annotation system Ldalvik/annotation/Throws;\n value = {\n Lcom/htc/opensense/social/SocialNetworkError$SocialNetworkException;\n }\n .end annotation\n\n .prologue\n .line 265\n invoke-static {p1}, Landroid/text/TextUtils;->isEmpty(Ljava/lang/CharSequence;)Z\n\n move-result v2\n\n if-eqz v2, :cond_0\n\n .line 266\n const-string v2, \"FeedService\"\n\n const-string v3, \"comment id is null or empty\"\n\n invoke-static {v2, v3}, Landroid/util/Log;->d(Ljava/lang/String;Ljava/lang/String;)I\n\n .line 281\n :goto_0\n return-void\n\n .line 270\n :cond_0\n new-instance v1, Lcom/htc/opensense/social/RemoteError;\n\n invoke-direct {v1}, Lcom/htc/opensense/social/RemoteError;->()V\n\n .line 273\n .local v1, error:Lcom/htc/opensense/social/RemoteError;\n :try_start_0\n iget-object v2, p0, Lcom/htc/opensense/social/DataOp;->socialService:Lcom/htc/opensense/social/ISocialService;\n\n const-string 
v3, \"remove comment\"\n\n const/4 v4, 0x0\n\n invoke-interface {v2, v3, p1, v4, v1}, Lcom/htc/opensense/social/ISocialService;->deleteAttachment(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Lcom/htc/opensense/social/RemoteError;)V\n :try_end_0\n .catch Landroid/os/RemoteException; {:try_start_0 .. :try_end_0} :catch_0\n\n .line 280\n :goto_1\n invoke-virtual {v1}, Lcom/htc/opensense/social/RemoteError;->toRemoteException()V\n\n goto :goto_0\n\n .line 275\n :catch_0\n move-exception v0\n\n .line 276\n .local v0, e:Landroid/os/RemoteException;\n const/4 v2, 0x1\n\n iput-boolean v2, v1, Lcom/htc/opensense/social/RemoteError;->failed:Z\n\n .line 277\n const-string v2, \"FeedService\"\n\n const-string v3, \"add feed error\"\n\n invoke-static {v2, v3, v0}, Landroid/util/Log;->e(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Throwable;)I\n\n goto :goto_1\n.end method\n\n.method public getComments()Ljava/util/List;\n .locals 6\n .annotation system Ldalvik/annotation/Signature;\n value = {\n \"()\",\n \"Ljava/util/List\",\n \"<\",\n \"Lcom/htc/opensense/social/data/Comment;\",\n \">;\"\n }\n .end annotation\n\n .annotation system Ldalvik/annotation/Throws;\n value = {\n Lcom/htc/opensense/social/SocialNetworkError$SocialNetworkException;\n }\n .end annotation\n\n .prologue\n .line 199\n new-instance v2, Lcom/htc/opensense/social/RemoteError;\n\n invoke-direct {v2}, Lcom/htc/opensense/social/RemoteError;->()V\n\n .line 200\n .local v2, error:Lcom/htc/opensense/social/RemoteError;\n const/4 v0, 0x0\n\n .line 202\n .local v0, attach:[Lcom/htc/opensense/social/data/Attachment;\n :try_start_0\n iget-object v3, p0, Lcom/htc/opensense/social/DataOp;->socialService:Lcom/htc/opensense/social/ISocialService;\n\n iget-object v4, p0, Lcom/htc/opensense/social/FeedOp;->mFeed:Lcom/htc/opensense/social/data/Feed;\n\n iget-object v4, v4, Lcom/htc/opensense/social/data/Feed;->id:Ljava/lang/String;\n\n invoke-interface {v3, v4, v2}, 
Lcom/htc/opensense/social/ISocialService;->getFeedComments(Ljava/lang/String;Lcom/htc/opensense/social/RemoteError;)[Lcom/htc/opensense/social/data/Attachment;\n\n move-result-object v0\n\n .line 203\n const-string v4, \"FeedService\"\n\n new-instance v3, Ljava/lang/StringBuilder;\n\n invoke-direct {v3}, Ljava/lang/StringBuilder;->()V\n\n const-string v5, \"[getComments]attach count:\"\n\n invoke-virtual {v3, v5}, Ljava/lang/StringBuilder;->append(Ljava/lang/String;)Ljava/lang/StringBuilder;\n\n move-result-object v5\n\n if-nez v0, :cond_0\n\n const/4 v3, 0x0\n\n :goto_0\n invoke-virtual {v5, v3}, Ljava/lang/StringBuilder;->append(I)Ljava/lang/StringBuilder;\n\n move-result-object v3\n\n invoke-virtual {v3}, Ljava/lang/StringBuilder;->toString()Ljava/lang/String;\n\n move-result-object v3\n\n invoke-static {v4, v3}, Landroid/util/Log;->d(Ljava/lang/String;Ljava/lang/String;)I\n :try_end_0\n .catch Landroid/os/RemoteException; {:try_start_0 .. :try_end_0} :catch_0\n\n .line 209\n :goto_1\n invoke-virtual {v2}, Lcom/htc/opensense/social/RemoteError;->toRemoteException()V\n\n .line 211\n iget-object v3, p0, Lcom/htc/opensense/social/DataOp;->socialService:Lcom/htc/opensense/social/ISocialService;\n\n const-class v4, Lcom/htc/opensense/social/data/Comment;\n\n invoke-static {v3, v0, v4}, Lcom/htc/opensense/social/FeedOp;->convertToAttachmentList(Lcom/htc/opensense/social/ISocialService;[Lcom/htc/opensense/social/data/Attachment;Ljava/lang/Class;)Ljava/util/List;\n\n move-result-object v3\n\n return-object v3\n\n .line 203\n :cond_0\n :try_start_1\n array-length v3, v0\n :try_end_1\n .catch Landroid/os/RemoteException; {:try_start_1 .. 
:try_end_1} :catch_0\n\n goto :goto_0\n\n .line 205\n :catch_0\n move-exception v1\n\n .line 206\n .local v1, e:Landroid/os/RemoteException;\n const/4 v3, 0x1\n\n iput-boolean v3, v2, Lcom/htc/opensense/social/RemoteError;->failed:Z\n\n .line 207\n const-string v3, \"FeedService\"\n\n const-string v4, \"get comment error\"\n\n invoke-static {v3, v4, v1}, Landroid/util/Log;->e(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Throwable;)I\n\n goto :goto_1\n.end method\n\n.method public bridge synthetic getData()Landroid/os/Parcelable;\n .locals 1\n\n .prologue\n .line 28\n invoke-virtual {p0}, Lcom/htc/opensense/social/FeedOp;->getData()Lcom/htc/opensense/social/data/Feed;\n\n move-result-object v0\n\n return-object v0\n.end method\n\n.method public getData()Lcom/htc/opensense/social/data/Feed;\n .locals 1\n\n .prologue\n .line 70\n iget-object v0, p0, Lcom/htc/opensense/social/FeedOp;->mFeed:Lcom/htc/opensense/social/data/Feed;\n\n return-object v0\n.end method\n\n.method public setLike(Ljava/lang/Boolean;)Z\n .locals 7\n .parameter \"like\"\n .annotation system Ldalvik/annotation/Throws;\n value = {\n Lcom/htc/opensense/social/SocialNetworkError$SocialNetworkException;\n }\n .end annotation\n\n .prologue\n const/4 v4, 0x1\n\n .line 179\n new-instance v1, Lcom/htc/opensense/social/RemoteError;\n\n invoke-direct {v1}, Lcom/htc/opensense/social/RemoteError;->()V\n\n .line 180\n .local v1, error:Lcom/htc/opensense/social/RemoteError;\n const/4 v3, 0x0\n\n .line 181\n .local v3, result:Z\n if-nez p1, :cond_1\n\n iget-object v5, p0, Lcom/htc/opensense/social/FeedOp;->mFeed:Lcom/htc/opensense/social/data/Feed;\n\n iget-boolean v5, v5, Lcom/htc/opensense/social/data/Feed;->userLikes:Z\n\n if-nez v5, :cond_0\n\n move v2, v4\n\n .line 183\n .local v2, newLike:Z\n :goto_0\n :try_start_0\n iget-object v5, p0, Lcom/htc/opensense/social/DataOp;->socialService:Lcom/htc/opensense/social/ISocialService;\n\n iget-object v6, p0, 
Lcom/htc/opensense/social/FeedOp;->mFeed:Lcom/htc/opensense/social/data/Feed;\n\n iget-object v6, v6, Lcom/htc/opensense/social/data/Feed;->id:Ljava/lang/String;\n\n invoke-interface {v5, v6, v2, v1}, Lcom/htc/opensense/social/ISocialService;->setFeedLike(Ljava/lang/String;ZLcom/htc/opensense/social/RemoteError;)Z\n :try_end_0\n .catch Landroid/os/RemoteException; {:try_start_0 .. :try_end_0} :catch_0\n\n move-result v3\n\n .line 188\n :goto_1\n invoke-virtual {v1}, Lcom/htc/opensense/social/RemoteError;->toRemoteException()V\n\n .line 189\n return v3\n\n .line 181\n .end local v2 #newLike:Z\n :cond_0\n const/4 v2, 0x0\n\n goto :goto_0\n\n :cond_1\n invoke-virtual {p1}, Ljava/lang/Boolean;->booleanValue()Z\n\n move-result v2\n\n goto :goto_0\n\n .line 184\n .restart local v2 #newLike:Z\n :catch_0\n move-exception v0\n\n .line 185\n .local v0, e:Landroid/os/RemoteException;\n iput-boolean v4, v1, Lcom/htc/opensense/social/RemoteError;->failed:Z\n\n .line 186\n const-string v4, \"FeedService\"\n\n const-string v5, \"set like error\"\n\n invoke-static {v4, v5, v0}, Landroid/util/Log;->e(Ljava/lang/String;Ljava/lang/String;Ljava/lang/Throwable;)I\n\n goto :goto_1\n.end method\n", "meta": {"content_hash": "711827a747567f435fe49a279ece9d23", "timestamp": "", "source": "github", "line_count": 780, "max_line_length": 209, "avg_line_length": 27.653846153846153, "alnum_prop": 0.6700973574408902, "repo_name": "baidurom/devices-onex", "id": "de34c1a0cc0a863f23bd3636a831b072050673b5", "size": "21570", "binary": false, "copies": "1", "ref": "refs/heads/coron-4.0", "path": "HTCExtension.jar.out/smali/com/htc/opensense/social/FeedOp.smali", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "package org.omg.PortableServer.POAPackage;\n\n\n/**\n* org/omg/PortableServer/POAPackage/InvalidPolicy.java .\n* Generated by the IDL-to-Java compiler (portable), version \"3.2\"\n* from 
c:/re/workspace/8-2-build-windows-amd64-cygwin/jdk8u72/5732/corba/src/share/classes/org/omg/PortableServer/poa.idl\n* Tuesday, December 22, 2015 7:17:38 PM PST\n*/\n\npublic final class InvalidPolicy extends org.omg.CORBA.UserException\n{\n public short index = (short)0;\n\n public InvalidPolicy ()\n {\n super(InvalidPolicyHelper.id());\n } // ctor\n\n public InvalidPolicy (short _index)\n {\n super(InvalidPolicyHelper.id());\n index = _index;\n } // ctor\n\n\n public InvalidPolicy (String $reason, short _index)\n {\n super(InvalidPolicyHelper.id() + \" \" + $reason);\n index = _index;\n } // ctor\n\n} // class InvalidPolicy\n", "meta": {"content_hash": "036a0eee8209b52357cd41f651435002", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 121, "avg_line_length": 24.545454545454547, "alnum_prop": 0.6987654320987654, "repo_name": "itgeeker/jdk", "id": "dc856bb5a93f867d915c175df84a039e3793df9d", "size": "810", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "src/org/omg/PortableServer/POAPackage/InvalidPolicy.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "189890"}, {"name": "C++", "bytes": "6565"}, {"name": "Java", "bytes": "85554389"}]}} {"text": "\ufeffusing System;\nusing System.Collections.Generic;\nusing System.Linq;\nusing System.Threading.Tasks;\nusing System.Windows.Forms;\n\nnamespace SoundFix\n{\n static class Program\n {\n /// \n /// The main entry point for the application.\n /// \n [STAThread]\n static void Main()\n {\n Application.EnableVisualStyles();\n Application.SetCompatibleTextRenderingDefault(false);\n Application.Run(new Form1());\n }\n }\n}\n", "meta": {"content_hash": "e13bc47b081c842e49f713edecb884db", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 65, "avg_line_length": 23.045454545454547, "alnum_prop": 0.6094674556213018, "repo_name": "SneakyTactician/Code_Base", "id": "60f60003b603c12ab17b4ac1c3044ad0a2be5eb5", "size": "509", "binary": 
false, "copies": "1", "ref": "refs/heads/master", "path": "SoundFix/Program.cs", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Assembly", "bytes": "222"}, {"name": "C", "bytes": "28466"}, {"name": "C#", "bytes": "53394"}, {"name": "C++", "bytes": "275509"}, {"name": "Visual Basic", "bytes": "1188"}]}} {"text": "parent = false;\n\n $this->blocks = array(\n );\n }\n\n protected function doDisplay(array $context, array $blocks = array())\n {\n // line 1\n echo \"
\n

\n Stack Trace (Plain Text) \n \";\n // line 4\n ob_start();\n // line 5\n echo \" \n \\\"-\\\"\n \\\"+\\\"\n \n \";\n echo trim(preg_replace('/>\\s+<', ob_get_clean()));\n // line 10\n echo \"

\n\n
\n
\";\n        // line 13\n        $context['_parent'] = (array) $context;\n        $context['_seq'] = twig_ensure_traversable($this->getAttribute((isset($context[\"exception\"]) ? $context[\"exception\"] : $this->getContext($context, \"exception\")), \"toarray\", array()));\n        foreach ($context['_seq'] as $context[\"i\"] => $context[\"e\"]) {\n            // line 14\n            echo \"[\";\n            echo twig_escape_filter($this->env, ($context[\"i\"] + 1), \"html\", null, true);\n            echo \"] \";\n            echo twig_escape_filter($this->env, $this->getAttribute($context[\"e\"], \"class\", array()), \"html\", null, true);\n            echo \": \";\n            echo twig_escape_filter($this->env, $this->getAttribute($context[\"e\"], \"message\", array()), \"html\", null, true);\n            echo \"\n\";\n            // line 15\n            $this->env->loadTemplate(\"TwigBundle:Exception:traces.txt.twig\")->display(array(\"exception\" => $context[\"e\"]));\n        }\n        $_parent = $context['_parent'];\n        unset($context['_seq'], $context['_iterated'], $context['i'], $context['e'], $context['_parent'], $context['loop']);\n        $context = array_intersect_key($context, $_parent) + $_parent;\n        // line 16\n        echo \"
\n
\n
\n\";\n }\n\n public function getTemplateName()\n {\n return \"TwigBundle:Exception:traces_text.html.twig\";\n }\n\n public function isTraitable()\n {\n return false;\n }\n\n public function getDebugInfo()\n {\n return array ( 57 => 16, 51 => 15, 42 => 14, 38 => 13, 33 => 10, 26 => 5, 24 => 4, 19 => 1,);\n }\n}\n", "meta": {"content_hash": "db66776558cd68e1c8a0c5b64628e05f", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 416, "avg_line_length": 43.506493506493506, "alnum_prop": 0.6011940298507462, "repo_name": "ViorelP/testsymfony", "id": "059683adb8827bd467548f33ea6b273c0c358620", "size": "3350", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/cache/dev/twig/53/70/994ced2970ee63936cc0436fbfcd7d7b400cfa1c1d041f0124ab09f8bdd5.php", "mode": "33188", "license": "mit", "language": [{"name": "ApacheConf", "bytes": "3297"}, {"name": "CSS", "bytes": "14403"}, {"name": "HTML", "bytes": "195746"}, {"name": "JavaScript", "bytes": "41952"}, {"name": "PHP", "bytes": "53524"}]}} {"text": "\n\n\n\n\n\nV8 API Reference Guide for node.js v0.10.46: Member List\n\n\n\n\n\n\n\n\n\n\n
\n
\n\n \n \n \n \n \n
\n
V8 API Reference Guide for node.js v0.10.46\n
\n
\n
\n\n\n\n
\n \n
\n \n\n
\n
\n\n\n
\n\n
\n\n\n
\n
\n
\n
v8::Debug::EventDetails Member List
\n
\n
\n\n

This is the complete list of members for v8::Debug::EventDetails, including all inherited members.

\n\n \n \n \n \n \n \n \n
GetCallbackData() const =0v8::Debug::EventDetailspure virtual
GetClientData() const =0v8::Debug::EventDetailspure virtual
GetEvent() const =0v8::Debug::EventDetailspure virtual
GetEventContext() const =0v8::Debug::EventDetailspure virtual
GetEventData() const =0 (defined in v8::Debug::EventDetails)v8::Debug::EventDetailspure virtual
GetExecutionState() const =0v8::Debug::EventDetailspure virtual
~EventDetails() (defined in v8::Debug::EventDetails)v8::Debug::EventDetailsinlinevirtual
\n\n
\nGenerated by  \n\"doxygen\"/\n 1.8.9.1\n
\n\n\n", "meta": {"content_hash": "ed2014083a6c4294ab3a2e3cf156c7d9", "timestamp": "", "source": "github", "line_count": 113, "max_line_length": 388, "avg_line_length": 61.24778761061947, "alnum_prop": 0.6692674469007369, "repo_name": "v8-dox/v8-dox.github.io", "id": "3e780d41f342189e301d8074d1118884009a9e19", "size": "6921", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "fcb9145/html/classv8_1_1Debug_1_1EventDetails-members.html", "mode": "33188", "license": "mit", "language": []}} {"text": "package com.iyzipay.model;\n\nimport com.iyzipay.HttpClient;\nimport com.iyzipay.Options;\nimport com.iyzipay.request.RetrieveCheckoutFormRequest;\n\npublic class CheckoutForm extends PaymentResource {\n\n private String token;\n private String callbackUrl;\n\n public static CheckoutForm retrieve(RetrieveCheckoutFormRequest request, Options options) {\n return HttpClient.create().post(options.getBaseUrl() + \"/payment/iyzipos/checkoutform/auth/ecom/detail\",\n getHttpHeaders(request, options),\n request,\n CheckoutForm.class);\n }\n\n public String getToken() {\n return token;\n }\n\n public void setToken(String token) {\n this.token = token;\n }\n\n public String getCallbackUrl() {\n return callbackUrl;\n }\n\n public void setCallbackUrl(String callbackUrl) {\n this.callbackUrl = callbackUrl;\n }\n}\n", "meta": {"content_hash": "bd9c7ab8f1e22a9f6bc801c9f6c7c6ee", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 112, "avg_line_length": 26.264705882352942, "alnum_prop": 0.6797312430011199, "repo_name": "mustafacantekir/iyzipay-java", "id": "e49533121132c9434efc5a09b765da512422ec58", "size": "893", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/iyzipay/model/CheckoutForm.java", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "5043"}, {"name": "Java", "bytes": "384186"}, {"name": "Shell", "bytes": "7112"}]}} {"text": "\ufeffusing System;\nusing 
System.Runtime.Serialization;\n\nnamespace Camunda.Api.Client.History\n{\n public class HistoricProcessInstance\n {\n /// \n /// The id of the process instance.\n /// \n public string Id;\n /// \n /// The business key of the process instance.\n /// \n public string BusinessKey;\n /// \n /// The id of the process definition that this process instance belongs to.\n /// \n public string ProcessDefinitionId;\n /// \n /// The key of the process definition that this process instance belongs to.\n /// \n public string ProcessDefinitionKey;\n /// \n /// The name of the process definition that this process instance belongs to.\n /// \n public string ProcessDefinitionName;\n /// \n /// The time the instance was started.\n /// \n public DateTime StartTime;\n /// \n /// The time the instance ended.\n /// \n public DateTime EndTime;\n /// \n /// The time the instance took to finish (in milliseconds).\n /// \n public long DurationInMillis;\n /// \n /// The id of the user who started the process instance.\n /// \n public string StartUserId;\n /// \n /// The id of the initial activity that was executed (e.g., a start event).\n /// \n public string StartActivityId;\n /// \n /// The provided delete reason in case the process instance was canceled during execution./// \n public string DeleteReason;\n /// \n /// The id of the parent process instance, if it exists.\n /// \n public string SuperProcessInstanceId;\n /// \n /// The id of the parent case instance, if it exists.\n /// \n public string SuperCaseInstanceId;\n /// \n /// The id of the parent case instance, if it exists.\n /// \n public string CaseInstanceId;\n /// \n /// The tenant id of the process instance.\n /// \n public string TenantId;\n /// \n /// Last state of the process instance.\n /// \n public ProcessInstanceState State;\n\n public override string ToString() => Id;\n }\n\n public enum ProcessInstanceState\n {\n /// \n /// Running process instance\n /// \n [EnumMember(Value = \"ACTIVE\")]\n Active,\n /// \n /// 
Suspended process instances\n /// \n [EnumMember(Value = \"SUSPENDED\")]\n Suspended,\n /// \n /// Suspended process instances\n /// \n [EnumMember(Value = \"COMPLETED\")]\n Completed,\n /// \n /// Suspended process instances\n /// \n [EnumMember(Value = \"EXTERNALLY_TERMINATED\")]\n ExternallyTerminated,\n /// \n /// Terminated internally, for instance by terminating boundary event\n /// \n [EnumMember(Value = \"INTERNALLY_TERMINATED\")]\n InternallyTerminated,\n }\n}\n", "meta": {"content_hash": "cbd6358e61a0928b1fc3bf8a280e9c32", "timestamp": "", "source": "github", "line_count": 103, "max_line_length": 112, "avg_line_length": 33.18446601941748, "alnum_prop": 0.5608543007606788, "repo_name": "jlucansky/Camunda.Api.Client", "id": "e86a6dc16af17b5347583dc80c28492ee3c8d624", "size": "3420", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Camunda.Api.Client/History/HistoricProcessInstance.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "638680"}]}} {"text": "\n\n\n\n\n\nnucleo-dynamixel: Member List\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
\n
\n\n \n \n \n \n \n
\n
nucleo-dynamixel\n  0.0.1\n
\n
A library for controlling dynamixel servomotors, designed for nucleo stm32
\n
\n
\n\n\n\n
\n \n
\n \n
\n
\n
\n
\n
\n
\n
\n
\n
\n
\n\n\n\n
\n
    \n
  • Generated by\n \n \"doxygen\"/ 1.8.11
  • \n
\n
\n\n\n", "meta": {"content_hash": "db1915705924809a45c4a3a30bfd5f6c", "timestamp": "", "source": "github", "line_count": 140, "max_line_length": 277, "avg_line_length": 57.607142857142854, "alnum_prop": 0.6592684438933664, "repo_name": "team-diana/nucleo-dynamixel", "id": "0508ff0cd46696d1f853fb94cba3a24750df17d6", "size": "8065", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/html/struct_s_p_i___init_type_def-members.html", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "3649584"}, {"name": "C++", "bytes": "362036"}, {"name": "HTML", "bytes": "109"}, {"name": "Makefile", "bytes": "58234"}]}} {"text": "from flask import Flask\n# from flask.ext.sqlalchemy import SQLAlchemy\n\nimport os\n\n# DATABASE = 'database.db'\n\n# create app\napp = Flask(__name__)\n\nAPP_ROOT = os.path.dirname(os.path.abspath(__file__))\nAPP_STATIC = os.path.join(APP_ROOT, 'static')\n\n# setup db\n# app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///ip.db'\n# db = SQLAlchemy(app)\n\n# register blueprints\nfrom app.portfolio.views import portfolio\napp.register_blueprint(portfolio)\n\n\n# Run server\nif __name__ == '__main__':\n app.run()\n", "meta": {"content_hash": "ecff54483d586d55d73543594f4cc29e", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 59, "avg_line_length": 19.84, "alnum_prop": 0.6935483870967742, "repo_name": "murphyalexandre/murphyalexandre.com", "id": "96ab437773ee8db73fb9c15f38aae57953dbe110", "size": "496", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/__init__.py", "mode": "33261", "license": "mit", "language": [{"name": "CSS", "bytes": "6712"}, {"name": "HTML", "bytes": "3357"}, {"name": "JavaScript", "bytes": "66428"}, {"name": "Python", "bytes": "2916"}]}} {"text": "if [ ! 
-f \"$1\" ] ; then\n echo \"\"\n echo \"limpa_duplicados - Copyright (C) 2009 Gabriel Fernandes\"\n echo \"\"\n echo \"Use: $0 /caminho/do/arquivo\"\n echo \"\"\n echo \"Parametros:\"\n echo \"/caminho/do/arquivo = Caminho completo do arquivo;\"\n echo \"\"\n echo \"Exemplo: $0 arquivo.txt\"\n echo \"\"\n echo \"gabriel@duel.com.br\"\n echo \"\"\n exit 1\nfi\n\n# Recebe caminho completo do arquivo para processar\nARQUIVO=$1\nARQUIVO_SAIDA=\"SAIDA-$ARQUIVO\"\nARQUIVO_DUPLICADOS=\"DUPLICADOS-$ARQUIVO\"\n\n# apaga arquivo antigos ja processados\nrm -rf \"SAIDA-$ARQUIVO\" \"DUPLICADOS-$ARQUIVO\"\n# faz backup do arquivo original\n#cp \"$ARQUIVO\" \"ORIGINAL-$ARQUIVO\"\n\n# Conta quantidade linha para processar\nNUM_LINHAS=$(cat $ARQUIVO | wc -l)\nlet NUM_LINHAS++\n\n\n# Imprime na sa\u00edda padr\u00e3o, somente os campos que n\u00e3o foram poss\u00edveis de ser incrementado.\nawk '{ if ( !umArrayLinhas[$0]++ ) { print $0 } }' $ARQUIVO > \"SAIDA-$ARQUIVO\"\n\n# Imprime na sa\u00edda padr\u00e3o, somente os campos que foram poss\u00edveis de ser incrementado seu valor no array\nawk '{ if ( umArrayLinhas[$0]++ ) { print $0 } }' $ARQUIVO > \"DUPLICADOS-$ARQUIVO\"\n\n# Conta qtd linhas processadas\nCONT_LINHAS_DUPLICADAS=$(cat \"DUPLICADOS-$ARQUIVO\" | wc -l)\nCONT_LINHAS_SAIDA=$(cat \"SAIDA-$ARQUIVO\" | wc -l)\n\nclear\necho \"Processado arquivo: $ARQUIVO\" > LOG-$ARQUIVO.txt\necho \"Registros: $NUM_LINHAS\" >> LOG-$ARQUIVO.txt\necho \"Normal:$CONT_LINHAS_SAIDA Duplo:$CONT_LINHAS_DUPLICADAS\" >> LOG-$ARQUIVO.txt\n", "meta": {"content_hash": "568f6f0ffcb2aae8e812504dc2d7f8cf", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 103, "avg_line_length": 31.555555555555557, "alnum_prop": 0.702112676056338, "repo_name": "FIVJ/ECA-Importer", "id": "30c05e51cacc0b5583d586e1d1d74e2b058ee8ff", "size": "1440", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "CSV_Converter/limpa_duplicados.sh", "mode": "33261", "license": "mit", "language": 
[{"name": "Java", "bytes": "161055"}, {"name": "Shell", "bytes": "1440"}]}} {"text": "ACCEPTED\n\n#### According to\nThe Catalogue of Life, 3rd January 2011\n\n#### Published in\nSp. pl. 1:491. 1753\n\n#### Original name\nnull\n\n### Remarks\nnull", "meta": {"content_hash": "9fe4bf374ab93d5e14643cb38861e87b", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 11.461538461538462, "alnum_prop": 0.6778523489932886, "repo_name": "mdoering/backbone", "id": "c10035e0d3a415de7ecbe00c1671eb9c95f474d0", "size": "191", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Rosales/Rosaceae/Rosa/Rosa centifolia/README.md", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "namespace chrono {\nnamespace vehicle {\n\n/// @addtogroup vehicle_terrain\n/// @{\n\n/// FEA Deformable terrain model.\n/// This class implements a terrain made up of isoparametric finite elements. It features\n/// Drucker-Prager plasticity and capped Drucker-Prager plasticity.\nclass CH_VEHICLE_API FEADeformableTerrain : public ChTerrain {\n public:\n /// Construct a default FEADeformableSoil.\n /// The user is responsible for calling various Set methods before Initialize.\n FEADeformableTerrain(ChSystem* system ///< [in/out] pointer to the containing system);\n );\n\n ~FEADeformableTerrain() {}\n\n /// Get the terrain height at the specified (x,y) location.\n virtual double GetHeight(double x, double y) const override;\n\n /// Get the terrain normal at the specified (x,y) location.\n virtual chrono::ChVector<> GetNormal(double x, double y) const override;\n\n /// Get the terrain coefficient of friction at the specified (x,y) location.\n /// This coefficient of friction value may be used by certain tire models to modify\n /// the tire characteristics, but it will have no effect on the interaction of the terrain\n /// with other objects (including tire models that do not explicitly use 
it).\n /// For FEADeformableTerrain, this function defers to the user-provided functor object\n /// of type ChTerrain::FrictionFunctor, if one was specified.\n /// Otherwise, it returns the constant value of 0.8.\n virtual float GetCoefficientFriction(double x, double y) const override;\n\n /// Set the properties of the Drucker-Prager FEA soil.\n void SetSoilParametersFEA(double rho, ///< [in] Soil density\n double Emod, ///< [in] Soil modulus of elasticity\n double nu, ///< [in] Soil Poisson ratio\n double yield_stress, ///< [in] Soil yield stress, for plasticity\n double hardening_slope, ///< [in] Soil hardening slope, for plasticity\n double friction_angle, ///< [in] Soil internal friction angle\n double dilatancy_angle ///< [in] Soil dilatancy angle\n );\n\n /// Initialize the terrain system (flat).\n /// This version creates a flat array of points.\n void Initialize(const ChVector<>& start_point, ///< [in] Base point to build terrain box\n const ChVector<>& terrain_dimension, ///< [in] terrain dimensions in the 3 directions\n const ChVector& terrain_discretization ///< [in] Number of finite elements in the 3 directions\n );\n\n /// Get the underlying FEA mesh.\n std::shared_ptr GetMesh() const { return m_mesh; }\n\n private:\n std::shared_ptr m_mesh; ///< soil mesh\n\n double m_rho; ///< Soil density\n double m_E; ///< Soil modulus of elasticity\n double m_nu; ///< Soil Poisson ratio\n\n double m_yield_stress; ///< Yield stress for soil plasticity\n double m_hardening_slope; ///< Hardening slope for soil plasticity\n double m_friction_angle; ///< Set friction angle for soil plasticity\n double m_dilatancy_angle; ///< Set dilatancy angle for soil plasticity\n};\n\n/// @} vehicle_terrain\n\n} // end namespace vehicle\n} // end namespace chrono\n\n#endif\n", "meta": {"content_hash": "bd6cbe633769b6f40e4b377a77fa429b", "timestamp": "", "source": "github", "line_count": 72, "max_line_length": 120, "avg_line_length": 47.513888888888886, "alnum_prop": 
0.6328558900906168, "repo_name": "armanpazouki/chrono", "id": "7e5b1964f16a3553d375d23ee17da3851be6d9d6", "size": "4506", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "src/chrono_vehicle/terrain/FEADeformableTerrain.h", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Batchfile", "bytes": "3754"}, {"name": "C", "bytes": "2094018"}, {"name": "C++", "bytes": "18310783"}, {"name": "CMake", "bytes": "456720"}, {"name": "CSS", "bytes": "170229"}, {"name": "Cuda", "bytes": "702762"}, {"name": "GLSL", "bytes": "4731"}, {"name": "HTML", "bytes": "7903"}, {"name": "Inno Setup", "bytes": "24125"}, {"name": "JavaScript", "bytes": "4731"}, {"name": "Lex", "bytes": "3433"}, {"name": "Objective-C", "bytes": "2096"}, {"name": "POV-Ray SDL", "bytes": "23109"}, {"name": "Python", "bytes": "186160"}, {"name": "Shell", "bytes": "1459"}]}} {"text": "import url from 'url';\n\nexport const toContainExactPath = function (actual, expected) {\n if (typeof actual !== 'string' || typeof expected !== 'string') {\n return false;\n }\n\n const actualUrl = url.parse(actual);\n\n return actualUrl.pathname === expected;\n};\n", "meta": {"content_hash": "f870e55dadd9a04a9d1b2b77722a0960", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 69, "avg_line_length": 25.09090909090909, "alnum_prop": 0.6413043478260869, "repo_name": "abelmokadem/test-matchers", "id": "9ce5624d31dbda96341930660c7c8c355143a77c", "size": "276", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/matchers/url/exact-path.js", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "5619"}]}} {"text": "\n\n \n S\n 2017-06-07T10:10:50Z\n \n \n \n \n Private Legislation\n \n \n \n \n \n \n 1.0.0\n \n \n \n 2017-04-27\n \n \n Read twice and referred to the Committee on the Judiciary. 
(text of measure as introduced: CR S2619)\n 2017-04-27\n \n \n Senate\n \n \n \n B001267\n 8302\n 1965\n \n Michael\n B001267\n F.\n \n CO\n Sen. Bennet, Michael F. [D-CO]\n D\n Bennet\n \n \n \n 979\n \n \n Official Title as Introduced\n A bill for the relief of Arturo Hernandez-Garcia.\n \n \n \n \n \n Display Title\n A bill for the relief of Arturo Hernandez-Garcia.\n \n \n \n \n \n \n \n \n 2017-04-27\n Introduced in Senate\n \n Introduced in Senate\n 00\n 2017-04-28T08:03:52Z\n 2017-04-27T04:00:00Z\n \n \n \n 115\n \n A bill for the relief of Arturo Hernandez-Garcia.\n \n \n \n 2\n \n \n 2017-04-27\n \n ssju00\n Judiciary Committee\n \n \n \n S2619\n https://www.congress.gov/congressional-record/volume-163/senate-section/page/S2619\n \n \n \n Senate\n 0\n \n Read twice and referred to the Committee on the Judiciary. (text of measure as introduced: CR S2619)\n IntroReferral\n \n \n \n IntroReferral\n 10000\n 2017-04-27\n \n Introduced in Senate\n \n Library of Congress\n 9\n \n \n \n 1\n 1\n \n \n 2017-04-28T00:41:46Z\n \n \n \n \n Standing\n \n \n 2017-04-27T19:35:35Z\n Referred to\n \n \n ssju00\n Senate\n Judiciary Committee\n \n \n \n \n \n \n text/xml\n EN\n Pursuant to Title 17 Section 105 of the United States Code, this file is not subject to copyright protection and is in the public domain.\n Congressional Research Service, Library of Congress\n This file contains bill summaries and statuses for federal legislation. A bill summary describes the most significant provisions of a piece of legislation and details the effects the legislative text may have on current law and federal programs. Bill summaries are authored by the Congressional Research Service (CRS) of the Library of Congress. As stated in Public Law 91-510 (2 USC 166 (d)(6)), one of the duties of CRS is \"to prepare summaries and digests of bills and resolutions of a public general nature introduced in the Senate or House of Representatives\". 
For more information, refer to the User Guide that accompanies this file.\n \n\n\n", "meta": {"content_hash": "d1acc477180b5bdaec0222fceec28e9c", "timestamp": "", "source": "github", "line_count": 149, "max_line_length": 676, "avg_line_length": 36.20805369127517, "alnum_prop": 0.6157553290083411, "repo_name": "peter765/power-polls", "id": "cd2f43dee8d5ee7b436c62acb6f54a13244112ed", "size": "5395", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "db/bills/s/s979/fdsys_billstatus.xml", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "58567"}, {"name": "JavaScript", "bytes": "7370"}, {"name": "Python", "bytes": "22988"}]}} {"text": "\"\"\"\n MINDBODY Public API\n\n No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501\n\n OpenAPI spec version: v6\n \n Generated by: https://github.com/swagger-api/swagger-codegen.git\n\"\"\"\n\n\nimport pprint\nimport re # noqa: F401\n\nimport six\n\nfrom swagger_client.models.staff import Staff # noqa: F401,E501\n\n\nclass ContactLogComment(object):\n \"\"\"NOTE: This class is auto generated by the swagger code generator program.\n\n Do not edit the class manually.\n \"\"\"\n\n \"\"\"\n Attributes:\n swagger_types (dict): The key is attribute name\n and the value is attribute type.\n attribute_map (dict): The key is attribute name\n and the value is json key in definition.\n \"\"\"\n swagger_types = {\n 'id': 'int',\n 'text': 'str',\n 'created_date_time': 'datetime',\n 'created_by': 'Staff'\n }\n\n attribute_map = {\n 'id': 'Id',\n 'text': 'Text',\n 'created_date_time': 'CreatedDateTime',\n 'created_by': 'CreatedBy'\n }\n\n def __init__(self, id=None, text=None, created_date_time=None, created_by=None): # noqa: E501\n \"\"\"ContactLogComment - a model defined in Swagger\"\"\" # noqa: E501\n\n self._id = None\n self._text = None\n self._created_date_time = None\n self._created_by = None\n self.discriminator = None\n\n if id is not 
None:\n self.id = id\n if text is not None:\n self.text = text\n if created_date_time is not None:\n self.created_date_time = created_date_time\n if created_by is not None:\n self.created_by = created_by\n\n @property\n def id(self):\n \"\"\"Gets the id of this ContactLogComment. # noqa: E501\n\n The comment\u2019s ID. # noqa: E501\n\n :return: The id of this ContactLogComment. # noqa: E501\n :rtype: int\n \"\"\"\n return self._id\n\n @id.setter\n def id(self, id):\n \"\"\"Sets the id of this ContactLogComment.\n\n The comment\u2019s ID. # noqa: E501\n\n :param id: The id of this ContactLogComment. # noqa: E501\n :type: int\n \"\"\"\n\n self._id = id\n\n @property\n def text(self):\n \"\"\"Gets the text of this ContactLogComment. # noqa: E501\n\n The comment\u2019s body text. # noqa: E501\n\n :return: The text of this ContactLogComment. # noqa: E501\n :rtype: str\n \"\"\"\n return self._text\n\n @text.setter\n def text(self, text):\n \"\"\"Sets the text of this ContactLogComment.\n\n The comment\u2019s body text. # noqa: E501\n\n :param text: The text of this ContactLogComment. # noqa: E501\n :type: str\n \"\"\"\n\n self._text = text\n\n @property\n def created_date_time(self):\n \"\"\"Gets the created_date_time of this ContactLogComment. # noqa: E501\n\n The local time when the comment was created. # noqa: E501\n\n :return: The created_date_time of this ContactLogComment. # noqa: E501\n :rtype: datetime\n \"\"\"\n return self._created_date_time\n\n @created_date_time.setter\n def created_date_time(self, created_date_time):\n \"\"\"Sets the created_date_time of this ContactLogComment.\n\n The local time when the comment was created. # noqa: E501\n\n :param created_date_time: The created_date_time of this ContactLogComment. # noqa: E501\n :type: datetime\n \"\"\"\n\n self._created_date_time = created_date_time\n\n @property\n def created_by(self):\n \"\"\"Gets the created_by of this ContactLogComment. 
# noqa: E501\n\n Information about the staff member who created the comment. # noqa: E501\n\n :return: The created_by of this ContactLogComment. # noqa: E501\n :rtype: Staff\n \"\"\"\n return self._created_by\n\n @created_by.setter\n def created_by(self, created_by):\n \"\"\"Sets the created_by of this ContactLogComment.\n\n Information about the staff member who created the comment. # noqa: E501\n\n :param created_by: The created_by of this ContactLogComment. # noqa: E501\n :type: Staff\n \"\"\"\n\n self._created_by = created_by\n\n def to_dict(self):\n \"\"\"Returns the model properties as a dict\"\"\"\n result = {}\n\n for attr, _ in six.iteritems(self.swagger_types):\n value = getattr(self, attr)\n if isinstance(value, list):\n result[attr] = list(map(\n lambda x: x.to_dict() if hasattr(x, \"to_dict\") else x,\n value\n ))\n elif hasattr(value, \"to_dict\"):\n result[attr] = value.to_dict()\n elif isinstance(value, dict):\n result[attr] = dict(map(\n lambda item: (item[0], item[1].to_dict())\n if hasattr(item[1], \"to_dict\") else item,\n value.items()\n ))\n else:\n result[attr] = value\n if issubclass(ContactLogComment, dict):\n for key, value in self.items():\n result[key] = value\n\n return result\n\n def to_str(self):\n \"\"\"Returns the string representation of the model\"\"\"\n return pprint.pformat(self.to_dict())\n\n def __repr__(self):\n \"\"\"For `print` and `pprint`\"\"\"\n return self.to_str()\n\n def __eq__(self, other):\n \"\"\"Returns true if both objects are equal\"\"\"\n if not isinstance(other, ContactLogComment):\n return False\n\n return self.__dict__ == other.__dict__\n\n def __ne__(self, other):\n \"\"\"Returns true if both objects are not equal\"\"\"\n return not self == other\n", "meta": {"content_hash": "670c2b94fe55a14800cbae9b9f28b451", "timestamp": "", "source": "github", "line_count": 201, "max_line_length": 119, "avg_line_length": 28.53731343283582, "alnum_prop": 0.5636331938633193, "repo_name": "mindbody/API-Examples", "id": 
"29e2bb649c6aa1b98360c42891c410ced0312b33", "size": "5761", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SDKs/Python/swagger_client/models/contact_log_comment.py", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "PHP", "bytes": "3610259"}, {"name": "Python", "bytes": "2338642"}, {"name": "Ruby", "bytes": "2284441"}, {"name": "Shell", "bytes": "5058"}]}} {"text": "package Controller.Request;\n\nimport Model.AccountHandler;\nimport Model.TweetHandler;\n\npublic class Request {\n\n\tpublic static AccountHandler accountHandler=null;\n\tpublic static TweetHandler tweetHandler=null;\n\t\n\tpublic static void setAccountHandler(AccountHandler accHandler){\n\t\tRequest.accountHandler=accHandler;\n\t}\n\t\n\tpublic static void setTweetHandler(TweetHandler tweHandler){\n\t\tRequest.tweetHandler=tweHandler;\n\t}\n\t\n\t\n}\n", "meta": {"content_hash": "869c280060a5cafe1b5a987d197e93d2", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 65, "avg_line_length": 21.2, "alnum_prop": 0.8113207547169812, "repo_name": "TweetDeleter/TweetDeleterProject", "id": "2dcf665d53b3c8252bcc1005e6944fab33d36d51", "size": "424", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Controller/Request/Request.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "31199"}]}} {"text": "BEGIN;\nDELETE FROM t1 WHERE a>32;", "meta": {"content_hash": "6e0d6cc70d1200b99f07883d873be749", "timestamp": "", "source": "github", "line_count": 2, "max_line_length": 26, "avg_line_length": 16.5, "alnum_prop": 0.7575757575757576, "repo_name": "bkiers/sqlite-parser", "id": "30bd5ba5bb64ca16ca7c2b86c1be7badeb7d243c", "size": "123", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/test/resources/pagerfault.test_30.sql", "mode": "33188", "license": "mit", "language": [{"name": "ANTLR", "bytes": "20112"}, {"name": "Java", "bytes": "6273"}, {"name": "PLpgSQL", "bytes": 
"324108"}]}} {"text": "#include \"optionsdialog.h\"\n#include \"ui_optionsdialog.h\"\n\n#include \"bitcoinunits.h\"\n#include \"monitoreddatamapper.h\"\n#include \"netbase.h\"\n#include \"optionsmodel.h\"\n\n#include \n#include \n#include \n#include \n#include \n#include \n\nOptionsDialog::OptionsDialog(QWidget *parent) :\n QDialog(parent),\n ui(new Ui::OptionsDialog),\n model(0),\n mapper(0),\n fRestartWarningDisplayed_Proxy(false),\n fRestartWarningDisplayed_Lang(false),\n fProxyIpValid(true)\n{\n ui->setupUi(this);\n\n /* Network elements init */\n#ifndef USE_UPNP\n ui->mapPortUpnp->setEnabled(false);\n#endif\n\n ui->proxyIp->setEnabled(false);\n ui->proxyPort->setEnabled(false);\n ui->proxyPort->setValidator(new QIntValidator(1, 65535, this));\n\n ui->socksVersion->setEnabled(false);\n ui->socksVersion->addItem(\"5\", 5);\n ui->socksVersion->addItem(\"4\", 4);\n ui->socksVersion->setCurrentIndex(0);\n\n connect(ui->connectSocks, SIGNAL(toggled(bool)), ui->proxyIp, SLOT(setEnabled(bool)));\n connect(ui->connectSocks, SIGNAL(toggled(bool)), ui->proxyPort, SLOT(setEnabled(bool)));\n connect(ui->connectSocks, SIGNAL(toggled(bool)), ui->socksVersion, SLOT(setEnabled(bool)));\n connect(ui->connectSocks, SIGNAL(clicked(bool)), this, SLOT(showRestartWarning_Proxy()));\n\n ui->proxyIp->installEventFilter(this);\n\n /* Window elements init */\n#ifdef Q_OS_MAC\n ui->tabWindow->setVisible(false);\n#endif\n\n /* Display elements init */\n QDir translations(\":translations\");\n ui->lang->addItem(QString(\"(\") + tr(\"default\") + QString(\")\"), QVariant(\"\"));\n foreach(const QString &langStr, translations.entryList())\n {\n QLocale locale(langStr);\n\n /** check if the locale name consists of 2 parts (language_country) */\n if(langStr.contains(\"_\"))\n {\n#if QT_VERSION >= 0x040800\n /** display language strings as \"native language - native country (locale name)\", e.g. 
\"Deutsch - Deutschland (de)\" */\n ui->lang->addItem(locale.nativeLanguageName() + QString(\" - \") + locale.nativeCountryName() + QString(\" (\") + langStr + QString(\")\"), QVariant(langStr));\n#else\n /** display language strings as \"language - country (locale name)\", e.g. \"German - Germany (de)\" */\n ui->lang->addItem(QLocale::languageToString(locale.language()) + QString(\" - \") + QLocale::countryToString(locale.country()) + QString(\" (\") + langStr + QString(\")\"), QVariant(langStr));\n#endif\n }\n else\n {\n#if QT_VERSION >= 0x040800\n /** display language strings as \"native language (locale name)\", e.g. \"Deutsch (de)\" */\n ui->lang->addItem(locale.nativeLanguageName() + QString(\" (\") + langStr + QString(\")\"), QVariant(langStr));\n#else\n /** display language strings as \"language (locale name)\", e.g. \"German (de)\" */\n ui->lang->addItem(QLocale::languageToString(locale.language()) + QString(\" (\") + langStr + QString(\")\"), QVariant(langStr));\n#endif\n }\n }\n\n ui->unit->setModel(new BitcoinUnits(this));\n\n /* Widget-to-option mapper */\n mapper = new MonitoredDataMapper(this);\n mapper->setSubmitPolicy(QDataWidgetMapper::ManualSubmit);\n mapper->setOrientation(Qt::Vertical);\n\n /* enable apply button when data modified */\n connect(mapper, SIGNAL(viewModified()), this, SLOT(enableApplyButton()));\n /* disable apply button when new data loaded */\n connect(mapper, SIGNAL(currentIndexChanged(int)), this, SLOT(disableApplyButton()));\n /* setup/change UI elements when proxy IP is invalid/valid */\n connect(this, SIGNAL(proxyIpValid(QValidatedLineEdit *, bool)), this, SLOT(handleProxyIpValid(QValidatedLineEdit *, bool)));\n}\n\nOptionsDialog::~OptionsDialog()\n{\n delete ui;\n}\n\nvoid OptionsDialog::setModel(OptionsModel *model)\n{\n this->model = model;\n\n if(model)\n {\n connect(model, SIGNAL(displayUnitChanged(int)), this, SLOT(updateDisplayUnit()));\n\n mapper->setModel(model);\n setMapper();\n mapper->toFirst();\n }\n\n /* 
update the display unit, to not use the default (\"BTC\") */\n updateDisplayUnit();\n\n /* warn only when language selection changes by user action (placed here so init via mapper doesn't trigger this) */\n connect(ui->lang, SIGNAL(valueChanged()), this, SLOT(showRestartWarning_Lang()));\n\n /* disable apply button after settings are loaded as there is nothing to save */\n disableApplyButton();\n}\n\nvoid OptionsDialog::setMapper()\n{\n /* Main */\n mapper->addMapping(ui->transactionFee, OptionsModel::Fee);\n mapper->addMapping(ui->reserveBalance, OptionsModel::ReserveBalance);\n mapper->addMapping(ui->bitcoinAtStartup, OptionsModel::StartAtStartup);\n\n /* Network */\n mapper->addMapping(ui->mapPortUpnp, OptionsModel::MapPortUPnP);\n\n mapper->addMapping(ui->connectSocks, OptionsModel::ProxyUse);\n mapper->addMapping(ui->proxyIp, OptionsModel::ProxyIP);\n mapper->addMapping(ui->proxyPort, OptionsModel::ProxyPort);\n mapper->addMapping(ui->socksVersion, OptionsModel::ProxySocksVersion);\n\n /* Window */\n#ifndef Q_OS_MAC\n mapper->addMapping(ui->minimizeToTray, OptionsModel::MinimizeToTray);\n mapper->addMapping(ui->minimizeOnClose, OptionsModel::MinimizeOnClose);\n#endif\n\n /* Display */\n mapper->addMapping(ui->lang, OptionsModel::Language);\n mapper->addMapping(ui->unit, OptionsModel::DisplayUnit);\n mapper->addMapping(ui->displayAddresses, OptionsModel::DisplayAddresses);\n mapper->addMapping(ui->coinControlFeatures, OptionsModel::CoinControlFeatures);\n}\n\nvoid OptionsDialog::enableApplyButton()\n{\n ui->applyButton->setEnabled(true);\n}\n\nvoid OptionsDialog::disableApplyButton()\n{\n ui->applyButton->setEnabled(false);\n}\n\nvoid OptionsDialog::enableSaveButtons()\n{\n /* prevent enabling of the save buttons when data modified, if there is an invalid proxy address present */\n if(fProxyIpValid)\n setSaveButtonState(true);\n}\n\nvoid OptionsDialog::disableSaveButtons()\n{\n setSaveButtonState(false);\n}\n\nvoid OptionsDialog::setSaveButtonState(bool 
fState)\n{\n ui->applyButton->setEnabled(fState);\n ui->okButton->setEnabled(fState);\n}\n\nvoid OptionsDialog::on_okButton_clicked()\n{\n mapper->submit();\n accept();\n}\n\nvoid OptionsDialog::on_cancelButton_clicked()\n{\n reject();\n}\n\nvoid OptionsDialog::on_applyButton_clicked()\n{\n mapper->submit();\n disableApplyButton();\n}\n\nvoid OptionsDialog::showRestartWarning_Proxy()\n{\n if(!fRestartWarningDisplayed_Proxy)\n {\n QMessageBox::warning(this, tr(\"Warning\"), tr(\"This setting will take effect after restarting brightcoin.\"), QMessageBox::Ok);\n fRestartWarningDisplayed_Proxy = true;\n }\n}\n\nvoid OptionsDialog::showRestartWarning_Lang()\n{\n if(!fRestartWarningDisplayed_Lang)\n {\n QMessageBox::warning(this, tr(\"Warning\"), tr(\"This setting will take effect after restarting brightcoin.\"), QMessageBox::Ok);\n fRestartWarningDisplayed_Lang = true;\n }\n}\n\nvoid OptionsDialog::updateDisplayUnit()\n{\n if(model)\n {\n /* Update transactionFee with the current unit */\n ui->transactionFee->setDisplayUnit(model->getDisplayUnit());\n }\n}\n\nvoid OptionsDialog::handleProxyIpValid(QValidatedLineEdit *object, bool fState)\n{\n // this is used in a check before re-enabling the save buttons\n fProxyIpValid = fState;\n\n if(fProxyIpValid)\n {\n enableSaveButtons();\n ui->statusLabel->clear();\n }\n else\n {\n disableSaveButtons();\n object->setValid(fProxyIpValid);\n ui->statusLabel->setStyleSheet(\"QLabel { color: red; }\");\n ui->statusLabel->setText(tr(\"The supplied proxy address is invalid.\"));\n }\n}\n\nbool OptionsDialog::eventFilter(QObject *object, QEvent *event)\n{\n if(event->type() == QEvent::FocusOut)\n {\n if(object == ui->proxyIp)\n {\n CService addr;\n /* Check proxyIp for a valid IPv4/IPv6 address and emit the proxyIpValid signal */\n emit proxyIpValid(ui->proxyIp, LookupNumeric(ui->proxyIp->text().toStdString().c_str(), addr));\n }\n }\n return QDialog::eventFilter(object, event);\n}\n", "meta": {"content_hash": 
"cfa28a860c583cdca409d1ffc8d01732", "timestamp": "", "source": "github", "line_count": 257, "max_line_length": 198, "avg_line_length": 31.782101167315176, "alnum_prop": 0.6753183153770813, "repo_name": "brightcoindeveloper/brtcoin", "id": "db23595a4f96f9f82666968d06fa8c4038d15752", "size": "8168", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/qt/optionsdialog.cpp", "mode": "33188", "license": "mit", "language": [{"name": "Assembly", "bytes": "51312"}, {"name": "C", "bytes": "34401"}, {"name": "C++", "bytes": "2584711"}, {"name": "CSS", "bytes": "1127"}, {"name": "HTML", "bytes": "50620"}, {"name": "Makefile", "bytes": "13045"}, {"name": "NSIS", "bytes": "6077"}, {"name": "Objective-C", "bytes": "858"}, {"name": "Objective-C++", "bytes": "3537"}, {"name": "Python", "bytes": "41580"}, {"name": "Roff", "bytes": "12684"}, {"name": "Shell", "bytes": "9083"}]}} {"text": "it(\"scoped slot by default content has event listen\", function (done) {\n var clickInfo = {};\n // [inject] init\n\n expect(wrap.getElementsByTagName('p')[0].innerHTML).toBe('errorrik,male,errorrik@gmail.com');\n myComponent.data.set('man.email', 'erik168@163.com');\n san.nextTick(function () {\n expect(wrap.getElementsByTagName('p')[0].innerHTML).toBe('errorrik,male,erik168@163.com');\n\n triggerEvent(wrap.getElementsByTagName('p')[0], 'click');\n setTimeout(function () {\n expect(clickInfo.email).toBe('erik168@163.com');\n expect(clickInfo.outer).toBeFalsy();\n\n myComponent.dispose();\n document.body.removeChild(wrap);\n done();\n }, 500);\n })\n});\n\n", "meta": {"content_hash": "24ba28013c28066f7534e441b6795524", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 98, "avg_line_length": 35.42857142857143, "alnum_prop": 0.6129032258064516, "repo_name": "ecomfe/san", "id": "972cebb554c47a7ab43cbc4078ff045fa07f45eb", "size": "744", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/ssr/scoped-slot-default-listened/spec.js", 
"mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "6278"}, {"name": "HTML", "bytes": "36414"}, {"name": "JavaScript", "bytes": "1184545"}, {"name": "Smarty", "bytes": "59345"}, {"name": "TypeScript", "bytes": "5224"}]}} {"text": "function showLimitedOptionsQuestion(questionUid, numberOfOptions){\n\n var questionDB = DB.child(\"questions/\"+questionUid+\"/options\"); DB.child(\"questions/\"+questionUid+\"/options\").orderByChild(\"votes\").limitToLast(numberOfOptions).once(\"value\",function(options){\n\n //adjust the votes to a counting of votes\n DB.child(\"questions/\"+questionUid+\"/simpleVoting\").once(\"value\", function(voters){\n\n console.log(\"adjust the votes to a counting of votes\")\n var counts = new Object();\n //get all options\n DB.child(\"questions/\"+questionUid+\"/options\").once(\"value\", function(options){\n options.forEach(function(option){\n counts[option.key] = 0;\n })\n\n voters.forEach(function(voter){\n if (!counts[voter.val()]){counts[voter.val()] = 0};\n counts[voter.val()]++;\n });\n\n for (i in counts){\n questionDB.child(i).update({votes:counts[i]});\n }\n\n })\n\n\n })\n\n var optionsObject = new Object();\n options.forEach(function(option){\n // if (color == undefined){\n // var color = getRandomColor();\n // DB.child(\"questions/\"+questionUid+\"/options/\"+option.key).update({color:color});\n // }\n optionsObject[option.key]= {uuid: option.key, title: option.val().title, votes: option.val().votes,color: option.val().color};\n })\n\n var preContext = new Array();\n\n for (i in optionsObject){\n preContext.push({questionUuid: questionUid ,uuid: optionsObject[i].uuid, title: optionsObject[i].title, votes: optionsObject[i].votes , color: optionsObject[i].color});\n };\n var context = {options: preContext};\n console.dir(context);\n\n renderTemplate(\"#simpleVote-tmpl\", context, \"wrapper\");\n renderTemplate(\"#simpleVoteBtns-tmpl\", context, \"footer\");\n\n $(\".voteBtn\").ePulse({\n bgColor: \"#ded9d9\",\n 
size: 'medium'\n });\n\n adjustHighetLimitedOptions(optionsObject);\n\n listenToLimitedOptions(optionsObject, questionDB);\n })\n\n lightCheckedBtn(questionUid);\n\n var turnOff = function(){\n console.log(\"turning off\");\n questionDB.once(\"value\", function(activeListeners){\n activeListeners.forEach(function(activeListenr){\n console.log(\"turning off: \"+ activeListenr.key);\n questionDB.child(activeListenr.key).off();\n })\n })\n };\n setActiveEntity(\"questions\",questionUid,\"\",\"\",turnOff)\n}\n\nfunction voteSimple(questionUid, optionUid){\n\n $(\"#info\").hide(400);\n var optionUidStr = JSON.stringify(optionUid);\n //check to see what have the user voted last\n\n DB.child(\"questions/\"+questionUid+\"/simpleVoting/\"+userUuid).once(\"value\", function(vote){\n var isExists = vote.exists();\n\n if (!isExists){\n DB.child(\"questions/\"+questionUid+\"/simpleVoting/\"+userUuid).set(optionUid);\n\n DB.child(\"questions/\"+questionUid+\"/options/\"+optionUid+\"/votes\").transaction(function(currentVote){\n return currentVote +1;\n })\n $(\".voteBtn\").css(\"border\" , \"0px solid black\");\n $(\"#\"+optionUid+\"_btn\").css(\"border\" , \"3px solid black\");\n } else {\n var lastVoted = vote.val();\n if (optionUid == lastVoted){\n DB.child(\"questions/\"+questionUid+\"/options/\"+optionUid+\"/votes\").transaction(function(currentVote){\n return currentVote -1;\n })\n DB.child(\"questions/\"+questionUid+\"/simpleVoting/\"+userUuid).remove();\n\n $(\".voteBtn\").css(\"border\" , \"0px solid black\");\n } else {\n DB.child(\"questions/\"+questionUid+\"/options/\"+lastVoted+\"/votes\").transaction(function(currentVote){\n return currentVote -1;\n });\n DB.child(\"questions/\"+questionUid+\"/options/\"+optionUid+\"/votes\").transaction(function(currentVote){\n return currentVote +1;\n })\n DB.child(\"questions/\"+questionUid+\"/simpleVoting/\"+userUuid).set(optionUid);\n $(\".voteBtn\").css(\"border\" , \"0px solid black\");\n 
$(\"#\"+optionUid+\"_btn\").css(\"border\" , \"3px solid black\");\n }\n\n }\n })\n}\n\nfunction lightCheckedBtn(questionUid){\n DB.child(\"questions/\"+questionUid+\"/simpleVoting/\"+userUuid).once(\"value\", function(checkedOption){\n\n $(\".voteBtn\").css(\"border\" , \"0px solid black\");\n $(\"#\"+checkedOption.val()+\"_btn\").css(\"border\" , \"3px solid black\");\n })\n}\n\nfunction listenToLimitedOptions (optionsObject, questionDB){\n\n for (i in optionsObject){\n questionDB.child(optionsObject[i].uuid).on(\"value\",function(optionVote){\n\n optionsObject[optionVote.key].votes = optionVote.val().votes;\n adjustHighetLimitedOptions(optionsObject);\n\n })\n }\n}\n\nfunction adjustHighetLimitedOptions(optionsObject){\n //look for max votes\n var maxVotes = 20;\n for (i in optionsObject){\n if (optionsObject[i].votes > maxVotes){\n maxVotes = optionsObject[i].votes;\n }\n }\n //find the dimensions of the wrapper to adjust drawing\n\n var NumberOfOptionsActualy = Object.keys(optionsObject).length;\n var divBarWidth = $(\"wrapper\").width()/NumberOfOptionsActualy;\n var barWidth = 0.8*divBarWidth;\n\n var wrapperDimensions = new Object();\n var wrapperHeight = $(\"wrapper\").height() - $(\"footer\").height()-20;\n\n for (i in optionsObject){\n var relativeToMaxBar;\n if (optionsObject[i].votes==undefined||optionsObject[i].votes<=0){\n relativeToMaxBar=0.1/maxVotes;\n } else{\n relativeToMaxBar = (optionsObject[i].votes/maxVotes);\n }\n\n $(\"#\"+optionsObject[i].uuid+\"_div\").css('height', wrapperHeight*relativeToMaxBar).css(\"width\", barWidth).text(optionsObject[i].votes);\n $(\"#\"+optionsObject[i].uuid+\"_btn\").css(\"background-color\", optionsObject[i].color);\n }\n}\n", "meta": {"content_hash": "4fe37c240353a5a86b6ee95c43ba6a07", "timestamp": "", "source": "github", "line_count": 164, "max_line_length": 195, "avg_line_length": 35.98780487804878, "alnum_prop": 0.6104710267705863, "repo_name": "delib-org/delib-fron2", "id": 
"9c5413af8e60915768c1f0f7671d4c1b3b78ddfd", "size": "5902", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "public/js/questions/limitedOptions.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "439329"}, {"name": "HTML", "bytes": "23894"}, {"name": "JavaScript", "bytes": "1157562"}]}} {"text": "\n\n/* Layout helpers\n----------------------------------*/\n.ui-helper-hidden {\n\tdisplay: none;\n}\n.ui-helper-hidden-accessible {\n\tborder: 0;\n\tclip: rect(0 0 0 0);\n\theight: 1px;\n\tmargin: -1px;\n\toverflow: hidden;\n\tpadding: 0;\n\tposition: absolute;\n\twidth: 1px;\n}\n.ui-helper-reset {\n\tmargin: 0;\n\tpadding: 0;\n\tborder: 0;\n\toutline: 0;\n\tline-height: 1.3;\n\ttext-decoration: none;\n\tfont-size: 100%;\n\tlist-style: none;\n}\n.ui-helper-clearfix:before,\n.ui-helper-clearfix:after {\n\tcontent: \"\";\n\tdisplay: table;\n\tborder-collapse: collapse;\n}\n.ui-helper-clearfix:after {\n\tclear: both;\n}\n.ui-helper-clearfix {\n\tmin-height: 0; /* support: IE7 */\n}\n.ui-helper-zfix {\n\twidth: 100%;\n\theight: 100%;\n\ttop: 0;\n\tleft: 0;\n\tposition: absolute;\n\topacity: 0;\n\tfilter:Alpha(Opacity=0);\n}\n\n.ui-front {\n\tz-index: 100;\n}\n\n\n/* Interaction Cues\n----------------------------------*/\n.ui-state-disabled {\n\tcursor: default !important;\n}\n\n\n/* Icons\n----------------------------------*/\n\n/* states and images */\n.ui-icon {\n\tdisplay: block;\n\ttext-indent: -99999px;\n\toverflow: hidden;\n\tbackground-repeat: no-repeat;\n}\n\n\n/* Misc visuals\n----------------------------------*/\n\n/* Overlays */\n.ui-widget-overlay {\n\tposition: fixed;\n\ttop: 0;\n\tleft: 0;\n\twidth: 100%;\n\theight: 100%;\n}\n.ui-resizable {\n\tposition: relative;\n}\n.ui-resizable-handle {\n\tposition: absolute;\n\tfont-size: 0.1px;\n\tdisplay: block;\n}\n.ui-resizable-disabled .ui-resizable-handle,\n.ui-resizable-autohide .ui-resizable-handle {\n\tdisplay: none;\n}\n.ui-resizable-n {\n\tcursor: 
n-resize;\n\theight: 7px;\n\twidth: 100%;\n\ttop: -5px;\n\tleft: 0;\n}\n.ui-resizable-s {\n\tcursor: s-resize;\n\theight: 7px;\n\twidth: 100%;\n\tbottom: -5px;\n\tleft: 0;\n}\n.ui-resizable-e {\n\tcursor: e-resize;\n\twidth: 7px;\n\tright: -5px;\n\ttop: 0;\n\theight: 100%;\n}\n.ui-resizable-w {\n\tcursor: w-resize;\n\twidth: 7px;\n\tleft: -5px;\n\ttop: 0;\n\theight: 100%;\n}\n.ui-resizable-se {\n\tcursor: se-resize;\n\twidth: 12px;\n\theight: 12px;\n\tright: 1px;\n\tbottom: 1px;\n}\n.ui-resizable-sw {\n\tcursor: sw-resize;\n\twidth: 9px;\n\theight: 9px;\n\tleft: -5px;\n\tbottom: -5px;\n}\n.ui-resizable-nw {\n\tcursor: nw-resize;\n\twidth: 9px;\n\theight: 9px;\n\tleft: -5px;\n\ttop: -5px;\n}\n.ui-resizable-ne {\n\tcursor: ne-resize;\n\twidth: 9px;\n\theight: 9px;\n\tright: -5px;\n\ttop: -5px;\n}\n.ui-button {\n\tdisplay: inline-block;\n\tposition: relative;\n\tpadding: 0;\n\tline-height: normal;\n\tmargin-right: .1em;\n\tcursor: pointer;\n\tvertical-align: middle;\n\ttext-align: center;\n\toverflow: visible; /* removes extra width in IE */\n}\n.ui-button,\n.ui-button:link,\n.ui-button:visited,\n.ui-button:hover,\n.ui-button:active {\n\ttext-decoration: none;\n}\n/* to make room for the icon, a width needs to be set here */\n.ui-button-icon-only {\n\twidth: 2.2em;\n}\n/* button elements seem to need a little more width */\nbutton.ui-button-icon-only {\n\twidth: 2.4em;\n}\n.ui-button-icons-only {\n\twidth: 3.4em;\n}\nbutton.ui-button-icons-only {\n\twidth: 3.7em;\n}\n\n/* button text element */\n.ui-button .ui-button-text {\n\tdisplay: block;\n\tline-height: normal;\n}\n.ui-button-text-only .ui-button-text {\n\tpadding: .4em 1em;\n}\n.ui-button-icon-only .ui-button-text,\n.ui-button-icons-only .ui-button-text {\n\tpadding: .4em;\n\ttext-indent: -9999999px;\n}\n.ui-button-text-icon-primary .ui-button-text,\n.ui-button-text-icons .ui-button-text {\n\tpadding: .4em 1em .4em 2.1em;\n}\n.ui-button-text-icon-secondary .ui-button-text,\n.ui-button-text-icons .ui-button-text 
{\n\tpadding: .4em 2.1em .4em 1em;\n}\n.ui-button-text-icons .ui-button-text {\n\tpadding-left: 2.1em;\n\tpadding-right: 2.1em;\n}\n/* no icon support for input elements, provide padding by default */\ninput.ui-button {\n\tpadding: .4em 1em;\n}\n\n/* button icon element(s) */\n.ui-button-icon-only .ui-icon,\n.ui-button-text-icon-primary .ui-icon,\n.ui-button-text-icon-secondary .ui-icon,\n.ui-button-text-icons .ui-icon,\n.ui-button-icons-only .ui-icon {\n\tposition: absolute;\n\ttop: 50%;\n\tmargin-top: -8px;\n}\n.ui-button-icon-only .ui-icon {\n\tleft: 50%;\n\tmargin-left: -8px;\n}\n.ui-button-text-icon-primary .ui-button-icon-primary,\n.ui-button-text-icons .ui-button-icon-primary,\n.ui-button-icons-only .ui-button-icon-primary {\n\tleft: .5em;\n}\n.ui-button-text-icon-secondary .ui-button-icon-secondary,\n.ui-button-text-icons .ui-button-icon-secondary,\n.ui-button-icons-only .ui-button-icon-secondary {\n\tright: .5em;\n}\n\n/* button sets */\n.ui-buttonset {\n\tmargin-right: 7px;\n}\n.ui-buttonset .ui-button {\n\tmargin-left: 0;\n\tmargin-right: -.3em;\n}\n\n/* workarounds */\n/* reset extra padding in Firefox, see h5bp.com/l */\ninput.ui-button::-moz-focus-inner,\nbutton.ui-button::-moz-focus-inner {\n\tborder: 0;\n\tpadding: 0;\n}\n.ui-dialog {\n\tposition: absolute;\n\ttop: 0;\n\tleft: 0;\n\tpadding: .2em;\n\toutline: 0;\n}\n.ui-dialog .ui-dialog-titlebar {\n\tpadding: .4em 1em;\n\tposition: relative;\n}\n.ui-dialog .ui-dialog-title {\n\tfloat: left;\n\tmargin: .1em 0;\n\twhite-space: nowrap;\n\twidth: 90%;\n\toverflow: hidden;\n\ttext-overflow: ellipsis;\n}\n.ui-dialog .ui-dialog-titlebar-close {\n\tposition: absolute;\n\tright: .3em;\n\ttop: 50%;\n\twidth: 21px;\n\tmargin: -10px 0 0 0;\n\tpadding: 1px;\n\theight: 20px;\n}\n.ui-dialog .ui-dialog-content {\n\tposition: relative;\n\tborder: 0;\n\tpadding: .5em 1em;\n\tbackground: none;\n\toverflow: auto;\n}\n.ui-dialog .ui-dialog-buttonpane {\n\ttext-align: left;\n\tborder-width: 1px 0 0 
0;\n\tbackground-image: none;\n\tmargin-top: .5em;\n\tpadding: .3em 1em .5em .4em;\n}\n.ui-dialog .ui-dialog-buttonpane .ui-dialog-buttonset {\n\tfloat: right;\n}\n.ui-dialog .ui-dialog-buttonpane button {\n\tmargin: .5em .4em .5em 0;\n\tcursor: pointer;\n}\n.ui-dialog .ui-resizable-se {\n\twidth: 12px;\n\theight: 12px;\n\tright: -5px;\n\tbottom: -5px;\n\tbackground-position: 16px 16px;\n}\n.ui-draggable .ui-dialog-titlebar {\n\tcursor: move;\n}\n.ui-tooltip {\n\tpadding: 8px;\n\tposition: absolute;\n\tz-index: 9999;\n\tmax-width: 300px;\n\t-webkit-box-shadow: 0 0 5px #aaa;\n\tbox-shadow: 0 0 5px #aaa;\n}\nbody .ui-tooltip {\n\tborder-width: 2px;\n}\n\n/* Component containers\n----------------------------------*/\n.ui-widget {\n\tfont-family: Verdana,Arial,sans-serif;\n\tfont-size: 1.1em;\n}\n.ui-widget .ui-widget {\n\tfont-size: 1em;\n}\n.ui-widget input,\n.ui-widget select,\n.ui-widget textarea,\n.ui-widget button {\n\tfont-family: Verdana,Arial,sans-serif;\n\tfont-size: 1em;\n}\n.ui-widget-content {\n\tborder: 1px solid #aaaaaa;\n\tbackground: #ffffff url(images/ui-bg_flat_75_ffffff_40x100.png) 50% 50% repeat-x;\n\tcolor: #222222;\n}\n.ui-widget-content a {\n\tcolor: #222222;\n}\n.ui-widget-header {\n\tborder: 1px solid #aaaaaa;\n\tbackground: #cccccc url(images/ui-bg_highlight-soft_75_cccccc_1x100.png) 50% 50% repeat-x;\n\tcolor: #222222;\n\tfont-weight: bold;\n}\n.ui-widget-header a {\n\tcolor: #222222;\n}\n\n/* Interaction states\n----------------------------------*/\n.ui-state-default,\n.ui-widget-content .ui-state-default,\n.ui-widget-header .ui-state-default {\n\tborder: 1px solid #d3d3d3;\n\tbackground: #e6e6e6 url(images/ui-bg_glass_75_e6e6e6_1x400.png) 50% 50% repeat-x;\n\tfont-weight: normal;\n\tcolor: #555555;\n}\n.ui-state-default a,\n.ui-state-default a:link,\n.ui-state-default a:visited {\n\tcolor: #555555;\n\ttext-decoration: none;\n}\n.ui-state-hover,\n.ui-widget-content .ui-state-hover,\n.ui-widget-header 
.ui-state-hover,\n.ui-state-focus,\n.ui-widget-content .ui-state-focus,\n.ui-widget-header .ui-state-focus {\n\tborder: 1px solid #999999;\n\tbackground: #dadada url(images/ui-bg_glass_75_dadada_1x400.png) 50% 50% repeat-x;\n\tfont-weight: normal;\n\tcolor: #212121;\n}\n.ui-state-hover a,\n.ui-state-hover a:hover,\n.ui-state-hover a:link,\n.ui-state-hover a:visited {\n\tcolor: #212121;\n\ttext-decoration: none;\n}\n.ui-state-active,\n.ui-widget-content .ui-state-active,\n.ui-widget-header .ui-state-active {\n\tborder: 1px solid #aaaaaa;\n\tbackground: #ffffff url(images/ui-bg_glass_65_ffffff_1x400.png) 50% 50% repeat-x;\n\tfont-weight: normal;\n\tcolor: #212121;\n}\n.ui-state-active a,\n.ui-state-active a:link,\n.ui-state-active a:visited {\n\tcolor: #212121;\n\ttext-decoration: none;\n}\n\n/* Interaction Cues\n----------------------------------*/\n.ui-state-highlight,\n.ui-widget-content .ui-state-highlight,\n.ui-widget-header .ui-state-highlight {\n\tborder: 1px solid #fcefa1;\n\tbackground: #fbf9ee url(images/ui-bg_glass_55_fbf9ee_1x400.png) 50% 50% repeat-x;\n\tcolor: #363636;\n}\n.ui-state-highlight a,\n.ui-widget-content .ui-state-highlight a,\n.ui-widget-header .ui-state-highlight a {\n\tcolor: #363636;\n}\n.ui-state-error,\n.ui-widget-content .ui-state-error,\n.ui-widget-header .ui-state-error {\n\tborder: 1px solid #cd0a0a;\n\tbackground: #fef1ec url(images/ui-bg_glass_95_fef1ec_1x400.png) 50% 50% repeat-x;\n\tcolor: #cd0a0a;\n}\n.ui-state-error a,\n.ui-widget-content .ui-state-error a,\n.ui-widget-header .ui-state-error a {\n\tcolor: #cd0a0a;\n}\n.ui-state-error-text,\n.ui-widget-content .ui-state-error-text,\n.ui-widget-header .ui-state-error-text {\n\tcolor: #cd0a0a;\n}\n.ui-priority-primary,\n.ui-widget-content .ui-priority-primary,\n.ui-widget-header .ui-priority-primary {\n\tfont-weight: bold;\n}\n.ui-priority-secondary,\n.ui-widget-content .ui-priority-secondary,\n.ui-widget-header .ui-priority-secondary {\n\topacity: 
.7;\n\tfilter:Alpha(Opacity=70);\n\tfont-weight: normal;\n}\n.ui-state-disabled,\n.ui-widget-content .ui-state-disabled,\n.ui-widget-header .ui-state-disabled {\n\topacity: .35;\n\tfilter:Alpha(Opacity=35);\n\tbackground-image: none;\n}\n.ui-state-disabled .ui-icon {\n\tfilter:Alpha(Opacity=35); /* For IE8 - See #6059 */\n}\n\n/* Icons\n----------------------------------*/\n\n/* states and images */\n.ui-icon {\n\twidth: 16px;\n\theight: 16px;\n}\n.ui-icon,\n.ui-widget-content .ui-icon {\n\tbackground-image: url(images/ui-icons_222222_256x240.png);\n}\n.ui-widget-header .ui-icon {\n\tbackground-image: url(images/ui-icons_222222_256x240.png);\n}\n.ui-state-default .ui-icon {\n\tbackground-image: url(images/ui-icons_888888_256x240.png);\n}\n.ui-state-hover .ui-icon,\n.ui-state-focus .ui-icon {\n\tbackground-image: url(images/ui-icons_454545_256x240.png);\n}\n.ui-state-active .ui-icon {\n\tbackground-image: url(images/ui-icons_454545_256x240.png);\n}\n.ui-state-highlight .ui-icon {\n\tbackground-image: url(images/ui-icons_2e83ff_256x240.png);\n}\n.ui-state-error .ui-icon,\n.ui-state-error-text .ui-icon {\n\tbackground-image: url(images/ui-icons_cd0a0a_256x240.png);\n}\n\n/* positioning */\n.ui-icon-blank { background-position: 16px 16px; }\n.ui-icon-carat-1-n { background-position: 0 0; }\n.ui-icon-carat-1-ne { background-position: -16px 0; }\n.ui-icon-carat-1-e { background-position: -32px 0; }\n.ui-icon-carat-1-se { background-position: -48px 0; }\n.ui-icon-carat-1-s { background-position: -64px 0; }\n.ui-icon-carat-1-sw { background-position: -80px 0; }\n.ui-icon-carat-1-w { background-position: -96px 0; }\n.ui-icon-carat-1-nw { background-position: -112px 0; }\n.ui-icon-carat-2-n-s { background-position: -128px 0; }\n.ui-icon-carat-2-e-w { background-position: -144px 0; }\n.ui-icon-triangle-1-n { background-position: 0 -16px; }\n.ui-icon-triangle-1-ne { background-position: -16px -16px; }\n.ui-icon-triangle-1-e { background-position: -32px -16px; 
}\n.ui-icon-triangle-1-se { background-position: -48px -16px; }\n.ui-icon-triangle-1-s { background-position: -64px -16px; }\n.ui-icon-triangle-1-sw { background-position: -80px -16px; }\n.ui-icon-triangle-1-w { background-position: -96px -16px; }\n.ui-icon-triangle-1-nw { background-position: -112px -16px; }\n.ui-icon-triangle-2-n-s { background-position: -128px -16px; }\n.ui-icon-triangle-2-e-w { background-position: -144px -16px; }\n.ui-icon-arrow-1-n { background-position: 0 -32px; }\n.ui-icon-arrow-1-ne { background-position: -16px -32px; }\n.ui-icon-arrow-1-e { background-position: -32px -32px; }\n.ui-icon-arrow-1-se { background-position: -48px -32px; }\n.ui-icon-arrow-1-s { background-position: -64px -32px; }\n.ui-icon-arrow-1-sw { background-position: -80px -32px; }\n.ui-icon-arrow-1-w { background-position: -96px -32px; }\n.ui-icon-arrow-1-nw { background-position: -112px -32px; }\n.ui-icon-arrow-2-n-s { background-position: -128px -32px; }\n.ui-icon-arrow-2-ne-sw { background-position: -144px -32px; }\n.ui-icon-arrow-2-e-w { background-position: -160px -32px; }\n.ui-icon-arrow-2-se-nw { background-position: -176px -32px; }\n.ui-icon-arrowstop-1-n { background-position: -192px -32px; }\n.ui-icon-arrowstop-1-e { background-position: -208px -32px; }\n.ui-icon-arrowstop-1-s { background-position: -224px -32px; }\n.ui-icon-arrowstop-1-w { background-position: -240px -32px; }\n.ui-icon-arrowthick-1-n { background-position: 0 -48px; }\n.ui-icon-arrowthick-1-ne { background-position: -16px -48px; }\n.ui-icon-arrowthick-1-e { background-position: -32px -48px; }\n.ui-icon-arrowthick-1-se { background-position: -48px -48px; }\n.ui-icon-arrowthick-1-s { background-position: -64px -48px; }\n.ui-icon-arrowthick-1-sw { background-position: -80px -48px; }\n.ui-icon-arrowthick-1-w { background-position: -96px -48px; }\n.ui-icon-arrowthick-1-nw { background-position: -112px -48px; }\n.ui-icon-arrowthick-2-n-s { background-position: -128px -48px; 
}\n.ui-icon-arrowthick-2-ne-sw { background-position: -144px -48px; }\n.ui-icon-arrowthick-2-e-w { background-position: -160px -48px; }\n.ui-icon-arrowthick-2-se-nw { background-position: -176px -48px; }\n.ui-icon-arrowthickstop-1-n { background-position: -192px -48px; }\n.ui-icon-arrowthickstop-1-e { background-position: -208px -48px; }\n.ui-icon-arrowthickstop-1-s { background-position: -224px -48px; }\n.ui-icon-arrowthickstop-1-w { background-position: -240px -48px; }\n.ui-icon-arrowreturnthick-1-w { background-position: 0 -64px; }\n.ui-icon-arrowreturnthick-1-n { background-position: -16px -64px; }\n.ui-icon-arrowreturnthick-1-e { background-position: -32px -64px; }\n.ui-icon-arrowreturnthick-1-s { background-position: -48px -64px; }\n.ui-icon-arrowreturn-1-w { background-position: -64px -64px; }\n.ui-icon-arrowreturn-1-n { background-position: -80px -64px; }\n.ui-icon-arrowreturn-1-e { background-position: -96px -64px; }\n.ui-icon-arrowreturn-1-s { background-position: -112px -64px; }\n.ui-icon-arrowrefresh-1-w { background-position: -128px -64px; }\n.ui-icon-arrowrefresh-1-n { background-position: -144px -64px; }\n.ui-icon-arrowrefresh-1-e { background-position: -160px -64px; }\n.ui-icon-arrowrefresh-1-s { background-position: -176px -64px; }\n.ui-icon-arrow-4 { background-position: 0 -80px; }\n.ui-icon-arrow-4-diag { background-position: -16px -80px; }\n.ui-icon-extlink { background-position: -32px -80px; }\n.ui-icon-newwin { background-position: -48px -80px; }\n.ui-icon-refresh { background-position: -64px -80px; }\n.ui-icon-shuffle { background-position: -80px -80px; }\n.ui-icon-transfer-e-w { background-position: -96px -80px; }\n.ui-icon-transferthick-e-w { background-position: -112px -80px; }\n.ui-icon-folder-collapsed { background-position: 0 -96px; }\n.ui-icon-folder-open { background-position: -16px -96px; }\n.ui-icon-document { background-position: -32px -96px; }\n.ui-icon-document-b { background-position: -48px -96px; }\n.ui-icon-note { 
background-position: -64px -96px; }\n.ui-icon-mail-closed { background-position: -80px -96px; }\n.ui-icon-mail-open { background-position: -96px -96px; }\n.ui-icon-suitcase { background-position: -112px -96px; }\n.ui-icon-comment { background-position: -128px -96px; }\n.ui-icon-person { background-position: -144px -96px; }\n.ui-icon-print { background-position: -160px -96px; }\n.ui-icon-trash { background-position: -176px -96px; }\n.ui-icon-locked { background-position: -192px -96px; }\n.ui-icon-unlocked { background-position: -208px -96px; }\n.ui-icon-bookmark { background-position: -224px -96px; }\n.ui-icon-tag { background-position: -240px -96px; }\n.ui-icon-home { background-position: 0 -112px; }\n.ui-icon-flag { background-position: -16px -112px; }\n.ui-icon-calendar { background-position: -32px -112px; }\n.ui-icon-cart { background-position: -48px -112px; }\n.ui-icon-pencil { background-position: -64px -112px; }\n.ui-icon-clock { background-position: -80px -112px; }\n.ui-icon-disk { background-position: -96px -112px; }\n.ui-icon-calculator { background-position: -112px -112px; }\n.ui-icon-zoomin { background-position: -128px -112px; }\n.ui-icon-zoomout { background-position: -144px -112px; }\n.ui-icon-search { background-position: -160px -112px; }\n.ui-icon-wrench { background-position: -176px -112px; }\n.ui-icon-gear { background-position: -192px -112px; }\n.ui-icon-heart { background-position: -208px -112px; }\n.ui-icon-star { background-position: -224px -112px; }\n.ui-icon-link { background-position: -240px -112px; }\n.ui-icon-cancel { background-position: 0 -128px; }\n.ui-icon-plus { background-position: -16px -128px; }\n.ui-icon-plusthick { background-position: -32px -128px; }\n.ui-icon-minus { background-position: -48px -128px; }\n.ui-icon-minusthick { background-position: -64px -128px; }\n.ui-icon-close { background-position: -80px -128px; }\n.ui-icon-closethick { background-position: -96px -128px; }\n.ui-icon-key { background-position: -112px -128px; 
}\n.ui-icon-lightbulb { background-position: -128px -128px; }\n.ui-icon-scissors { background-position: -144px -128px; }\n.ui-icon-clipboard { background-position: -160px -128px; }\n.ui-icon-copy { background-position: -176px -128px; }\n.ui-icon-contact { background-position: -192px -128px; }\n.ui-icon-image { background-position: -208px -128px; }\n.ui-icon-video { background-position: -224px -128px; }\n.ui-icon-script { background-position: -240px -128px; }\n.ui-icon-alert { background-position: 0 -144px; }\n.ui-icon-info { background-position: -16px -144px; }\n.ui-icon-notice { background-position: -32px -144px; }\n.ui-icon-help { background-position: -48px -144px; }\n.ui-icon-check { background-position: -64px -144px; }\n.ui-icon-bullet { background-position: -80px -144px; }\n.ui-icon-radio-on { background-position: -96px -144px; }\n.ui-icon-radio-off { background-position: -112px -144px; }\n.ui-icon-pin-w { background-position: -128px -144px; }\n.ui-icon-pin-s { background-position: -144px -144px; }\n.ui-icon-play { background-position: 0 -160px; }\n.ui-icon-pause { background-position: -16px -160px; }\n.ui-icon-seek-next { background-position: -32px -160px; }\n.ui-icon-seek-prev { background-position: -48px -160px; }\n.ui-icon-seek-end { background-position: -64px -160px; }\n.ui-icon-seek-start { background-position: -80px -160px; }\n/* ui-icon-seek-first is deprecated, use ui-icon-seek-start instead */\n.ui-icon-seek-first { background-position: -80px -160px; }\n.ui-icon-stop { background-position: -96px -160px; }\n.ui-icon-eject { background-position: -112px -160px; }\n.ui-icon-volume-off { background-position: -128px -160px; }\n.ui-icon-volume-on { background-position: -144px -160px; }\n.ui-icon-power { background-position: 0 -176px; }\n.ui-icon-signal-diag { background-position: -16px -176px; }\n.ui-icon-signal { background-position: -32px -176px; }\n.ui-icon-battery-0 { background-position: -48px -176px; }\n.ui-icon-battery-1 { background-position: -64px 
-176px; }\n.ui-icon-battery-2 { background-position: -80px -176px; }\n.ui-icon-battery-3 { background-position: -96px -176px; }\n.ui-icon-circle-plus { background-position: 0 -192px; }\n.ui-icon-circle-minus { background-position: -16px -192px; }\n.ui-icon-circle-close { background-position: -32px -192px; }\n.ui-icon-circle-triangle-e { background-position: -48px -192px; }\n.ui-icon-circle-triangle-s { background-position: -64px -192px; }\n.ui-icon-circle-triangle-w { background-position: -80px -192px; }\n.ui-icon-circle-triangle-n { background-position: -96px -192px; }\n.ui-icon-circle-arrow-e { background-position: -112px -192px; }\n.ui-icon-circle-arrow-s { background-position: -128px -192px; }\n.ui-icon-circle-arrow-w { background-position: -144px -192px; }\n.ui-icon-circle-arrow-n { background-position: -160px -192px; }\n.ui-icon-circle-zoomin { background-position: -176px -192px; }\n.ui-icon-circle-zoomout { background-position: -192px -192px; }\n.ui-icon-circle-check { background-position: -208px -192px; }\n.ui-icon-circlesmall-plus { background-position: 0 -208px; }\n.ui-icon-circlesmall-minus { background-position: -16px -208px; }\n.ui-icon-circlesmall-close { background-position: -32px -208px; }\n.ui-icon-squaresmall-plus { background-position: -48px -208px; }\n.ui-icon-squaresmall-minus { background-position: -64px -208px; }\n.ui-icon-squaresmall-close { background-position: -80px -208px; }\n.ui-icon-grip-dotted-vertical { background-position: 0 -224px; }\n.ui-icon-grip-dotted-horizontal { background-position: -16px -224px; }\n.ui-icon-grip-solid-vertical { background-position: -32px -224px; }\n.ui-icon-grip-solid-horizontal { background-position: -48px -224px; }\n.ui-icon-gripsmall-diagonal-se { background-position: -64px -224px; }\n.ui-icon-grip-diagonal-se { background-position: -80px -224px; }\n\n\n/* Misc visuals\n----------------------------------*/\n\n/* Corner radius */\n.ui-corner-all,\n.ui-corner-top,\n.ui-corner-left,\n.ui-corner-tl 
{\n\tborder-top-left-radius: 4px;\n}\n.ui-corner-all,\n.ui-corner-top,\n.ui-corner-right,\n.ui-corner-tr {\n\tborder-top-right-radius: 4px;\n}\n.ui-corner-all,\n.ui-corner-bottom,\n.ui-corner-left,\n.ui-corner-bl {\n\tborder-bottom-left-radius: 4px;\n}\n.ui-corner-all,\n.ui-corner-bottom,\n.ui-corner-right,\n.ui-corner-br {\n\tborder-bottom-right-radius: 4px;\n}\n\n/* Overlays */\n.ui-widget-overlay {\n\tbackground: #aaaaaa url(images/ui-bg_flat_0_aaaaaa_40x100.png) 50% 50% repeat-x;\n\topacity: .3;\n\tfilter: Alpha(Opacity=30);\n}\n.ui-widget-shadow {\n\tmargin: -8px 0 0 -8px;\n\tpadding: 8px;\n\tbackground: #aaaaaa url(images/ui-bg_flat_0_aaaaaa_40x100.png) 50% 50% repeat-x;\n\topacity: .3;\n\tfilter: Alpha(Opacity=30);\n\tborder-radius: 8px;\n}\n", "meta": {"content_hash": "191522e64e11f60c4a079dade6237c16", "timestamp": "", "source": "github", "line_count": 719, "max_line_length": 91, "avg_line_length": 29.581363004172463, "alnum_prop": 0.6894541351262401, "repo_name": "ZeusWPI/FK-enrolment", "id": "0d81a92f50179d23db780f8a1e26de9633d09710", "size": "22864", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vendor/assets/stylesheets/jquery-ui.custom.css", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "CSS", "bytes": "11594"}, {"name": "HTML", "bytes": "41784"}, {"name": "JavaScript", "bytes": "2088"}, {"name": "Ruby", "bytes": "140042"}]}} {"text": "\n#pragma once\n\n#include \n#include \n#include \n#include \n\n__BEGIN_CDECLS\n\n#define MUTEX_MAGIC (0x6D757478) // 'mutx'\n\ntypedef struct mutex {\n uint32_t magic;\n thread_t *holder;\n int count;\n wait_queue_t wait;\n} mutex_t;\n\n#define MUTEX_INITIAL_VALUE(m) \\\n{ \\\n .magic = MUTEX_MAGIC, \\\n .holder = NULL, \\\n .count = 0, \\\n .wait = WAIT_QUEUE_INITIAL_VALUE((m).wait), \\\n}\n\n/* Rules for Mutexes:\n * - Mutexes are only safe to use from thread context.\n * - Mutexes are non-recursive.\n*/\n\nvoid mutex_init(mutex_t *);\nvoid mutex_destroy(mutex_t 
*);\nstatus_t mutex_acquire_timeout(mutex_t *, lk_time_t); /* try to acquire the mutex with a timeout value */\nstatus_t mutex_release(mutex_t *);\n\nstatic inline status_t mutex_acquire(mutex_t *m) {\n return mutex_acquire_timeout(m, INFINITE_TIME);\n}\n\n/* does the current thread hold the mutex? */\nstatic bool is_mutex_held(mutex_t *m) {\n return m->holder == get_current_thread();\n}\n\n__END_CDECLS\n", "meta": {"content_hash": "3ea7f0a02373e74ad276588c857845ef", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 105, "avg_line_length": 21.53191489361702, "alnum_prop": 0.6590909090909091, "repo_name": "travisg/lk", "id": "b0182dcad4b7a19644c5503c40752c22cfea84f9", "size": "1253", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "kernel/include/kernel/mutex.h", "mode": "33188", "license": "mit", "language": [{"name": "Assembly", "bytes": "155785"}, {"name": "C", "bytes": "3642589"}, {"name": "C++", "bytes": "237085"}, {"name": "Makefile", "bytes": "97565"}, {"name": "Objective-C", "bytes": "12298"}, {"name": "Python", "bytes": "4345"}, {"name": "Shell", "bytes": "10818"}, {"name": "Tcl", "bytes": "288"}]}} {"text": "UIMiniWindowContainer = extends(UIWidget, \"UIMiniWindowContainer\")\n\nfunction UIMiniWindowContainer.create()\n local container = UIMiniWindowContainer.internalCreate()\n container.scheduledWidgets = {}\n container:setFocusable(false)\n container:setPhantom(true)\n return container\nend\n\n-- TODO: connect to window onResize event\n-- TODO: try to resize another widget?\n-- TODO: try to find another panel?\nfunction UIMiniWindowContainer:fitAll(noRemoveChild)\n if not self:isVisible() then\n return\n end\n\n if not noRemoveChild then\n local children = self:getChildren()\n if #children > 0 then\n noRemoveChild = children[#children]\n else\n return\n end\n end\n\n local sumHeight = 0\n local children = self:getChildren()\n for i=1,#children do\n if children[i]:isVisible() then\n sumHeight = sumHeight + 
children[i]:getHeight()\n end\n end\n\n local selfHeight = self:getHeight() - (self:getPaddingTop() + self:getPaddingBottom())\n if sumHeight <= selfHeight then\n return\n end\n\n local removeChildren = {}\n\n -- try to resize noRemoveChild\n local maximumHeight = selfHeight - (sumHeight - noRemoveChild:getHeight())\n if noRemoveChild:isResizeable() and noRemoveChild:getMinimumHeight() <= maximumHeight then\n sumHeight = sumHeight - noRemoveChild:getHeight() + maximumHeight\n addEvent(function() noRemoveChild:setHeight(maximumHeight) end)\n end\n\n -- try to remove no-save widget\n for i=#children,1,-1 do\n if sumHeight <= selfHeight then\n break\n end\n\n local child = children[i]\n if child ~= noRemoveChild and not child.save then\n local childHeight = child:getHeight()\n sumHeight = sumHeight - childHeight\n table.insert(removeChildren, child)\n end\n end\n\n -- try to remove save widget\n for i=#children,1,-1 do\n if sumHeight <= selfHeight then\n break\n end\n\n local child = children[i]\n if child ~= noRemoveChild and child:isVisible() then\n local childHeight = child:getHeight()\n sumHeight = sumHeight - childHeight\n table.insert(removeChildren, child)\n end\n end\n\n -- close widgets\n for i=1,#removeChildren do\n removeChildren[i]:close()\n end\nend\n\nfunction UIMiniWindowContainer:onDrop(widget, mousePos)\n if widget:getClassName() == 'UIMiniWindow' then\n local oldParent = widget:getParent()\n if oldParent == self then\n return true\n end\n\n if oldParent then\n oldParent:removeChild(widget)\n end\n\n if widget.movedWidget then\n local index = self:getChildIndex(widget.movedWidget)\n self:insertChild(index + widget.movedIndex, widget)\n else\n self:addChild(widget)\n end\n\n self:fitAll(widget)\n return true\n end\nend\n\nfunction UIMiniWindowContainer:swapInsert(widget, index)\n local oldParent = widget:getParent()\n local oldIndex = self:getChildIndex(widget)\n\n if oldParent == self and oldIndex ~= index then\n local oldWidget = 
self:getChildByIndex(index)\n if oldWidget then\n self:removeChild(oldWidget)\n self:insertChild(oldIndex, oldWidget)\n end\n self:removeChild(widget)\n self:insertChild(index, widget)\n end\nend\n\nfunction UIMiniWindowContainer:scheduleInsert(widget, index)\n if index - 1 > self:getChildCount() then\n if self.scheduledWidgets[index] then\n pdebug('replacing scheduled widget id ' .. widget:getId())\n end\n self.scheduledWidgets[index] = widget\n else\n local oldParent = widget:getParent()\n if oldParent ~= self then\n if oldParent then\n oldParent:removeChild(widget)\n end\n self:insertChild(index, widget)\n\n while true do\n local placed = false\n for nIndex,nWidget in pairs(self.scheduledWidgets) do\n if nIndex - 1 <= self:getChildCount() then\n self:insertChild(nIndex, nWidget)\n self.scheduledWidgets[nIndex] = nil\n placed = true\n break\n end\n end\n if not placed then break end\n end\n\n end\n end\nend\n\nfunction UIMiniWindowContainer:order()\n local children = self:getChildren()\n for i=1,#children do\n if not children[i].miniLoaded then return end\n end\n\n for i=1,#children do\n if children[i].miniIndex then\n self:swapInsert(children[i], children[i].miniIndex)\n end\n end\nend\n\nfunction UIMiniWindowContainer:saveChildren()\n local children = self:getChildren()\n local ignoreIndex = 0\n for i=1,#children do\n if children[i].save then\n children[i]:saveParentIndex(self:getId(), i - ignoreIndex)\n else\n ignoreIndex = ignoreIndex + 1\n end\n end\nend\n", "meta": {"content_hash": "97c7c2107f2c4e082086aff8889ecfbf", "timestamp": "", "source": "github", "line_count": 176, "max_line_length": 92, "avg_line_length": 25.72159090909091, "alnum_prop": 0.6920698034018113, "repo_name": "Riverlance/kingdom-age-game-linux", "id": "a95841588f9ab6cb5bf9ebfbe7fa2cd8a7160a70", "size": "4540", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/corelib/ui/uiminiwindowcontainer.lua", "mode": "33188", "license": "mit", "language": []}} {"text": 
"ogex_int_array_t *ogex_int_array_create(int capacity)\n{\n ogex_int_array_t *array = calloc(1, sizeof(ogex_int_array_t));\n if (array == NULL) {\n return NULL;\n }\n\n vec_init(array);\n vec_reserve(array, capacity);\n\n return array;\n}\n\nvoid ogex_int_array_free(ogex_int_array_t *array)\n{\n assert(array != NULL);\n vec_deinit(array);\n free(array);\n}\n\nvoid ogex_int_array_destroy(ogex_int_array_t *array)\n{\n assert(array != NULL);\n vec_deinit(array);\n free(array);\n}\n\nint ogex_int_array_push(ogex_int_array_t *array, int value)\n{\n assert(array != NULL);\n int result = vec_push(array, value);\n return result != -1;\n}\n\nint ogex_int_array_get(ogex_int_array_t *array, int index)\n{\n assert(array != NULL);\n assert(index < 0 && index >= array->length);\n\n return vec_get(array, index);\n}\n\nint ogex_int_array_pop(ogex_int_array_t *array)\n{\n assert(array != NULL);\n return vec_pop(array);\n}\n\n", "meta": {"content_hash": "93c563d65b0f97e9bcda631bdb976f27", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 66, "avg_line_length": 19.583333333333332, "alnum_prop": 0.6212765957446809, "repo_name": "warmwaffles/openddl", "id": "fdca7e6ac1d9923e6f9258b94062f52b9a53e6b5", "size": "1013", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "opengex/src/opengex/int_array.c", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "147318"}, {"name": "CMake", "bytes": "1293"}, {"name": "Objective-C", "bytes": "501"}]}} {"text": "var React = require('react');\nvar mdlHandler = require('./mdlComponentHandler');\n\nfunction mdlUpgradable(spec) {\n\n var mdlPrefix = 'mdl-',\n jsPrefix = 'js-',\n displayName = spec.displayName,\n componentName = displayName.replace('Material', '');\n\n var upgradeMdlComponent = function (node) {\n mdlHandler.upgradeElement(node, displayName);\n };\n\n var downgradeMdlComponent = function(node) {\n mdlHandler.downgradeElements(node, displayName);\n };\n\n return 
React.createClass({\n\n displayName: displayName,\n\n propTypes: { className: React.PropTypes.string },\n\n getDefaultProps: function() {\n return { className: '' };\n },\n\n componentDidMount: function() {\n upgradeMdlComponent(this.getDOMNode());\n },\n\n componentWillUpdate: function() {\n downgradeMdlComponent(this.getDOMNode());\n },\n\n componentDidUpdate: function() {\n upgradeMdlComponent(this.getDOMNode());\n },\n\n componentWillUnmount: function() {\n downgradeMdlComponent(this.getDOMNode());\n },\n\n getUpgradedMDLClassList: function() {\n var cssClass = mdlPrefix + jsPrefix + componentName.replace(/([a-z])([A-Z])/g, '$1-$2').toLowerCase(),\n baseClass = cssClass.replace(jsPrefix, '');\n\n return [baseClass, cssClass];\n },\n\n render: function() {\n\n var classList = this.props.className.split(' '),\n mdlClassList = this.getUpgradedMDLClassList();\n\n this.element = React.createElement(spec, this.props);\n\n return React.cloneElement(this.element, {\n className: mdlClassList.concat(classList).join(' ')\n });\n }\n\n });\n\n}\n\nmodule.exports = mdlUpgradable;\n", "meta": {"content_hash": "42adb3eb020dac4c91d3d2473d775452", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 108, "avg_line_length": 24.220588235294116, "alnum_prop": 0.6490588949605343, "repo_name": "EdStudio/react-mdlite", "id": "4b6bd5768c7399c8c84c538ad865561f68d8b748", "size": "1647", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/utils/mdlUpgradable.js", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "50429"}, {"name": "Shell", "bytes": "190"}]}} {"text": "/* Generic definitions */\n\n\n\n\n/* Assertions (useful to generate conditional code) */\n/* Current type and class (and size, if applicable) */\n/* Value methods */\n/* Interfaces (keys) */\n/* Interfaces (values) */\n/* Abstract implementations (keys) */\n/* Abstract implementations (values) */\n/* Static containers (keys) */\n/* Static containers 
(values) */\n/* Implementations */\n/* Synchronized wrappers */\n/* Unmodifiable wrappers */\n/* Other wrappers */\n/* Methods (keys) */\n/* Methods (values) */\n/* Methods (keys/values) */\n/* Methods that have special names depending on keys (but the special names depend on values) */\n/* Equality */\n/* Object/Reference-only definitions (keys) */\n/* Primitive-type-only definitions (keys) */\n/* Object/Reference-only definitions (values) */\n/* Primitive-type-only definitions (values) */\n/*\t\t \n * Copyright (C) 2002-2013 Sebastiano Vigna \n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License. \n */\npackage it.unimi.dsi.fastutil.ints;\nimport it.unimi.dsi.fastutil.chars.CharCollection;\nimport it.unimi.dsi.fastutil.objects.ObjectSet;\nimport it.unimi.dsi.fastutil.objects.ObjectIterator;\nimport java.util.Map;\n/** A type-specific {@link Map}; provides some additional methods that use polymorphism to avoid (un)boxing, and handling of a default return value.\n *\n *

Besides extending the corresponding type-specific {@linkplain it.unimi.dsi.fastutil.Function function}, this interface strengthens {@link #entrySet()},\n * {@link #keySet()} and {@link #values()}. Maps returning entry sets of type {@link FastEntrySet} support also fast iteration.\n *\n *

A submap or subset may or may not have an\n * independent default return value (which however must be initialized to the\n * default return value of the originator).\n *\n * @see Map\n */\npublic interface Int2CharMap extends Int2CharFunction , Map {\n /** An entry set providing fast iteration. \n\t *\n\t *

In some cases (e.g., hash-based classes) iteration over an entry set requires the creation\n\t * of a large number of {@link java.util.Map.Entry} objects. Some fastutil\n\t * maps might return {@linkplain #entrySet() entry set} objects of type FastEntrySet: in this case, {@link #fastIterator() fastIterator()}\n\t * will return an iterator that is guaranteed not to create a large number of objects, possibly\n\t * by returning always the same entry (of course, mutated).\n\t */\n public interface FastEntrySet extends ObjectSet {\n /** Returns a fast iterator over this entry set; the iterator might return always the same entry object, suitably mutated.\n\t\t *\n\t\t * @return a fast iterator over this entry set; the iterator might return always the same {@link java.util.Map.Entry} object, suitably mutated.\n\t\t */\n public ObjectIterator fastIterator();\n }\n /** Returns a set view of the mappings contained in this map.\n\t *

Note that this specification strengthens the one given in {@link Map#entrySet()}.\n\t *\n\t * @return a set view of the mappings contained in this map.\n\t * @see Map#entrySet()\n\t */\n ObjectSet> entrySet();\n /** Returns a type-specific set view of the mappings contained in this map.\n\t *\n\t *

This method is necessary because there is no inheritance along\n\t * type parameters: it is thus impossible to strengthen {@link #entrySet()}\n\t * so that it returns an {@link it.unimi.dsi.fastutil.objects.ObjectSet}\n\t * of objects of type {@link java.util.Map.Entry} (the latter makes it possible to\n\t * access keys and values with type-specific methods).\n\t *\n\t * @return a type-specific set view of the mappings contained in this map.\n\t * @see #entrySet()\n\t */\n ObjectSet int2CharEntrySet();\n /** Returns a set view of the keys contained in this map.\n\t *

Note that this specification strengthens the one given in {@link Map#keySet()}.\n\t *\n\t * @return a set view of the keys contained in this map.\n\t * @see Map#keySet()\n\t */\n IntSet keySet();\n /** Returns a set view of the values contained in this map.\n\t *

Note that this specification strengthens the one given in {@link Map#values()}.\n\t *\n\t * @return a set view of the values contained in this map.\n\t * @see Map#values()\n\t */\n CharCollection values();\n /**\n\t * @see Map#containsValue(Object)\n\t */\n boolean containsValue( char value );\n /** A type-specific {@link java.util.Map.Entry}; provides some additional methods\n\t * that use polymorphism to avoid (un)boxing.\n\t *\n\t * @see java.util.Map.Entry\n\t */\n interface Entry extends Map.Entry {\n\n /**\n\t\t * @see java.util.Map.Entry#getKey()\n\t\t */\n int getIntKey();\n\n\n\n /**\n\t\t * @see java.util.Map.Entry#setValue(Object)\n\t\t */\n char setValue(char value);\n\n /**\n\t\t * @see java.util.Map.Entry#getValue()\n\t\t */\n char getCharValue();\n\n\n }\n}\n", "meta": {"content_hash": "f7b493ffff62b2666644acc3e1429312", "timestamp": "", "source": "github", "line_count": 138, "max_line_length": 157, "avg_line_length": 38.48550724637681, "alnum_prop": 0.7122952363020146, "repo_name": "karussell/fastutil", "id": "c3d7d0f6fcb18d4600e1f865e62d92aa5142198b", "size": "5311", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/it/unimi/dsi/fastutil/ints/Int2CharMap.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "554418"}, {"name": "Shell", "bytes": "23387"}]}} {"text": "/*\n * To change this template, choose Tools | Templates\n * and open the template in the editor.\n */\npackage wts.models.DisMELS.IBMFunctions.Spawning;\n\nimport com.wtstockhausen.utils.RandomNumberGenerator;\nimport org.openide.util.lookup.ServiceProvider;\nimport org.openide.util.lookup.ServiceProviders;\nimport wts.models.DisMELS.framework.GlobalInfo;\nimport wts.models.DisMELS.framework.IBMFunctions.AbstractIBMFunction;\nimport wts.models.DisMELS.framework.IBMFunctions.IBMFunctionInterface;\nimport wts.models.DisMELS.framework.IBMFunctions.IBMParameter;\n\n/**\n * This class provides an implementation of batch spawning 
activity with\"\n * a poisson-distributed recovery period\n * Type: \n * spawning function\n * Parameters (by key):\n * minRecoveryPeriod - Double - minimum recovery period (d)\n * meaRecoveryPeriod - Double - mean recovery period (d)\n * randomize - Boolean - randomize recovery period\n * Variables:\n * vars - any Object or null\n * Value:\n * t - time to next spawning event(d)\n * t ~ Poisson[meanRP,1/(meanRP-minRP)]\n * \n * @author William.Stockhausen\n */\n@ServiceProviders(value={\n @ServiceProvider(service=IBMFunctionInterface.class)}\n)\npublic class BatchSpawningFunction extends AbstractIBMFunction {\n \n /** random number generator */\n protected static final RandomNumberGenerator rng = GlobalInfo.getInstance().getRandomNumberGenerator();\n \n /** function classification */\n public static final String DEFAULT_type = \"Spawning\";\n /** user-friendly function name */\n public static final String DEFAULT_name = \"batch spawning function\";\n /** function description */\n public static final String DEFAULT_descr = \"batch spawning function\";\n /** full description */\n public static final String DEFAULT_fullDescr = \n \"\\n\\t**************************************************************************\"+\n \"\\n\\t* This class provides an implementation of batch spawning activity with\"+\n \"\\n\\t* a poisson-distributed recovery period.\"+\n \"\\n\\t* Type: \"+\n \"\\n\\t* spawning function\"+\n \"\\n\\t* Parameters (by key):\"+\n \"\\n\\t* minRecoveryPeriod - Double - minimum recovery period (d)\"+\n \"\\n\\t* meaRecoveryPeriod - Double - mean recovery period (d)\"+\n \"\\n\\t* randomize - Boolean - randomize recovery period \"+\n \"\\n\\t* Variables:\"+\n \"\\n\\t* vars - any Object or null\"+\n \"\\n\\t* Value:\"+\n \"\\n\\t* t - time to next spawning event(d)\"+\n \"\\n\\t* t ~ Poisson[meanRP,1/(meanRP-minRP)]\"+\n \"\\n\\t* author: William.Stockhausen\"+\n \"\\n\\t**************************************************************************\";\n\n /** 
number of settable parameters */\n public static final int numParams = 2;\n /** number of sub-functions */\n public static final int numSubFuncs = 0;\n\n /** key to set min recovery period parameter */\n public static final String PARAM_minRecoveryPeriod = \"minimum recovery period (d)\";\n /** key to set mean recovery period parameter */\n public static final String PARAM_meanRecoveryPeriod = \"mean recovery period (d)\";\n /** key to set stochastic flag */\n public static final String PARAM_randomize = \"randomize recovery period\";\n \n /** value of min recovery period parameter */\n private double minRP = 0.0;\n /** value of mean recovery period parameter */\n private double meanRP = 0.0;\n /** value of mean recovery period parameter */\n private boolean randomize = false;\n \n /** constructor for class */\n public BatchSpawningFunction(){\n super(numParams,numSubFuncs,DEFAULT_type,DEFAULT_name,DEFAULT_descr,DEFAULT_fullDescr);\n String key; \n key = PARAM_minRecoveryPeriod; addParameter(key,Double.class, key);\n key = PARAM_meanRecoveryPeriod; addParameter(key,Double.class, key);\n key = PARAM_randomize; addParameter(key,Boolean.class, key); \n }\n \n @Override\n public BatchSpawningFunction clone(){\n BatchSpawningFunction clone = new BatchSpawningFunction();\n clone.setFunctionType(getFunctionType());\n clone.setFunctionName(getFunctionName());\n clone.setDescription(getDescription());\n clone.setFullDescription(getFullDescription());\n for (String key: getParameterNames()) {\n IBMParameter op = getParameter(key);\n IBMParameter np = clone.getParameter(key);\n np.setDescription(op.getDescription());\n np.setValue(op.getValue());\n }\n// for (String key: getSubfunctionNames()) clone.setSubfunction(key,(IBMFunctionInterface)getSubfunction(key).clone());\n return clone;\n }\n \n /**\n * Sets the parameter value corresponding to the key associated with param.\n * \n * @param param - the parameter key (name)\n * @param value - its value\n * @return \n */\n 
@Override\n public boolean setParameterValue(String param,Object value){\n if (super.setParameterValue(param, value)){\n switch (param) {\n case PARAM_minRecoveryPeriod:\n minRP = ((Double) value);\n break;\n case PARAM_meanRecoveryPeriod:\n meanRP = ((Double) value);\n break;\n case PARAM_randomize:\n randomize = ((Boolean) value);\n break;\n }\n return true;\n }\n return false;\n }\n\n /**\n * Calculates the value of the function, given the current parameters.\n * \n * @param vars - any Object or null\n * @return - Double - time to next spawning event\n */\n @Override\n public Double calculate(Object vars) {\n double res = meanRP;\n if (randomize){\n res = minRP-Math.log(rng.computeUniformVariate(0.0, 1.0))*(meanRP-minRP);\n }\n return res;\n }\n}\n", "meta": {"content_hash": "4d00d82f37cdf42637f7022e825081e9", "timestamp": "", "source": "github", "line_count": 150, "max_line_length": 126, "avg_line_length": 40.08, "alnum_prop": 0.6124417831004657, "repo_name": "wStockhausen/DisMELS", "id": "97e537bbb194da928e21b2550d55af66b4d558f1", "size": "6012", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "DisMELS_IBMFunctions/src/wts/models/DisMELS/IBMFunctions/Spawning/BatchSpawningFunction.java", "mode": "33188", "license": "mit", "language": [{"name": "Java", "bytes": "3320511"}]}} {"text": "package com.entity;\n\nimport java.io.Serializable;\nimport java.util.Set;\n\npublic class Hall implements Serializable{\n/**\n\t * \n\t */\n\tprivate static final long serialVersionUID = -2972907068930528292L;\nprivate int hall_id;\nprivate String roomname;\nprivate String version;//\u653e\u6620\u7684\u7248\u672c \n//\u4e00\u4e2a\u64ad\u653e\u5385\u64ad\u653e\u591a\u4e2a\u573a\u6b21\u7684\u7535\u5f71\nprivate Set play;\nprivate int seat;//\u4e00\u4e2a\u653e\u6620\u5385\u7684\u5ea7\u4f4d\u6570\n\n\npublic int getSeat() {\n\treturn seat;\n}\npublic void setSeat(int seat) {\n\tthis.seat = seat;\n}\npublic String getVersion() {\n\treturn version;\n}\npublic void 
setVersion(String version) {\n\tthis.version = version;\n}\npublic Set getPlay() {\n\treturn play;\n}\npublic void setPlay(Set play) {\n\tthis.play = play;\n}\npublic String getRoomname() {\n\treturn roomname;\n}\npublic int getHall_id() {\n\treturn hall_id;\n}\npublic void setHall_id(int hall_id) {\n\tthis.hall_id = hall_id;\n}\npublic void setRoomname(String roomname) {\n\tthis.roomname = roomname;\n}\npublic Hall() {\n\tsuper();\n}\n\n\t\n}\n", "meta": {"content_hash": "ff4e155f31edb907f21c66883f056b9d", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 68, "avg_line_length": 17.14814814814815, "alnum_prop": 0.7213822894168467, "repo_name": "blueiou/mymovie", "id": "a98ec59f3cb723249116381b58403b33afd5254d", "size": "982", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/com/entity/Hall.java", "mode": "33188", "license": "mit", "language": [{"name": "ApacheConf", "bytes": "24139"}, {"name": "CSS", "bytes": "101766"}, {"name": "HTML", "bytes": "74262"}, {"name": "Java", "bytes": "193574"}, {"name": "JavaScript", "bytes": "441586"}]}} {"text": "\n\npackage com.hazelcast.transaction;\n\nimport com.hazelcast.core.DistributedObject;\n\nimport javax.transaction.xa.XAResource;\n\n/**\n * Interface for providing Hazelcast as an XAResource\n */\npublic interface HazelcastXAResource extends XAResource, DistributedObject {\n\n /**\n * Returns the TransactionContext associated with the current thread.\n *\n * @return TransactionContext associated with the current thread\n * @throws IllegalStateException if no context found\n */\n TransactionContext getTransactionContext();\n\n\n}\n", "meta": {"content_hash": "c0d1c1b2fa7ae7b0a044584208bb3b0f", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 76, "avg_line_length": 23.47826086956522, "alnum_prop": 0.7574074074074074, "repo_name": "lmjacksoniii/hazelcast", "id": "df14336954e7d4ddba7db64cc133846a0faa4dd8", "size": "1165", "binary": false, "copies": "1", 
"ref": "refs/heads/master", "path": "hazelcast/src/main/java/com/hazelcast/transaction/HazelcastXAResource.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "948"}, {"name": "Java", "bytes": "28952463"}, {"name": "Shell", "bytes": "13259"}]}} {"text": "\ufeffusing System;\n\nnamespace Orleans.Runtime\n{\n [Serializable]\n public struct MembershipVersion : IComparable, IEquatable\n {\n private readonly long version;\n\n public MembershipVersion(long version)\n {\n this.version = version;\n }\n\n public static MembershipVersion MinValue => new MembershipVersion(long.MinValue);\n\n public int CompareTo(MembershipVersion other) => this.version.CompareTo(other.version);\n\n public bool Equals(MembershipVersion other) => this.version == other.version;\n\n public override bool Equals(object obj) => obj is MembershipVersion other && this.Equals(other);\n\n public override int GetHashCode() => this.version.GetHashCode();\n\n public override string ToString() => this.version.ToString();\n\n public static bool operator ==(MembershipVersion left, MembershipVersion right) => left.version == right.version;\n public static bool operator !=(MembershipVersion left, MembershipVersion right) => left.version != right.version;\n public static bool operator >=(MembershipVersion left, MembershipVersion right) => left.version >= right.version;\n public static bool operator <=(MembershipVersion left, MembershipVersion right) => left.version <= right.version;\n public static bool operator >(MembershipVersion left, MembershipVersion right) => left.version > right.version;\n public static bool operator <(MembershipVersion left, MembershipVersion right) => left.version < right.version;\n }\n}\n", "meta": {"content_hash": "a36ddfd0fb6a0a7997dba91fc19af85d", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 121, "avg_line_length": 46.3235294117647, "alnum_prop": 0.7123809523809523, "repo_name": "sergeybykov/orleans", "id": 
"9adc17ab78c005aff1760ce3a241197063c2a2a6", "size": "1577", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Orleans.Core/Runtime/MembershipVersion.cs", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "8670"}, {"name": "C#", "bytes": "9688518"}, {"name": "F#", "bytes": "2313"}, {"name": "PLSQL", "bytes": "23892"}, {"name": "PLpgSQL", "bytes": "53037"}, {"name": "PowerShell", "bytes": "1743"}, {"name": "Shell", "bytes": "3725"}, {"name": "Smalltalk", "bytes": "1436"}, {"name": "TSQL", "bytes": "22730"}]}} {"text": "\n\n\n#include \n#include \"sliding-text-layer.h\"\n\n\n#define ANIMATION_DIRECTION_UP 0\n#define ANIMATION_DIRECTION_DOWN 1\n\n\ntypedef struct SlidingTextLayerData {\n char* text_next;\n char* text_current;\n GColor color_text;\n GFont font;\n bool is_animating;\n GTextOverflowMode overflow;\n GTextAlignment align;\n Animation* animation;\n AnimationImplementation implementation;\n AnimationImplementation implementation_bounce;\n uint16_t offset;\n uint8_t direction;\n AnimationCurve curve;\n int8_t adjustment;\n uint16_t duration;\n} SlidingTextLayerData;\n\n\nstatic void animate(SlidingTextLayer* layer, uint8_t direction);\nstatic void animate_bounce(SlidingTextLayer* layer, uint8_t direction);\nstatic void render(Layer* layer, GContext* ctx);\nstatic void animation_started(Animation *anim, void *context);\nstatic void animation_stopped(Animation *anim, bool stopped, void *context);\nstatic void animation_stopped_bounce(Animation *anim, bool stopped, void *context);\nstatic void update(Animation* anim, AnimationProgress dist_normalized);\nstatic void update_bounce(Animation* anim, AnimationProgress dist_normalized);\n#define get_layer(layer) (Layer*)layer\n#define get_data(layer) (SlidingTextLayerData*) layer_get_data(get_layer(layer))\n#define layer_get_height(layer) layer_get_bounds(layer).size.h\n#define layer_get_width(layer) layer_get_bounds(layer).size.w\n\nstatic int getDuration(){\n int 
multiplierNumber = ((rand() % 6) + 1) * 100;\n return multiplierNumber;\n}\n\nSlidingTextLayer* sliding_text_layer_create(GRect rect) {\n SlidingTextLayer* layer = (SlidingTextLayer*)layer_create_with_data(rect, sizeof(SlidingTextLayerData));\n SlidingTextLayerData* data = get_data(layer);\n data->color_text = GColorBlack;\n data->align = GTextAlignmentCenter;\n data->overflow = GTextOverflowModeFill;\n data->offset = 0;\n data->is_animating = false;\n data->implementation.update = update;\n data->implementation_bounce.update = update_bounce;\n data->curve = AnimationCurveEaseInOut;\n data->duration = getDuration();\n layer_set_update_proc(get_layer(layer), render);\n return layer;\n}\n\nvoid sliding_text_layer_destroy(SlidingTextLayer* layer) {\n SlidingTextLayerData* data = get_data(layer);\n if (data->is_animating) {\n\n }\n layer_destroy(get_layer(layer));\n}\n\nvoid sliding_text_layer_animate_up(SlidingTextLayer* layer) {\n animate(layer, ANIMATION_DIRECTION_UP);\n}\n\nvoid sliding_text_layer_animate_down(SlidingTextLayer* layer) {\n animate(layer, ANIMATION_DIRECTION_DOWN);\n}\n\nvoid sliding_text_layer_animate_bounce_up(SlidingTextLayer* layer) {\n animate_bounce(layer, ANIMATION_DIRECTION_UP);\n}\n\nvoid sliding_text_layer_animate_bounce_down(SlidingTextLayer* layer) {\n animate_bounce(layer, ANIMATION_DIRECTION_DOWN);\n}\n\nvoid sliding_text_layer_set_text(SlidingTextLayer* layer, char* text) {\n if (! layer) {\n return;\n }\n SlidingTextLayerData* data = get_data(layer);\n if (! data) {\n return;\n }\n free(data->text_current);\n data->text_current = text;\n layer_mark_dirty(get_layer(layer));\n}\n\nvoid sliding_text_layer_set_next_text(SlidingTextLayer* layer, char* text) {\n if (! layer) {\n return;\n }\n SlidingTextLayerData* data = get_data(layer);\n if (! data) {\n return;\n }\n data->text_next = text;\n}\n\nvoid sliding_text_layer_set_duration(SlidingTextLayer* layer, uint16_t duration) {\n if (! 
layer) {\n return;\n }\n SlidingTextLayerData* data = get_data(layer);\n if (! data) {\n return;\n }\n data->duration = duration;\n}\n\nvoid sliding_text_layer_set_animation_curve(SlidingTextLayer* layer, AnimationCurve curve) {\n if (! layer) {\n return;\n }\n SlidingTextLayerData* data = get_data(layer);\n if (! data) {\n return;\n }\n data->curve = curve;\n}\n\nvoid sliding_text_layer_set_font(SlidingTextLayer* layer, GFont font) {\n if (! layer) {\n return;\n }\n SlidingTextLayerData* data = get_data(layer);\n if (! data) {\n return;\n }\n data->font = font;\n layer_mark_dirty(get_layer(layer));\n}\n\nvoid sliding_text_layer_set_text_color(SlidingTextLayer* layer, GColor color) {\n if (! layer) {\n return;\n }\n SlidingTextLayerData* data = get_data(layer);\n if (! data) {\n return;\n }\n data->color_text = color;\n layer_mark_dirty(get_layer(layer));\n}\n\nvoid sliding_text_layer_set_text_alignment(SlidingTextLayer* layer, GTextAlignment alignment) {\n if (! layer) {\n return;\n }\n SlidingTextLayerData* data = get_data(layer);\n if (! data) {\n return;\n }\n data->align = alignment;\n}\n\nvoid sliding_text_layer_set_vertical_adjustment(SlidingTextLayer* layer, int8_t adjustment) {\n if (! layer) {\n return;\n }\n SlidingTextLayerData* data = get_data(layer);\n if (! data) {\n return;\n }\n data->adjustment = adjustment;\n}\n\nbool sliding_text_layer_is_animating(SlidingTextLayer* layer) {\n if (! layer) {\n return false;\n }\n SlidingTextLayerData* data = get_data(layer);\n if (! data) {\n return false;\n }\n return data->is_animating;\n}\n\n\nstatic void animate(SlidingTextLayer* layer, uint8_t direction) {\n if (! layer) {\n return;\n }\n SlidingTextLayerData* data = get_data(layer);\n if (! 
data) {\n return;\n }\n if (data->is_animating) {\n return;\n }\n\n data->direction = direction;\n data->offset = 0;\n data->animation = animation_create();\n animation_set_duration(data->animation, data->duration);\n animation_set_curve(data->animation, data->curve);\n animation_set_implementation(data->animation, &data->implementation);\n animation_set_handlers(data->animation, (AnimationHandlers) {\n .started = animation_started,\n .stopped = animation_stopped\n }, (void*)layer);\n animation_schedule(data->animation);\n}\n\nstatic void animate_bounce(SlidingTextLayer* layer, uint8_t direction) {\n if (! layer) {\n return;\n }\n SlidingTextLayerData* data = get_data(layer);\n if (! data) {\n return;\n }\n if (data->is_animating) {\n return;\n }\n\n data->direction = direction;\n data->offset = 0;\n data->animation = animation_create();\n animation_set_duration(data->animation, data->duration / 2);\n animation_set_curve(data->animation, data->curve);\n animation_set_implementation(data->animation, &data->implementation_bounce);\n animation_set_handlers(data->animation, (AnimationHandlers) {\n .started = animation_started,\n .stopped = animation_stopped_bounce\n }, (void*)layer);\n animation_schedule(data->animation);\n}\n\nstatic void render(Layer* layer, GContext* ctx) {\n SlidingTextLayerData* data = get_data(layer);\n graphics_context_set_text_color(ctx, data->color_text);\n\n#if STL_DEBUG\n graphics_context_set_fill_color(ctx, GColorBlack);\n graphics_fill_rect(ctx, layer_get_bounds(layer), 0, GCornerNone);\n#endif\n\n if (data->is_animating) {\n graphics_draw_text(ctx,\n data->direction == ANIMATION_DIRECTION_UP ? data->text_next : data->text_current,\n data->font,\n GRect(0, data->adjustment - data->offset, layer_get_width(layer), layer_get_height(layer)),\n data->overflow,\n data->align,\n NULL);\n graphics_draw_text(ctx,\n data->direction == ANIMATION_DIRECTION_UP ? 
data->text_current : data->text_next,\n data->font,\n GRect(0, layer_get_height(layer) + data->adjustment - data->offset, layer_get_width(layer), layer_get_height(layer)),\n data->overflow,\n data->align,\n NULL);\n }\n else {\n graphics_draw_text(ctx,\n data->text_current,\n data->font,\n GRect(0, data->adjustment, layer_get_width(layer), layer_get_height(layer)),\n data->overflow,\n data->align,\n NULL);\n }\n}\n\nstatic void animation_started(Animation *anim, void *context) {\n#if STL_DEBUG\n APP_LOG(APP_LOG_LEVEL_DEBUG, \"%d\", heap_bytes_free());\n#endif\n SlidingTextLayerData* data = get_data((SlidingTextLayer*)context);\n data->is_animating = true;\n}\n\nstatic void animation_stopped(Animation *anim, bool stopped, void *context) {\n SlidingTextLayerData* data = get_data((SlidingTextLayer*)context);\n data->is_animating = false;\n data->text_current = data->text_next;\n data->text_next = false;\n#ifdef PBL_SDK_2\n animation_destroy(anim);\n#endif\n}\n\nstatic void animation_stopped_bounce(Animation *anim, bool stopped, void *context) {\n SlidingTextLayerData* data = get_data((SlidingTextLayer*)context);\n data->is_animating = false;\n}\n\nstatic void update(Animation* anim, AnimationProgress dist_normalized) {\n SlidingTextLayer* layer = (SlidingTextLayer*)animation_get_context(anim);\n SlidingTextLayerData* data = get_data(layer);\n\n uint16_t percent = (100 * dist_normalized) / (ANIMATION_NORMALIZED_MAX);\n if (data->direction == ANIMATION_DIRECTION_UP) {\n data->offset = layer_get_height(layer) - ((uint16_t)(layer_get_height(layer) * percent) / 100);\n }\n else {\n data->offset = (uint16_t)((layer_get_height(layer) * percent) / 100);\n }\n layer_mark_dirty(get_layer(layer));\n}\n\nstatic void update_bounce(Animation* anim, AnimationProgress dist_normalized) {\n SlidingTextLayer* layer = (SlidingTextLayer*)animation_get_context(anim);\n SlidingTextLayerData* data = get_data(layer);\n\n uint16_t max_height = (layer_get_height(layer) * 100) / 3;\n\n uint16_t 
percent = (dist_normalized * 100) / ANIMATION_NORMALIZED_MAX;\n if (data->direction == ANIMATION_DIRECTION_UP) {\n data->offset = (layer_get_height(layer) - (uint16_t)(max_height * percent) / 10000);\n }\n else {\n data->offset = (uint16_t)((max_height * percent) / 10000);\n }\n layer_mark_dirty(get_layer(layer));\n}\n", "meta": {"content_hash": "d95942fadeb3525b3e99348bfdd4757b", "timestamp": "", "source": "github", "line_count": 330, "max_line_length": 123, "avg_line_length": 28.35757575757576, "alnum_prop": 0.6994015815345159, "repo_name": "chrisschlitt/Pebble_MLB", "id": "d3cefa38ee9497bf94941320675a4c0cb3ec1a26", "size": "10538", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/sliding-text-layer.c", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "57862"}, {"name": "JavaScript", "bytes": "15057"}, {"name": "Python", "bytes": "1136"}]}} {"text": "\ufeffusing System;\nusing System.Collections.Generic;\nusing System.Linq;\nusing System.Text;\n\nnamespace OakIdeas.Schema.Core.CreativeWorks\n{\n public class ItemListElement\n {\n public string Value { get; set; }\n public string Order { get; set; }\n }\n}\n", "meta": {"content_hash": "34e6bcb3c61905271db69b3ecf73a655", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 44, "avg_line_length": 20.46153846153846, "alnum_prop": 0.6842105263157895, "repo_name": "oakcool/OakIdeas.Schema", "id": "d54987f615832be193ae406beee917da8d8a2342", "size": "268", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "OakIdeas.Schema.Core/CreativeWorks/ItemListElement.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "184653"}]}} {"text": "SYNONYM\n\n#### According to\nThe Catalogue of Life, 3rd January 2011\n\n#### Published in\nnull\n\n#### Original name\nnull\n\n### Remarks\nnull", "meta": {"content_hash": "76ed169cedffe0ece8a30b3ff94e43ee", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, 
"avg_line_length": 10.23076923076923, "alnum_prop": 0.6917293233082706, "repo_name": "mdoering/backbone", "id": "d785f2806c0140eba2364cfb298587c9616ed2c6", "size": "183", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Liliopsida/Liliales/Liliaceae/Gagea/Gagea pauciflora/ Syn. Lloydia szechenyiana/README.md", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "\n\npackage com.themodernway.server.core.security;\n\nimport java.io.IOException;\nimport java.util.List;\n\nimport com.themodernway.server.core.AbstractCoreLoggingBase;\nimport com.themodernway.server.core.ICoreCommon;\nimport com.themodernway.server.core.logging.IHasLogging;\nimport com.themodernway.server.core.logging.LoggingOps;\n\npublic class DefaultAuthorizationProvider extends AbstractCoreLoggingBase implements IAuthorizationProvider, ICoreCommon, IHasLogging\n{\n public DefaultAuthorizationProvider()\n {\n if (logger().isInfoEnabled())\n {\n logger().info(LoggingOps.THE_MODERN_WAY_MARKER, \"DefaultAuthorizationProvider()\");\n }\n }\n\n @Override\n public IAuthorizationResult isAuthorized(final Object target, List roles)\n {\n if (null == target)\n {\n if (logger().isErrorEnabled())\n {\n logger().error(LoggingOps.THE_MODERN_WAY_MARKER, \"error null target\");\n }\n return new AuthorizationResult(false, E_RUNTIMEERROR, \"error null target\");\n }\n if (null == roles)\n {\n if (logger().isErrorEnabled())\n {\n logger().error(LoggingOps.THE_MODERN_WAY_MARKER, \"error null roles\");\n }\n return new AuthorizationResult(false, E_RUNTIMEERROR, \"error null roles\");\n }\n roles = toUnique(roles);\n\n if (roles.isEmpty())\n {\n if (logger().isErrorEnabled())\n {\n logger().error(LoggingOps.THE_MODERN_WAY_MARKER, \"error empty roles\");\n }\n return new AuthorizationResult(false, E_RUNTIMEERROR, \"error empty roles\");\n }\n if (target instanceof IAuthorizedObject)\n {\n if (logger().isDebugEnabled())\n {\n 
logger().debug(LoggingOps.THE_MODERN_WAY_MARKER, \"dispatch authorization roles \" + toPrintableString(roles));\n }\n final IAuthorizationResult result = ((IAuthorizedObject) target).isAuthorized(roles);\n\n if (null != result)\n {\n if (logger().isDebugEnabled())\n {\n logger().debug(LoggingOps.THE_MODERN_WAY_MARKER, \"dispatch to authorization result \" + result.toString());\n }\n return result;\n }\n if (logger().isErrorEnabled())\n {\n logger().error(LoggingOps.THE_MODERN_WAY_MARKER, \"error null authorization result\");\n }\n return new AuthorizationResult(false, E_RUNTIMEERROR, \"error null authorization result\");\n }\n final Authorized authorized = target.getClass().getAnnotation(Authorized.class);\n\n if (null == authorized)\n {\n if (logger().isDebugEnabled())\n {\n logger().debug(LoggingOps.THE_MODERN_WAY_MARKER, \"pass no authorizations present\");\n }\n return new AuthorizationResult(true, I_WASVALIDATED, \"pass no authorizations present\");\n }\n List list = toUnique(authorized.not());\n\n if (false == list.isEmpty())\n {\n for (final String type : list)\n {\n if (roles.contains(type))\n {\n if (logger().isDebugEnabled())\n {\n logger().debug(LoggingOps.THE_MODERN_WAY_MARKER, \"fail not role \" + type + \" in roles \" + toPrintableString(roles));\n }\n return new AuthorizationResult(false, E_EXCLUDEDROLE, \"fail not role \" + type);\n }\n }\n }\n long look = 0;\n\n list = toUnique(authorized.all());\n\n if (false == list.isEmpty())\n {\n for (final String type : list)\n {\n if (false == roles.contains(type))\n {\n if (logger().isDebugEnabled())\n {\n logger().debug(LoggingOps.THE_MODERN_WAY_MARKER, \"fail and role \" + type + \" in roles \" + toPrintableString(roles));\n }\n return new AuthorizationResult(false, E_NOTVALIDROLE, \"fail and role \" + type);\n }\n }\n look++;\n }\n list = toUnique(authorized.any());\n\n if (false == list.isEmpty())\n {\n for (final String type : list)\n {\n if (roles.contains(type))\n {\n if (logger().isDebugEnabled())\n 
{\n logger().debug(LoggingOps.THE_MODERN_WAY_MARKER, \"pass any role \" + type + \" in roles \" + toPrintableString(roles));\n }\n return new AuthorizationResult(true, I_WASVALIDATED, \"pass any role \" + type);\n }\n }\n if (logger().isDebugEnabled())\n {\n logger().debug(LoggingOps.THE_MODERN_WAY_MARKER, \"fail any \" + toPrintableString(list) + \" in roles \" + toPrintableString(roles));\n }\n return new AuthorizationResult(false, E_NOTVALIDROLE, \"fail any role\");\n }\n if (look < 1)\n {\n list = toUnique(authorized.value());\n\n if (false == list.isEmpty())\n {\n for (final String type : list)\n {\n if (false == roles.contains(type))\n {\n if (logger().isDebugEnabled())\n {\n logger().debug(LoggingOps.THE_MODERN_WAY_MARKER, \"fail value role \" + type + \" in roles \" + toPrintableString(roles));\n }\n return new AuthorizationResult(false, E_NOTVALIDROLE, \"fail value role \" + type);\n }\n }\n }\n }\n if (logger().isDebugEnabled())\n {\n logger().debug(LoggingOps.THE_MODERN_WAY_MARKER, \"pass no authorizations matched in roles \" + toPrintableString(roles));\n }\n return new AuthorizationResult(true, I_WASVALIDATED, \"pass no authorizations matched\");\n }\n\n @Override\n public void close() throws IOException\n {\n if (logger().isInfoEnabled())\n {\n logger().info(LoggingOps.THE_MODERN_WAY_MARKER, \"DefaultAuthorizationProvider().close()\");\n }\n }\n}\n", "meta": {"content_hash": "02eff669fe596dfb657b72e2f02eacab", "timestamp": "", "source": "github", "line_count": 174, "max_line_length": 146, "avg_line_length": 36.770114942528735, "alnum_prop": 0.5236011253516724, "repo_name": "themodernway/themodernway-server-core", "id": "b543982865557fafc9b0c7763ddf61c4e7cd09ad", "size": "7022", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/groovy/com/themodernway/server/core/security/DefaultAuthorizationProvider.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Groovy", "bytes": "132081"}, {"name": "Java", "bytes": 
"761126"}, {"name": "JavaScript", "bytes": "187"}]}} {"text": "\n\n \n \n \n founify: Not compatible \ud83d\udc7c\n \n \n \n \n \n \n \n \n \n \n

\n
\n \n
\n
\n
\n
\n \u00ab Up\n

\n founify\n \n 8.10.0\n Not compatible \ud83d\udc7c\n \n

\n

\ud83d\udcc5 (2022-04-26 16:21:57 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed # Synopsis\nbase-bigarray       base\nbase-num            base        Num library distributed with the OCaml compiler\nbase-threads        base\nbase-unix           base\ncamlp5              7.14        Preprocessor-pretty-printer of OCaml\nconf-findutils      1           Virtual package relying on findutils\nconf-perl           2           Virtual package relying on perl\ncoq                 8.6.1       Formal proof management system\nnum                 0           The Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.05.0      The OCaml compiler (virtual package)\nocaml-base-compiler 4.05.0      Official 4.05.0 release\nocaml-config        1           OCaml Switch Configuration\nocamlfind           1.9.3       A library manager for OCaml\n# opam file:\nopam-version: "2.0"\nmaintainer: "Hugo.Herbelin@inria.fr"\nhomepage: "https://github.com/coq-contribs/founify"\nlicense: "Unknown"\nbuild: [make "-j%{jobs}%"]\ninstall: [make "install"]\nremove: ["rm" "-R" "%{lib}%/coq/user-contrib/FOUnify"]\ndepends: [\n  "camlp5"\n  "ocaml"\n  "coq" {>= "8.10" & < "8.11~"}\n]\ntags: [\n  "keyword: First-order Unification"\n  "keyword: Robinson"\n  "category: Computer Science/Decision Procedures and Certified Algorithms/Correctness proofs of algorithms"\n  "category: Miscellaneous/Extracted Programs/Type checking unification and normalization"\n]\nauthors: [\n  "Jocelyne Rouyer"\n]\nbug-reports: "https://github.com/coq-contribs/founify/issues"\ndev-repo: "git+https://github.com/coq-contribs/founify.git"\nsynopsis: "Correctness and extraction of the unification algorithm"\ndescription: """\nA notion of terms based on symbols without fixed arities is defined\nand an extended unification problem is proved solvable on these terms.\nAn algorithm, close from Robinson algorithm, can be extracted from the\nproof."""\nflags: light-uninstall\nurl {\n  src: 
"https://github.com/coq-contribs/founify/archive/v8.10.0.tar.gz"\n  checksum: "md5=2f68b8dc8c863e75077d1667a36cf10f"\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install \ud83c\udfdc\ufe0f

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-founify.8.10.0 coq.8.6.1
\n
Return code
\n
5120
\n
Output
\n
[NOTE] Package coq is already installed (current version is 8.6.1).\nThe following dependencies couldn't be met:\n  - coq-founify -> coq >= 8.10\nYour request can't be satisfied:\n  - No available version of coq satisfies the constraints\nNo solution found, exiting\n
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
opam remove -y coq; opam install -y --show-action --unlock-base coq-founify.8.10.0
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Install \ud83d\ude80

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Installation size

\n

No files were installed.

\n

Uninstall \ud83e\uddf9

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub \u00a9 Guillaume Claret \ud83d\udc23\n

\n
\n
\n \n \n \n\n", "meta": {"content_hash": "37e8885d5d42534bdbcae4a57fa8a332", "timestamp": "", "source": "github", "line_count": 176, "max_line_length": 159, "avg_line_length": 40.94886363636363, "alnum_prop": 0.5534896628278063, "repo_name": "coq-bench/coq-bench.github.io", "id": "70b78f7588db0c24354178252cf62d75e1aa0265", "size": "7232", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.05.0-2.0.1/released/8.6.1/founify/8.10.0.html", "mode": "33188", "license": "mit", "language": []}} {"text": "\n\npackage com.vmware.vim25;\n\n/**\n* @author Steve Jin (http://www.doublecloud.org)\n* @version 5.1\n*/\n\n@SuppressWarnings(\"all\")\npublic class VspanPortgroupPromiscChangeFault extends DvsFault {\n public String portgroupName;\n\n public String getPortgroupName() {\n return this.portgroupName;\n }\n\n public void setPortgroupName(String portgroupName) {\n this.portgroupName=portgroupName;\n }\n}", "meta": {"content_hash": "81f1c8fac893060265bb88e1b652204c", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 64, "avg_line_length": 18.761904761904763, "alnum_prop": 0.7436548223350253, "repo_name": "xebialabs/vijava", "id": "cd0e2477baaac7cc6b732ec4c4b1743717a76d24", "size": "2034", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/com/vmware/vim25/VspanPortgroupPromiscChangeFault.java", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Groovy", "bytes": "377"}, {"name": "Java", "bytes": "7842140"}]}} {"text": "layout: post\npublished: true\ntitle: \"It might not get weirder than this\"\nlink: https://sites.google.com/site/sophieinnorthkorea/\npermalink: /2013/02/it-might-not-get-weirder-than-this/\n---\n\nFascinating account by Eric Schmidt's daughter Sophie about their trip to North Korea.\n\n> Top Level Take-aways:\n>\n> 1. Go to North Korea if you can. It is very, very strange.\n> 2. If it is January, disregard the above. It is very, very cold.\n> 3. 
Nothing I'd read or heard beforehand really prepared me for what we saw.\n", "meta": {"content_hash": "0a43c10c31d1bff63f9b91d5b655d492", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 86, "avg_line_length": 36.42857142857143, "alnum_prop": 0.7470588235294118, "repo_name": "alexbilbie/alexbilbie.github.com", "id": "845e5208c4bcdb05e221f4452e45385c24fe1f39", "size": "514", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_posts/2013-02-10-it-might-not-get-weirder-than-this.md", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "250805"}, {"name": "HTML", "bytes": "42201"}]}} {"text": "\n\npackage net.firejack.platform.web.security.action;\n\nimport net.firejack.platform.api.registry.domain.Action;\nimport net.firejack.platform.core.model.registry.HTTPMethod;\n\npublic enum StandardAction {\n\n READ, READ_ALL, CREATE, UPDATE, DELETE, ADVANCED_SEARCH;\n\n public static StandardAction detectStandardAction(Action action) {\n return isReadAction(action) ? READ : isReadAllAction(action) ? READ_ALL :\n isCreateAction(action) ? CREATE : isUpdateAction(action) ? UPDATE :\n isDeleteAction(action) ? DELETE : isAdvancedSearchAction(action) ? 
ADVANCED_SEARCH : null;\n }\n\n public static boolean isReadAction(Action action) {\n return HTTPMethod.GET.equals(action.getMethod()) &&\n action.getName().equalsIgnoreCase(ActionDetectorFactory.READ_ACTION);\n }\n\n public static boolean isCreateAction(Action action) {\n return HTTPMethod.POST.equals(action.getMethod()) &&\n action.getName().equalsIgnoreCase(ActionDetectorFactory.CREATE_ACTION);\n }\n\n public static boolean isUpdateAction(Action action) {\n return HTTPMethod.PUT.equals(action.getMethod()) &&\n action.getName().equalsIgnoreCase(ActionDetectorFactory.UPDATE_ACTION);\n }\n\n public static boolean isDeleteAction(Action action) {\n return HTTPMethod.DELETE.equals(action.getMethod()) &&\n action.getName().equalsIgnoreCase(ActionDetectorFactory.DELETE_ACTION);\n }\n\n public static boolean isReadAllAction(Action action) {\n return HTTPMethod.GET.equals(action.getMethod()) &&\n action.getName().equalsIgnoreCase(ActionDetectorFactory.READ_ALL_ACTION);\n }\n\n public static boolean isAdvancedSearchAction(Action action) {\n return action.getName().equalsIgnoreCase(ActionDetectorFactory.ADVANCED_SEARCH_ACTION);\n }\n\n}", "meta": {"content_hash": "6535fd23b4485e1a90a32cc3a2df8367", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 114, "avg_line_length": 39.61702127659574, "alnum_prop": 0.7056928034371643, "repo_name": "firejack-open/Firejack-Platform", "id": "62e3725bb24d868722ca944899a32be7fed6ec33", "size": "2669", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "integration/src/main/java/net/firejack/platform/web/security/action/StandardAction.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ActionScript", "bytes": "4068"}, {"name": "CSS", "bytes": "5861839"}, {"name": "Java", "bytes": "8816378"}, {"name": "JavaScript", "bytes": "37310201"}, {"name": "Python", "bytes": "7764"}, {"name": "Ruby", "bytes": "11804"}, {"name": "Shell", "bytes": "38753"}]}} {"text": "using 
System.Collections.Generic;\nusing Microsoft.VisualStudio.TestTools.UnitTesting;\n\nnamespace Z.Collections.Test\n{\n [TestClass]\n public class System_Collections_Generic_IDictionary_TKey_TValue_ContainsAllKey\n {\n [TestMethod]\n public void ContainsAllKey()\n {\n // Type\n var @this = new Dictionary {{\"Fizz\", \"Buzz\"}, {\"Fizz2\", \"Buzz2\"}};\n\n // Exemples\n bool value1 = @this.ContainsAllKey(\"Fizz\", \"Fizz2\"); // return true;\n bool value2 = @this.ContainsAllKey(\"Fizz\", \"Fizz3\"); // return false;\n\n // Unit Test\n Assert.IsTrue(value1);\n Assert.IsFalse(value2);\n }\n }\n}", "meta": {"content_hash": "e916fc765b3b65ba55f2862e5ff51e4c", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 94, "avg_line_length": 29.5, "alnum_prop": 0.5861581920903954, "repo_name": "zzzprojects/Z.ExtensionMethods", "id": "e982aba02782c2a6ac82d6bd1d7ff7c275490348", "size": "1133", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/Z.Collections.Test/System.Collections.Generic.IDictionary[TKey,TValue]/IDictionary[TKey,TValue].ContainsAllKey.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "2614731"}, {"name": "Visual Basic", "bytes": "1411898"}]}} {"text": "\n\n// Calculates and returns the force components needed to achieve the\n// given velocity. is a dictionary containing the rider and\n// environmental parameters all in metric units. 
'velocity' is in km/h.\nfunction CalculateForces(velocity, params) {\n // calculate Fgravity\n var Fgravity = 9.8067 *\n (params.rp_wr + params.rp_wb) *\n Math.sin(Math.atan(params.ep_g / 100.0));\n\n // calculate Frolling\n var Frolling = 9.8067 *\n (params.rp_wr + params.rp_wb) *\n Math.cos(Math.atan(params.ep_g / 100.0)) *\n (params.ep_crr);\n\n // calculate Fdrag\n var Fdrag = 0.5 *\n (params.rp_a) *\n (params.rp_cd) *\n (params.ep_rho) *\n (velocity * 1000.0 / 3600.0) *\n (velocity * 1000.0 / 3600.0);\n\n // cons up and return the force components\n var ret = { };\n ret.Fgravity = Fgravity;\n ret.Frolling = Frolling;\n ret.Fdrag = Fdrag;\n return ret;\n}\n\n// Calculates and returns the power needed to achieve the given\n// velocity. is a dictionary containing the rider and\n// environmenetal parameters all in metric units. 'velocity'\n// is in km/h. Returns power in watts.\nfunction CalculatePower(velocity, params) {\n // calculate the forces on the rider.\n var forces = CalculateForces(velocity, params);\n var totalforce = forces.Fgravity + forces.Frolling + forces.Fdrag;\n\n // calculate necessary wheelpower\n var wheelpower = totalforce * (velocity * 1000.0 / 3600.0);\n\n // calculate necessary legpower\n var legpower = wheelpower / (1.0 - (params.rp_dtl/100.0));\n\n return legpower;\n}\n\n// Calculates the velocity obtained from a given power. 
is a\n// dictionary containing the rider and model parameters, all in\n// metric units.\n//\n// Runs a simple midpoint search, using CalculatePower().\n//\n// Returns velocity, in km/h.\nexport function CalculateVelocity(power, params) {\n // How close to get before finishing.\n var epsilon = 0.000001;\n\n // Set some reasonable upper / lower starting points.\n var lowervel = -1000.0;\n var uppervel = 1000.0;\n var midvel = 0.0;\n\n var midpow = CalculatePower(midvel, params);\n\n // Iterate until completion.\n var itcount = 0;\n do {\n if (Math.abs(midpow - power) < epsilon)\n break;\n\n if (midpow > power)\n uppervel = midvel;\n else\n lowervel = midvel;\n\n midvel = (uppervel + lowervel) / 2.0;\n midpow = CalculatePower(midvel, params);\n } while (itcount++ < 100);\n\n return midvel;\n}\n", "meta": {"content_hash": "540f5fc9838ca6b1c4e2dc4ba93bb685", "timestamp": "", "source": "github", "line_count": 86, "max_line_length": 72, "avg_line_length": 29.569767441860463, "alnum_prop": 0.6354699174203696, "repo_name": "chadj/gpedal", "id": "2ac95b635b27f49c241e9cbd32abda6e1ded8f3e", "size": "2704", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/lib/power_v_speed.js", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "96082"}]}} {"text": " \r\n \r\n Course Properties \r\n Allows instructors and administrators to delete a course, or manage its various properties such as title, description, access level, course icon and more. \r\n \r\n \r\n ATutor Team \r\n info@atutor.ca \r\n \r\n \r\n http://atutor.ca \r\n GPL \r\n\t \r\n 0.1 \r\n\t\t\r\n\t\t\texisting\r\n\t\t\t \r\n\t\t\r\n 2005-08-22 \r\n stable \r\n This is a core module. 
\r\n \r\n", "meta": {"content_hash": "0d62030ad92b3dd03847e022de9df918", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 197, "avg_line_length": 37.69565217391305, "alnum_prop": 0.615916955017301, "repo_name": "CaviereFabien/Test", "id": "21f8865d4ab8e4228d0a224ef046863b3e5fe515", "size": "867", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ATutor/mods/_core/properties/module.xml", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "\n\n\t\n\t\tPseudo Blog\n\t\t\n\t\t\n\t\n\t\n\t\t
\n\t\t\t
\n\t\t\t\t

Keyboard Thoughts

\n\t\t\t\t\n\t\t\t
\n\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t
\n\t\t\t\t\t\t\"codecademy\"\n\t\t\t\t\t\t

Codecademy

\n\t\t\t\t\t\t

Why is this one of your favorite sites?

\n\t\t\t\t\t\t

This is one of the places that I realized that I loved to code!

\n\t\t\t\t\t\t

What area of the site is your eye drawn to when looking at the homepage? Stand back further, what area is your eye drawn to now? Is that area the most important area of the site?

\n\t\t\t\t\t\t

The two areas that my eyes are drawn to are the text-line interface and the \"sign-up\" button.

\n\t\t\t\t\t\t

How would you describe the website visually? List 5 adjectives. (i.e. \"pretty\", \"elegant\", \"simple\", \"dark\", \"cluttered\", \"basic\" etc)

\n\t\t\t\t\t\t

simple, minimalistic, sleek, dull, subdued

\n\t\t\t\t\t\t

What problem does this website solve? What content does it have?

\n\t\t\t\t\t\t

This website gives people the opportunity to learn how to code. It gives simple information on what the purpose of the website is and provides a clear call to action.

\n\t\t\t\t\t\t

What 5 adjectives would you use to describe the content, focus, and purpose of the site? (i.e. \"practical\", \"fun\", \"whimsical\", \"silly\", \"serious\" etc) How does that compare to the adjectives you used to describe the site visually?

\n\t\t\t\t\t\t

\"simple, clear, unambiguous, instructional, and concise\" -- These adjectives match the visual adjectives quite closely.

\n\t\t\t\t\t\t

How easy is it to find what you are looking for from the homepage? How about from another page?

\n\t\t\t\t\t\t

It is very easy; everything has been laid out quite simply in plain view.

\n\t\t\t\t\t\t

How easy is it to browse through all the content of the site?

\n\t\t\t\t\t\t

It is easy to browse through the content.

\n\t\t\t\t\t\t

How do you feel after being on the site for a while? (i.e. \"bored\", \"happy\", \"anxious or hyper\", \"like I wasted a lot of time\" etc)

\n\t\t\t\t\t\t

I would be excited to get crackin'!

\n\t\t\t\t\t\t

Does the site sell anything? If so, have you purchased any of it? Why or why not?

\n\t\t\t\t\t\t

The site sells the opportunity to learn, but for FREE! I've taken part in several tracks on Codecademy.

\n\t\t\t\t\t
\n\t\t\t\t\t
\n\t\t\t\t\t\t\"apple\"\n\t\t\t\t\t\t

Apple

\n\t\t\t\t\t\t

Why is this one of your favorite sites?

\n\t\t\t\t\t\t

This is not my favorite website, but it's a website that I admire for it's ability to maintain the attention of its customers.

\n\t\t\t\t\t\t

What area of the site is your eye drawn to when looking at the homepage? Stand back further, what area is your eye drawn to now? Is that area the most important area of the site?

\n\t\t\t\t\t\t

The graphics are what draws my attention, and then the text that overlays the graphics.

\n\t\t\t\t\t\t

How would you describe the website visually? List 5 adjectives. (i.e. \"pretty\", \"elegant\", \"simple\", \"dark\", \"cluttered\", \"basic\" etc)

\n\t\t\t\t\t\t

penetrating, stunning, saturated, colorful, vibrant

\n\t\t\t\t\t\t

What problem does this website solve? What content does it have?

\n\t\t\t\t\t\t

This website promises an experience through its graphics and powerful text. The graphics coupled with the caption allows for multiple, positive interpretations.

\n\t\t\t\t\t\t

What 5 adjectives would you use to describe the content, focus, and purpose of the site? (i.e. \"practical\", \"fun\", \"whimsical\", \"silly\", \"serious\" etc) How does that compare to the adjectives you used to describe the site visually?

\n\t\t\t\t\t\t

\"fun, powerful, smart, clean, and simple\" -- These adjectives are very much in line with the visual description I came up with.

\n\t\t\t\t\t\t

How easy is it to find what you are looking for from the homepage? How about from another page?

\n\t\t\t\t\t\t

The menu bar makes it quite easy to pick out how to navigate through the website.

\n\t\t\t\t\t\t

How easy is it to browse through all the content of the site?

\n\t\t\t\t\t\t

It is easy to browse through the content.

\n\t\t\t\t\t\t

How do you feel after being on the site for a while? (i.e. \"bored\", \"happy\", \"anxious or hyper\", \"like I wasted a lot of time\" etc)

\n\t\t\t\t\t\t

I would be excited to start looking at specs in hopes of buying my next Apple product!

\n\t\t\t\t\t\t

Does the site sell anything? If so, have you purchased any of it? Why or why not?

\n\t\t\t\t\t\t

I've bought my first Mac Air from this website in hopes that it would put me on the right path to becoming a web developer.

\n\t\t\t\t\t
\n\t\t\t\t\t
\n\t\t\t\t\t\t\"mangastream\"\n\t\t\t\t\t\t

Manga Stream

\n\t\t\t\t\t\t

Why is this one of your favorite sites?

\n\t\t\t\t\t\t

I read manga on occasion, and this is my favorite website to go to for scanned translations.

\n\t\t\t\t\t\t

What area of the site is your eye drawn to when looking at the homepage? Stand back further, what area is your eye drawn to now? Is that area the most important area of the site?

\n\t\t\t\t\t\t

What draws my attention is the neatly organized list of chapter posts. Standing further away gives focus to the three graphic panels that display the three latest chapter posts.

\n\t\t\t\t\t\t

How would you describe the website visually? List 5 adjectives. (i.e. \"pretty\", \"elegant\", \"simple\", \"dark\", \"cluttered\", \"basic\" etc)

\n\t\t\t\t\t\t

subdued, comfortable, easy, basic, lists

\n\t\t\t\t\t\t

What problem does this website solve? What content does it have?

\n\t\t\t\t\t\t

This website attempts to solve the conflict between blog posts and chapter posts. I'm not sure if this has been effectively solved.

\n\t\t\t\t\t\t

What 5 adjectives would you use to describe the content, focus, and purpose of the site? (i.e. \"practical\", \"fun\", \"whimsical\", \"silly\", \"serious\" etc) How does that compare to the adjectives you used to describe the site visually?

\n\t\t\t\t\t\t

\"plain, simple, detailed, separated, and compact\"

\n\t\t\t\t\t\t

How easy is it to find what you are looking for from the homepage? How about from another page?

\n\t\t\t\t\t\t

If it's difficult to parse the content at first glance, the menu bar comes in handy.

\n\t\t\t\t\t\t

How easy is it to browse through all the content of the site?

\n\t\t\t\t\t\t

It is easy to browse through the content.

\n\t\t\t\t\t\t

How do you feel after being on the site for a while? (i.e. \"bored\", \"happy\", \"anxious or hyper\", \"like I wasted a lot of time\" etc)

\n\t\t\t\t\t\t

Reading manga usually makes me feel like I've wasted a great deal of time.

\n\t\t\t\t\t\t

Does the site sell anything? If so, have you purchased any of it? Why or why not?

\n\t\t\t\t\t\t

The site doesn't sell anything.

\n\t\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t
\n\t\t\t\t\t

Useful Links

\n\t\t\t\t\tWireframe Post\n\t\t\t\t
\n\t\t\t\t
.
\n\t\t\t
\n\t\t\t
\n\t\t\t\tCopyright 2014 - Brian H. Paak\n\t\t\t
\n\t\t
\n\t\n", "meta": {"content_hash": "8b2882ccda0cea62709bf8e7d58ce2e2", "timestamp": "", "source": "github", "line_count": 98, "max_line_length": 261, "avg_line_length": 84.88775510204081, "alnum_prop": 0.6833754056978002, "repo_name": "bhpaak/bhpaak.github.io", "id": "9a10e0bc007029a281c2241949eb8eadbecd65ae", "size": "8319", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "unit1_projects/analysis_blog.html", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "5475"}]}} {"text": "module DeepCover\n class Node::Const < Node\n check_completion\n has_child scope: [Node, nil]\n has_child const_name: Symbol\n end\n\n class Node::Cbase < Node\n end\nend\n", "meta": {"content_hash": "cbd22c85ad356fa0db7b06840b24b50a", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 32, "avg_line_length": 17.5, "alnum_prop": 0.6742857142857143, "repo_name": "deep-cover/deep-cover", "id": "5a6b78b80de6fb471ba9b393b47c6f5249b98b2a", "size": "206", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core_gem/lib/deep_cover/node/const.rb", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "47229"}, {"name": "HTML", "bytes": "13770"}, {"name": "JavaScript", "bytes": "339680"}, {"name": "Ruby", "bytes": "645848"}, {"name": "Shell", "bytes": "1152"}]}} {"text": " $attrLists\n * @property-read string $specUrl\n * @property-read array $htmlFormat\n * @property-read string $descriptiveName\n */\nfinal class AmpLiveListItemsItem extends Tag implements Identifiable\n{\n /**\n * ID of the tag.\n *\n * @var string\n */\n const ID = 'AMP-LIVE-LIST [items] item';\n\n /**\n * Array of spec rules.\n *\n * @var array\n */\n const SPEC = [\n SpecRule::TAG_NAME => '$REFERENCE_POINT',\n SpecRule::SPEC_NAME => 'AMP-LIVE-LIST [items] item',\n SpecRule::ATTRS => [\n Attribute::DATA_SORT_TIME => [\n SpecRule::MANDATORY => true,\n ],\n Attribute::DATA_TOMBSTONE => [],\n Attribute::DATA_UPDATE_TIME => [],\n ],\n 
SpecRule::ATTR_LISTS => [\n AttributeList\\MandatoryIdAttr::ID,\n ],\n SpecRule::SPEC_URL => 'https://amp.dev/documentation/components/amp-live-list/#items',\n SpecRule::HTML_FORMAT => [\n Format::AMP,\n ],\n SpecRule::DESCRIPTIVE_NAME => 'amp-live-list [data-sort-time] child',\n ];\n}\n", "meta": {"content_hash": "7249a3b7f875db7e8dd4b59af86f41b0", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 94, "avg_line_length": 26.650793650793652, "alnum_prop": 0.6206075044669446, "repo_name": "ampproject/amp-toolbox-php", "id": "c8e92f41e3a3ff9a153c47e19cc4bce49df5ca9b", "size": "1679", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "src/Validator/Spec/Tag/AmpLiveListItemsItem.php", "mode": "33188", "license": "apache-2.0", "language": [{"name": "PHP", "bytes": "1530243"}, {"name": "Shell", "bytes": "653"}]}} {"text": "hasModule($name)) {\n\t\t\t$app->setModule($name, new Module($name));\n\t\t}\n if ($app instanceof \\yii\\web\\Application) {\n $rules[] = [\n 'class' => 'yii\\web\\GroupUrlRule',\n 'prefix' => $name,\n 'rules' => [\n '/' => 'default/index',\n '' => 'default/',\n ],\n ];\n $app->getUrlManager()->addRules($rules, false);\n\n } elseif ($app instanceof \\yii\\console\\Application) {\n\t\t\t$app->controllerMap = array_merge($app->controllerMap, [\n\t\t\t\t'migrate' => [\n\t\t\t\t\t'migrationNamespaces' => [\n\t\t\t\t\t\t'tunect\\Yii2PageHelp\\migrations',\n\t\t\t\t\t],\n\t\t\t\t],\n\t\t\t]);\n\t\t\tif (empty($app->controllerMap['migrate']['class'])) {\n\t\t\t\t$app->controllerMap['migrate']['class'] = 'yii\\console\\controllers\\MigrateController';\n\t\t\t}\n }\n }\n}\n", "meta": {"content_hash": "064800a60b0dc8e262c15abe63900ef6", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 90, "avg_line_length": 26.571428571428573, "alnum_prop": 0.5206093189964157, "repo_name": "dvatri/yii2-page-help", "id": "c769b6de1151a9a161654a800c5ee11abdaade64", "size": "1116", "binary": false, "copies": "1", 
"ref": "refs/heads/master", "path": "src/Bootstrap.php", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "PHP", "bytes": "14094"}]}} {"text": "#pragma once\n\n#include \n\nstatic inline bool plus_overflow_u64(uint64_t x, uint64_t y)\n{\n return ((((uint64_t)(~0)) - x) < y);\n}\n\nstatic inline bool plus_overflow_u32(uint32_t x, uint32_t y)\n{\n return ((((uint32_t)(~0)) - x) < y);\n}\n\n/*\n * This checks to see if two numbers multiplied together are larger\n * than the type that they are. Returns TRUE if OVERFLOWING.\n * If the first parameter \"x\" is greater than zero and\n * if that is true, that the largest possible value 0xFFFFFFFF / \"x\"\n * is less than the second parameter \"y\". If \"y\" is zero then\n * it will also fail because no unsigned number is less than zero.\n */\nstatic inline bool multiply_overflow_u32(uint32_t x, uint32_t y)\n{\n return (x > 0) ? ((((uint32_t)(~0))/x) < y) : false;\n}\n\n#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))\n\n#define AP_WAKE_TRIGGER_DEF 0xffffffff\n\n", "meta": {"content_hash": "e4712aa9fca7f3af949b65d0c1509661", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 70, "avg_line_length": 28.193548387096776, "alnum_prop": 0.6533180778032036, "repo_name": "AMOSSYS/OpenDTeX-Secure-Boot-DRTM", "id": "8b939c39d43ec2b697182ec982051d4beb4fca5d", "size": "874", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "libtxt/src/include/stuff.h", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Assembly", "bytes": "20578"}, {"name": "C", "bytes": "1003462"}, {"name": "C++", "bytes": "98079"}, {"name": "Logos", "bytes": "721"}, {"name": "Objective-C", "bytes": "4708"}, {"name": "Shell", "bytes": "853"}]}} {"text": "Este repositorio contiene una plantilla LaTeX para el Trabajo de Fin de Grado de la Escuela T\u00e9cnica de Ingenier\u00eda Inform\u00e1tica (ETSInf) de la Universidad Polit\u00e9cnica de Valencia (UPV).\n\n\nEst\u00e1 basada en la plantilla 
para Microsoft Word [disponible en la web de la ETSInf](http://www.upv.es/entidades/ETSINF/info/plantillaTFG.doc).\n\n# English\n\nThis repository contains a LaTeX template for degree's final project of ETSInf (UPV).\n\nIt's based on the Microsoft Word template [available on ETSInf's website](http://www.upv.es/entidades/ETSINF/info/plantillaTFG.doc).", "meta": {"content_hash": "607e97347baebc686a5256b6887c2aac", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 183, "avg_line_length": 56.0, "alnum_prop": 0.7946428571428571, "repo_name": "Sumolari/TemplateTFG", "id": "939322df11044ed99899014cb8a098f2a2b13d35", "size": "577", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Readme.md", "mode": "33188", "license": "mit", "language": [{"name": "TeX", "bytes": "20797"}]}} {"text": "\n\npackage org.apache.jena.test;\n\nimport junit.framework.TestCase ;\nimport junit.framework.TestSuite ;\nimport org.apache.jena.rdf.model.impl.RDFReaderFImpl;\n\n/**\n * All developers should edit this file to add their tests.\n * Please try to name your tests and test suites appropriately.\n * Note, it is better to name your test suites on creation\n * rather than in this file.\n */\npublic class TestPackage extends TestCase {\n\n static public TestSuite suite() {\n // Reads Turtle (old parser, not up-to-date but we need something for testing.)\n RDFReaderFImpl.alternative(new X_RDFReaderF());\n\n TestSuite ts = new TestSuite() ;\n ts.setName(\"Jena\") ;\n addTest(ts, \"System setup\", TestSystemSetup.suite());\n addTest(ts, \"IRI\", org.apache.jena.irix.TS_IRIx.suite());\n addTest(ts, \"Enhanced\", org.apache.jena.enhanced.test.TestPackage.suite());\n addTest(ts, \"Datatypes\", org.apache.jena.datatypes.TestPackage.suite()) ;\n addTest(ts, \"Graph\", org.apache.jena.graph.test.TestPackage.suite());\n addTest(ts, \"Mem\", org.apache.jena.mem.test.TestMemPackage.suite() );\n addTest(ts, \"Mem2\", 
org.apache.jena.mem.test.TestGraphMemPackage.suite() );\n addTest(ts, \"Model\", org.apache.jena.rdf.model.test.TestPackage.suite());\n addTest(ts, \"StandardModels\", org.apache.jena.rdf.model.test.TestStandardModels.suite() );\n addTest(ts, \"Turtle\", org.apache.jena.ttl_test.turtle.TurtleTestSuite.suite()) ;\n addTest(ts, \"XML Output\", org.apache.jena.rdfxml.xmloutput.TestPackage_xmloutput.suite());\n addTest(ts, \"Util\", org.apache.jena.util.TestPackage.suite());\n addTest(ts, \"Jena iterator\", org.apache.jena.util.iterator.test.TestPackage.suite() );\n addTest(ts, \"Assembler\", org.apache.jena.assembler.test.TestAssemblerPackage.suite() );\n addTest(ts, \"ARP\", org.apache.jena.rdfxml.xmlinput.TestPackage_xmlinput.suite());\n addTest(ts, \"Vocabularies\", org.apache.jena.vocabulary.test.TestVocabularies.suite() );\n addTest(ts, \"Shared\", org.apache.jena.shared.TestSharedPackage.suite() );\n addTest(ts, \"Reasoners\", org.apache.jena.reasoner.test.TestPackage.suite());\n addTest(ts, \"Composed graphs\", org.apache.jena.graph.compose.test.TestPackage.suite() );\n addTest(ts, \"Ontology\", org.apache.jena.ontology.impl.TestPackage.suite() );\n return ts ;\n }\n\n private static void addTest(TestSuite ts, String name, TestSuite tc) {\n if ( name != null )\n tc.setName(name);\n ts.addTest(tc);\n }\n\n\n\n}\n", "meta": {"content_hash": "2c98a67fbcc7857071faa6d496646640", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 99, "avg_line_length": 47.96296296296296, "alnum_prop": 0.6745173745173745, "repo_name": "apache/jena", "id": "39d218c0715388936529219d4ecc4098fad98b46", "size": "3395", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "jena-core/src/test/java/org/apache/jena/test/TestPackage.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "22246"}, {"name": "C++", "bytes": "5877"}, {"name": "CSS", "bytes": "3241"}, {"name": "Dockerfile", "bytes": "3341"}, {"name": "Elixir", 
"bytes": "2548"}, {"name": "HTML", "bytes": "69029"}, {"name": "Haml", "bytes": "30030"}, {"name": "Java", "bytes": "35185092"}, {"name": "JavaScript", "bytes": "72788"}, {"name": "Lex", "bytes": "82672"}, {"name": "Makefile", "bytes": "198"}, {"name": "Perl", "bytes": "35662"}, {"name": "Python", "bytes": "416"}, {"name": "Ruby", "bytes": "216471"}, {"name": "SCSS", "bytes": "4242"}, {"name": "Shell", "bytes": "264124"}, {"name": "Thrift", "bytes": "3755"}, {"name": "Vue", "bytes": "104702"}, {"name": "XSLT", "bytes": "65126"}]}} {"text": "\ufeffusing System;\nusing System.Collections.Generic;\nusing System.Linq;\nusing System.Threading.Tasks;\nusing System.Windows.Forms;\n\nnamespace ClipboardText\n{\n\tstatic class Program\n\t{\n\t\t/// \n\t\t/// The main entry point for the application.\n\t\t/// \n\t\t[STAThread]\n\t\tstatic void Main()\n\t\t{\n\t\t\tApplication.EnableVisualStyles();\n\t\t\tApplication.SetCompatibleTextRenderingDefault(false);\n\t\t\tApplication.Run(new Form1());\n\t\t}\n\t}\n}\n", "meta": {"content_hash": "5bd49dec0fbb9364d9ffccb78717b974", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 56, "avg_line_length": 19.727272727272727, "alnum_prop": 0.7235023041474654, "repo_name": "kobake/ClipboardText", "id": "0c15a999f607b57eac8d1ec7f854fe6bc4428450", "size": "436", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Program.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "2475"}]}} {"text": "\r\n\r\ncreate index PID_PATHTION_INTERACTIO on PID_PATHWAY_INTERACTION(INTERACTION_ID) PARALLEL NOLOGGING tablespace CABIO_FUT;\r\ncreate index PID_PATHTION_PATHWAY_ID on PID_PATHWAY_INTERACTION(PATHWAY_ID) PARALLEL NOLOGGING tablespace CABIO_FUT;\r\n\r\n--EXIT;\r\n", "meta": {"content_hash": "9aa38791226eabc138d885d62bd57791", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 120, "avg_line_length": 42.5, "alnum_prop": 0.803921568627451, "repo_name": 
"NCIP/cabio", "id": "67201ac24db10a27e7d9abaf19b337525d059f6f", "size": "407", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "software/cabio-database/scripts/sql_loader/no_longer_used/indexes/pid_pathway_interaction.cols.sql", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "C++", "bytes": "58"}, {"name": "CSS", "bytes": "790679"}, {"name": "HTML", "bytes": "227415"}, {"name": "Java", "bytes": "1695785"}, {"name": "JavaScript", "bytes": "5101781"}, {"name": "PHP", "bytes": "14325"}, {"name": "PLSQL", "bytes": "319454"}, {"name": "Perl", "bytes": "201567"}, {"name": "SQLPL", "bytes": "17416"}, {"name": "Shell", "bytes": "106383"}, {"name": "SourcePawn", "bytes": "3477"}, {"name": "XSLT", "bytes": "19561"}]}} {"text": "\n\n \n\n \n\n \n\n \n\n \n\n \n\n \n\n \n\n \n\n \n\n \n\n \n\n", "meta": {"content_hash": "d667573fe365d0bc0e6079e76e4ddd4c", "timestamp": "", "source": "github", "line_count": 128, "max_line_length": 74, "avg_line_length": 38.2109375, "alnum_prop": 0.6595788182375792, "repo_name": "bkhunter/bkhunter-notes", "id": "e502707defaba77da2109c74b5c78172e9805dc1", "size": "4891", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "res/layout/view_expense_item_activity.xml", "mode": "33188", "license": "mit", "language": [{"name": "Java", "bytes": "70590"}]}} {"text": "@class NSString;\n\n@interface UIKeyboardCandidateSingle : UIKeyboardCandidate {\n\tNSString* _candidate;\n}\n-(id)initWithCandidate:(id)candidate;\n// inherited: -(void)dealloc;\n// inherited: -(id)candidate;\n// inherited: -(id)copyWithZone:(NSZone*)zone;\n@end\n\n#endif\n", "meta": {"content_hash": "f0c5ca7823d052d699be2c0b83cbf283", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 60, "avg_line_length": 21.833333333333332, "alnum_prop": 0.7404580152671756, "repo_name": "codyd51/libPassword", "id": "11f24b118a2b7ec37f6b07fca982f79d10d2f76a", "size": "374", "binary": false, "copies": "1", "ref": 
"refs/heads/master", "path": "libPassPrefs/include/UIKit/UIKeyboardCandidateSingle.h", "mode": "33188", "license": "mit", "language": []}} {"text": "from typing import List, Optional\nimport os.path\nfrom pathlib import Path\nfrom collections import ChainMap\nimport json\n\nfrom click import BadParameter\nfrom requests import Session\nfrom doit.action import CmdAction\nfrom doit.tools import create_folder, config_changed\n\nfrom elm_doc import elm_project\nfrom elm_doc import elm_codeshift\nfrom elm_doc import elm_parser\nfrom elm_doc.elm_project import ElmPackage, ElmProject, ProjectConfig, ModuleName\nfrom elm_doc.run_config import RunConfig, Validate\nfrom elm_doc.tasks import package as package_tasks\nfrom elm_doc.utils import Namespace\n\n\nclass actions(Namespace):\n def write_project_elm_json(\n project: ElmProject,\n project_config: ProjectConfig,\n project_modules: List[ModuleName],\n build_path: Path):\n elm_project_with_exposed_modules = dict(ChainMap(\n {'exposed-modules': [module for module in project_modules]},\n project.as_package(project_config).as_json(),\n ))\n elm_json_path = build_path / ElmPackage.DESCRIPTION_FILENAME\n with open(str(elm_json_path), 'w') as f:\n json.dump(elm_project_with_exposed_modules, f)\n\n def run_elm_codeshift(src_dir: Path):\n for elm_file_path in src_dir.glob('**/*.elm'):\n if elm_parser.is_port_module(elm_file_path):\n elm_codeshift.strip_ports_from_file(elm_file_path)\n\n def validate_elm_path(elm_path: Optional[Path]):\n if not elm_path:\n raise BadParameter('please specify the elm executable to use with --elm-path')\n\n class ElmMake(CmdAction):\n def __init__(self, elm_path: Path, build_path: Path, output_path: Path):\n command = [str(elm_path), 'make', '--docs', str(output_path), '--output', '/dev/null']\n super().__init__(command, cwd=str(build_path), shell=False)\n\n class SyncSources(CmdAction):\n '''Copy source files to a single directory. 
This meets the requirement of Elm\n that a package project can only have a single source directory and gives\n us an isolated environment so that Elm can run in parallel with any invocation\n of Elm within the actual project.\n '''\n\n def __init__(self, project: ElmProject, target_directory: Path):\n sources = ['{}/./'.format(os.path.normpath(source_dir))\n for source_dir in project.source_directories]\n command = ['rsync', '-a', '--delete', '--recursive', '--ignore-errors'] + sources + [str(target_directory)]\n super().__init__(command, cwd=str(project.path), shell=False)\n\n\ndef create_main_project_tasks(\n session: Session,\n project: ElmProject,\n project_config: ProjectConfig,\n run_config: RunConfig):\n task_name = '{}/{}'.format(project_config.fake_user, project_config.fake_project)\n project_modules = list(elm_project.glob_project_modules(\n project, project_config))\n project_as_package = project.as_package(project_config)\n file_dep = [run_config.elm_path] if run_config.elm_path else []\n file_dep.extend([module.path for module in project_modules])\n uptodate_config = {'elm_json': project_as_package.as_json()}\n\n build_src_dir = run_config.build_path / 'src'\n docs_actions = [\n (create_folder, (str(run_config.build_path),)),\n (actions.write_project_elm_json, (\n project,\n project_config,\n [module.name for module in project_modules],\n run_config.build_path,\n )),\n (create_folder, (str(build_src_dir),)),\n actions.SyncSources(project, build_src_dir),\n (actions.run_elm_codeshift, (build_src_dir,)),\n (actions.validate_elm_path, (run_config.elm_path,)),\n ]\n\n if isinstance(run_config, Validate):\n # don't update the final artifact; write to build dir instead\n docs_path = run_config.build_path / project.DOCS_FILENAME\n docs_actions.append(actions.ElmMake(run_config.elm_path, run_config.build_path, docs_path))\n yield {\n 'basename': 'validate_docs_json',\n 'name': task_name,\n 'actions': docs_actions,\n 'targets': [],\n 'file_dep': file_dep,\n 
'uptodate': [config_changed(uptodate_config)],\n }\n return\n\n # project docs.json\n project_output_path = package_tasks.package_docs_root(\n run_config.output_path, project_as_package)\n docs_actions.insert(0, (create_folder, (str(project_output_path),)))\n docs_path = project_output_path / project.DOCS_FILENAME\n docs_actions.append(actions.ElmMake(run_config.elm_path, run_config.build_path, docs_path))\n\n yield {\n 'basename': 'build_docs_json',\n 'name': task_name,\n 'actions': docs_actions,\n 'targets': [docs_path],\n 'file_dep': file_dep,\n 'uptodate': [config_changed(uptodate_config)],\n }\n\n yield from package_tasks.create_package_page_tasks(\n package_tasks.Context.Project,\n session,\n project_as_package,\n [module.name for module in project_modules],\n run_config)\n", "meta": {"content_hash": "4a85bf4941be040c258baa7ba3ca9e22", "timestamp": "", "source": "github", "line_count": 126, "max_line_length": 119, "avg_line_length": 40.56349206349206, "alnum_prop": 0.6388182351790256, "repo_name": "ento/elm-doc", "id": "ffe0d0003529c06405aec67bc31966fa695228f3", "size": "5111", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/elm_doc/tasks/project.py", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "Elm", "bytes": "280"}, {"name": "Nix", "bytes": "3275"}, {"name": "Python", "bytes": "116334"}, {"name": "Shell", "bytes": "778"}]}} {"text": "package org.cyclops.integrateddynamics.api.evaluate.variable;\n\n/**\n * A value type that can be null.\n * @author rubensworks\n */\npublic interface IValueTypeNullable extends IValueType {\n\n public boolean isNull(V a);\n\n}\n", "meta": {"content_hash": "883375b764d897e795f8c0ea63279318", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 77, "avg_line_length": 22.0, "alnum_prop": 0.743801652892562, "repo_name": "CyclopsMC/IntegratedDynamics", "id": "b9a6231daf64e25814bb93382da5ade26c6e2650", "size": "242", "binary": false, "copies": "1", "ref": 
"refs/heads/master-1.19", "path": "src/main/java/org/cyclops/integrateddynamics/api/evaluate/variable/IValueTypeNullable.java", "mode": "33188", "license": "mit", "language": [{"name": "Java", "bytes": "3437058"}]}} {"text": "The purpose of this guide is to provide the reader with step by step instructions on how to deploy Kubernetes on vSphere infrastructure. The instructions use `kubeadm`, a tool built to provide best-practice \u201cfast paths\u201d for creating Kubernetes clusters. The reader will also learn how to deploy the Container Storage Interface and Cloud Provider Interface plugins for vSphere specific operations. At the end of this tutorial you will have a fully running K8s on vSphere environment that allows for dynamic provisioning of volumes.\n\n## Prerequisites\n\nThis section will cover the prerequisites that need to be in place before attempting the deployment.\n\n### vSphere requirements\n\nvSphere 6.7U3 (or later) is a prerequisite for using CSI and CPI at the time of writing. This may change going forward, and the documentation will be updated to reflect any changes in this support statement. If you are on a vSphere version that is below 6.7 U3, you can either upgrade vSphere to 6.7U3 or follow one of the tutorials for earlier vSphere versions. Here is the tutorial on deploying Kubernetes with kubeadm, using the VCP - [Deploying Kubernetes using kubeadm with the vSphere Cloud Provider (in-tree)](./k8s-vcp-on-vsphere-with-kubeadm.md).\n\n### Firewall requirements\n\nProviding the K8s master node(s) access to the vCenter management interface will be sufficient, given the CPI and CSI pods are deployed on the master node(s). 
Should these components be deployed on worker nodes or otherwise - those nodes will also need access to the vCenter management interface.\n\nIf you want to use topology-aware volume provisioning and the late binding feature using `zone`/`region`, the node need to discover its topology by connecting to the vCenter, for this every node should be able to communicate to the vCenter. You can disable this optional feature if you want to open only the master node to the vCenter management interface.\n\n### Recommended Guest Operating System\n\nVMware recommends that you create a virtual machine template using Guest OS Ubuntu 18.04.1 LTS (Bionic Beaver) 64-bit PC (AMD64) Server. Check it out on [VMware PartnerWeb](http://partnerweb.vmware.com/GOSIG/Ubuntu_18_04_LTS.html). This template is cloned to act as base images for your Kubernetes cluster. For instructions on how to do this, please refer to the guidance provided in [this blog post by Myles Gray of VMware](https://blah.cloud/kubernetes/creating-an-ubuntu-18-04-lts-cloud-image-for-cloning-on-vmware/). Ensure that SSH access is enabled on all nodes. This must be done in order to run commands on both the Kubernetes master and worker nodes in this guide.\n\n### Virtual Machine Hardware requirements\n\nVirtual Machine Hardware must be version 15 or higher. For Virtual Machine CPU and Memory requirements, size adequately based on workload requirements.\nVMware also recommend that virtual machines use the VMware Paravirtual SCSI controller for Primary Disk on the Node VM. This should be the default, but it is always good practice to check.\nFinally, the disk.EnableUUID parameter must be set for each node VMs. 
This step is necessary so that the VMDK always presents a consistent UUID to the VM, thus allowing the disk to be mounted properly.\nIt is recommended to not take snapshots of CNS node VMs to avoid errors and unpredictable behavior.\n\n### Docker Images\n\nThe following is the list of docker images that are required for the installation of CSI and CPI on Kubernetes. These images are automatically pulled in when CSI and CPI manifests are deployed.\nVMware distributes and recommends the following images:\n\n```bash\ngcr.io/cloud-provider-vsphere/csi/release/driver:v1.0.1\ngcr.io/cloud-provider-vsphere/csi/release/syncer:v1.0.1\nhttp://gcr.io/cloud-provider-vsphere/cpi/release/manager:v1.0.0\n```\n\nIn addition, you can use the following images or any of the open source or commercially available container images appropriate for the CSI deployment. Note that the tags reference the version of various components. This will change with future versions:\n\n```bash\nquay.io/k8scsi/csi-provisioner:v1.2.2\nquay.io/k8scsi/csi-attacher:v1.1.1\nquay.io/k8scsi/csi-node-driver-registrar:v1.1.0\nquay.io/k8scsi/livenessprobe:v1.1.0\nk8s.gcr.io/kube-apiserver:v1.14.2\nk8s.gcr.io/kube-controller-manager:v1.14.2\nk8s.gcr.io/kube-scheduler:v1.14.2\nk8s.gcr.io/kube-proxy:v1.14.2\nk8s.gcr.io/pause:3.1\nk8s.gcr.io/etcd:3.3.10\nk8s.gcr.io/coredns:1.3.1\n```\n\n### Tools\n\nIf you plan to deploy Kubernetes on vSphere from a MacOS environment, the `brew` package manager may be used to install and manage the necessary tools. If using Linux or Windows environment to initiate the deployment, links to the tools are included. 
Follow the tool specific instructions for installing the tools on the different operating systems.\n\nFor each tool, the brew install command for MacOS is shown here.\n\n* `brew` - \n* `govc` - brew tap govmomi/tap/govc && brew install govmomi/tap/govc\n* `kubectl` - brew install kubernetes-cli\n* `tmux` (optional) - brew install tmux\n\nHere are the links to the tools and install instructions for other operating systems:\n\n* [govc for other Operating Systems](https://github.com/vmware/govmomi/tree/master/govc) - version 0.20.0 or higher recommended\n* [kubectl for other Operating Systems](https://kubernetes.io/docs/tasks/tools/install-kubectl/)\n* [tmux for other Operating Systems](https://github.com/tmux/tmux)\n\n## Setting up VMs and Guest OS\n\nThe next step is to install the necessary Kubernetes components on the Ubuntu OS virtual machines. Some components must be installed on all of the nodes. In other cases, some of the components need only be installed on the master, and in other cases, only the workers. In each case, where the components are installed is highlighted.\nAll installation and configuration commands should be executed with root privilege. 
You can switch to the root environment using the \"sudo su\" command.\nSetup steps required on all nodes\nThe following section details the steps that are needed on both the master and worker nodes.\n\n### Install VMTools (if necessary)\n\nFor more information about VMTools including installation, please visit the [official documentation](https://docs.vmware.com/en/VMware-vSphere/6.7/com.vmware.vsphere.html.hostclient.doc/GUID-28C39A00-743B-4222-B697-6632E94A8E72.html).\n\n### disk.EnableUUID=1\n\nThe following govc commands will set the disk.EnableUUID=1 on all nodes.\n\n```bash\n# export GOVC_INSECURE=1\n# export GOVC_URL='https://'\n# export GOVC_USERNAME=VC_Admin_User\n# export GOVC_PASSWORD=VC_Admin_Passwd\n\n# govc ls\n/datacenter/vm\n/datacenter/network\n/datacenter/host\n/datacenter/datastore\n```\n\nTo retrieve all Node VMs, use the following command:\n\n```bash\n# govc ls //vm\n/datacenter/vm/k8s-node3\n/datacenter/vm/k8s-node4\n/datacenter/vm/k8s-node1\n/datacenter/vm/k8s-node2\n/datacenter/vm/k8s-master\n```\n\nTo use govc to enable Disk UUID, use the following command:\n\n```bash\n# govc vm.change -vm '/datacenter/vm/k8s-node1' -e=\"disk.enableUUID=1\"\n# govc vm.change -vm '/datacenter/vm/k8s-node2' -e=\"disk.enableUUID=1\"\n# govc vm.change -vm '/datacenter/vm/k8s-node3' -e=\"disk.enableUUID=1\"\n# govc vm.change -vm '/datacenter/vm/k8s-node4' -e=\"disk.enableUUID=1\"\n# govc vm.change -vm '/datacenter/vm/k8s-master' -e=\"disk.enableUUID=1\"\n```\n\nFurther information on disk.enableUUID can be found in [VMware Knowledgebase Article 52815](https://kb.vmware.com/s/article/52815).\n\n### Upgrade Virtual Machine Hardware\n\nVM Hardware should be at version 15 or higher.\n\n```bash\n# govc vm.upgrade -version=15 -vm '/datacenter/vm/k8s-node1'\n# govc vm.upgrade -version=15 -vm '/datacenter/vm/k8s-node2'\n# govc vm.upgrade -version=15 -vm '/datacenter/vm/k8s-node3'\n# govc vm.upgrade -version=15 -vm '/datacenter/vm/k8s-node4'\n# govc vm.upgrade 
-version=15 -vm '/datacenter/vm/k8s-master'\n```\n\nCheck the VM Hardware version after running the above command:\n\n```bash\n# govc vm.option.info '/datacenter/vm/k8s-node1' | grep HwVersion\nHwVersion: 15\n```\n\n### Disable Swap\n\nSSH into all K8s worker nodes and disable swap on all nodes including master node. This is a prerequisite for kubeadm. IF you have followed the previous guidance on how to create the OS template image, this step will have already been implemented.\n\n```bash\n# swapoff -a\n# vi /etc/fstab\n... remove any swap entry from this file ...\n```\n\n### Install Docker CE\n\nThe following steps should be used to install the container runtime on all of the nodes. Docker CE 18.06 must be used. Kubernetes has explicit supported versions, so it has to be this version\n\nFirst, update the apt package index.\n\n```bash\n# apt update\n```\n\nThe next step is to install packages to allow apt to use a repository over HTTPS.\n\n```bash\n# apt install ca-certificates software-properties-common \\\napt-transport-https curl -y\n```\n\nNow add Docker\u2019s official GPG key.\n\n```bash\n# curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -\n```\n\nTo complete the install, add the docker apt repository.\n\n```bash\n# add-apt-repository \"deb [arch=amd64] https://download.docker.com/linux/ubuntu bionic stable\"\n```\n\nNow we can install Docker CE. 
To install a specific version, replace the version string with the desired version number.\n\n```bash\n# apt update\n# apt install docker-ce=18.06.0~ce~3-0~ubuntu -y\n```\n\nFinally, setup the daemon parameters, like log rotation and cgroups.\n\n```bash\n# tee /etc/docker/daemon.json >/dev/null </etc/apt/sources.list.d/kubernetes.list\ndeb https://apt.kubernetes.io/ kubernetes-xenial main\nEOF\n```\n\nNext, install kubelet, kubectl and kubeadm.\n\n```bash\n# apt update\n\n# apt install -qy kubeadm=1.14.2-00 kubelet=1.14.2-00 kubectl=1.14.2-00\n```\n\nFinally, hold Kubernetes packages at their installed version so as not to upgrade unexpectedly on an apt upgrade.\n\n```bash\n# apt-mark hold kubelet kubeadm kubectl\n```\n\n### Setup step for flannel (Pod Networking)\n\nWe will be using flannel for pod networking in this example, so the below needs to be run on all nodes to pass bridged IPv4 traffic to iptables chains:\n\n```bash\n# sysctl net.bridge.bridge-nf-call-iptables=1\n```\n\nThat completes the common setup steps across both masters and worker nodes. We will now look at the steps involved in enabling the vSphere Cloud Provider Interface (CPI) and Container Storage Interface (CSI) before we are ready to deploy our Kubernetes cluster. Pay attention to where the steps are carried out, which will be either on the master or the worker nodes.\n\n## Installing the Kubernetes master node(s)\n\nAgain, these steps are only carried out on the master. Use kubeadminit to initialize the master node. In order to initialize the master node, we need to first of all create a `kubeadminit.yaml` manifest file that needs to be passed to the `kubeadm` command. 
Note the reference to an external cloud provider in the `nodeRegistration` part of the manifest.\n\n```bash\n# tee /etc/kubernetes/kubeadminit.yaml >/dev/null < discovery.yaml\n```\n\nThe `discovery.yaml` file will need to be copied to `/etc/kubernetes/discovery.yaml` on each of the worker nodes.\n\n## Installing the Kubernetes worker node(s)\n\nPerform this task on the worker nodes. Verify that you have installed Docker CE, kubeadm, etc, on the worker nodes before attempting to add them to the master.\n\nTo have the worker node(s) join to the master, a worker node kubeadm config yaml file must be created. Notice it is using `/etc/kubernetes/discovery.yaml` as the input for master discovery. We will show how to copy the file from the workers to the master in the next step. Also, notice that the token used in the worker node config is the same as we put in the master `kubeadminitmaster.yaml` configuration above. Finally, we once more specify that the cloud-provider is external for the workers, as we are going to use the new CPI.\n\n```bash\n# tee /etc/kubernetes/kubeadminitworker.yaml >/dev/null </dev/null </dev/null <`, the vCenter IP address in the keys of `stringData`, and the `username` and `password` for each key.\n\nThe secret for the vCenter at `10.0.0.1` might look like the following:\n\n```yaml\napiVersion: v1\nkind: Secret\nmetadata:\n name: cpi-engineering-secret\n namespace: kube-system\nstringData:\n 10.0.0.1.username: \"administrator@vsphere.local\"\n 10.0.0.1.password: \"password\"\n```\n\nThis is a second Secret example, this time showing an alternative format. This alternative format allows for IPv6 server addresses. 
This format requires server_{id}, username_{id} and password_{id} entries, where the entries have a common suffix per server:\n\n```yaml\napiVersion: v1\nkind: Secret\nmetadata:\n name: cpi-engineering-secret\n namespace: kube-system\nstringData:\n server_prod: fd01::102\n username_prod: \"administrator@vsphere.local\"\n password_prod: \"password\"\n server_test: 10.0.0.2\n username_test: \"developer@vsphere.local\"\n password_test: \"sekret\"\n```\n\nThen to create the secret, run the following command replacing the name of the YAML file with the one you have used:\n\n```bash\n# kubectl create -f cpi-engineering-secret.yaml\n```\n\nVerify that the credential secret is successfully created in the kube-system namespace.\n\n```bash\n# kubectl get secret cpi-engineering-secret --namespace=kube-system\nNAME TYPE DATA AGE\ncpi-engineering-secret Opaque 1 43s\n```\n\nIf you have multiple vCenters as in the example vsphere.conf above, your Kubernetes Secret YAML could look like the following to storage the vCenter credentials for vCenters at `1.1.1.1` and `192.168.0.1`:\n\n```yaml\napiVersion: v1\nkind: Secret\nmetadata:\n name: cpi-global-secret\n namespace: kube-system\nstringData:\n 1.1.1.1.username: \"administrator@vsphere.local\"\n 1.1.1.1.password: \"password\"\n 192.168.0.1.username: \"administrator@vsphere.local\"\n 192.168.0.1.password: \"password\"\n```\n\n### Zones and Regions for Pod and Volume Placement - CPI\n\nKubernetes allows you to place Pods and Persistent Volumes on specific parts of the underlying infrastructure, e.g. different DataCenters or different vCenters, using the concept of Zones and Regions. However, to use placement controls, the required configuration steps needs to be put in place at Kubernetes deployment time, and require additional settings in the vSphere.conf of both the CPI and CSI. 
For more information on how to implement zones/regions support, [there is a zones/regions tutorial on how to do it here](https://cloud-provider-vsphere.sigs.k8s.io/tutorials/deploying_cpi_with_multi_dc_vc_aka_zones.html).\n\nIf you are not interested in K8s object placement, this section can be ignored, and you can proceed with the remaining CPI setup steps.\n\n### Check that all nodes are tainted\n\nBefore installing vSphere Cloud Controller Manager, make sure all nodes are tainted with `node.cloudprovider.kubernetes.io/uninitialized=true:NoSchedule`. When the kubelet is started with \u201cexternal\u201d cloud provider, this taint is set on a node to mark it as unusable. After a controller from the cloud provider initializes this node, the kubelet removes this taint.\n\n```bash\n# kubectl describe nodes | egrep \"Taints:|Name:\"\nName: k8s-master\nTaints: node-role.kubernetes.io/master:NoSchedule\nName: k8s-node1\nTaints: node.cloudprovider.kubernetes.io/uninitialized=true:NoSchedule\nName: k8s-node2\nTaints: node.cloudprovider.kubernetes.io/uninitialized=true:NoSchedule\nName: k8s-node3\nTaints: node.cloudprovider.kubernetes.io/uninitialized=true:NoSchedule\nName: k8s-node4\nTaints: node.cloudprovider.kubernetes.io/uninitialized=true:NoSchedule\n```\n\n### Deploy the CPI manifests\n\nThere are 3 manifests that must be deployed to install the vSphere Cloud Provider Interface. The following example applies the RBAC roles and the RBAC bindings to your Kubernetes cluster. 
It also deploys the Cloud Controller Manager in a DaemonSet.\n\n```bash\n# kubectl apply -f https://raw.githubusercontent.com/kubernetes/cloud-provider-vsphere/master/manifests/controller-manager/cloud-controller-manager-roles.yaml\nclusterrole.rbac.authorization.k8s.io/system:cloud-controller-manager created\n\n# kubectl apply -f https://raw.githubusercontent.com/kubernetes/cloud-provider-vsphere/master/manifests/controller-manager/cloud-controller-manager-role-bindings.yaml\nclusterrolebinding.rbac.authorization.k8s.io/system:cloud-controller-manager created\n\n# kubectl apply -f https://github.com/kubernetes/cloud-provider-vsphere/raw/master/manifests/controller-manager/vsphere-cloud-controller-manager-ds.yaml\nserviceaccount/cloud-controller-manager created\ndaemonset.extensions/vsphere-cloud-controller-manager created\nservice/vsphere-cloud-controller-manager created\n```\n\n### Verify that the CPI has been successfully deployed\n\nVerify vsphere-cloud-controller-manager is running and all other system pods are up and running (note that the coredns pods were not running previously - they should be running now as the taints have been removed by installing the CPI):\n\n```bash\n# kubectl get pods --namespace=kube-system\nNAME READY STATUS RESTARTS AGE\ncoredns-fb8b8dccf-bq7qq 1/1 Running 0 71m\ncoredns-fb8b8dccf-r47q2 1/1 Running 0 71m\netcd-k8s-master 1/1 Running 0 69m\nkube-apiserver-k8s-master 1/1 Running 0 70m\nkube-controller-manager-k8s-master 1/1 Running 0 69m\nkube-flannel-ds-amd64-7kmk9 1/1 Running 0 38m\nkube-flannel-ds-amd64-dtvbg 1/1 Running 0 63m\nkube-flannel-ds-amd64-hq57c 1/1 Running 0 30m\nkube-flannel-ds-amd64-j7g4s 1/1 Running 0 22m\nkube-flannel-ds-amd64-q4zsn 1/1 Running 0 21m\nkube-proxy-6jcng 1/1 Running 0 30m\nkube-proxy-bh8kh 1/1 Running 0 21m\nkube-proxy-rb9xp 1/1 Running 0 22m\nkube-proxy-srhpj 1/1 Running 0 71m\nkube-proxy-vh4lg 1/1 Running 0 38m\nkube-scheduler-k8s-master 1/1 Running 0 70m\nvsphere-cloud-controller-manager-549hb 1/1 
Running 0 25s\n```\n\n### Check that all nodes are untainted\n\nVerify node.cloudprovider.kubernetes.io/uninitialized taint is removed from all nodes.\n\n```bash\n# kubectl describe nodes | egrep \"Taints:|Name:\"\nName: k8s-master\nTaints: node-role.kubernetes.io/master:NoSchedule\nName: k8s-node1\nTaints: \nName: k8s-node2\nTaints: \nName: k8s-node3\nTaints: \nName: k8s-node4\nTaints: \n```\n\nNote: If you happen to make an error with the `vsphere.conf`, simply delete the CPI components and the configMap, make any necessary edits to the configMap `vSphere.conf` file, and reapply the steps above.\n\nYou may now remove the `vsphere.conf` file created at `/etc/kubernetes/`.\n\n## Install vSphere Container Storage Interface Driver\n\nNow that the CPI is installed, we can focus on the CSI. Please visit to install vSphere CSI Driver.\n\n## Sample manifests to test CSI driver functionality\n\nThe following are some sample manifests that can be used to verify that some provisioning workflows using the vSphere CSI driver are working as expected.\n\nThe example provided here will show how to create a stateful containerized application and use the vSphere Client to access the volumes that back your application.\nThe following sample workflow shows how to deploy a MongoDB application with one replica.\n\nWhile performing the workflow tasks, you alternate the roles of a vSphere user and Kubernetes user. The tasks use the following items:\n\n* Storage class YAML file\n* MongoDB service YAML file\n* MongoDB StatefulSet YAML file\n\n### Create a Storage Policy\n\nThe virtual disk (VMDK) that will back your containerized application needs to meet specific storage requirements. 
As a vSphere user, you create a VM storage policy based on the requirements provided to you by the Kubernetes user.\nThe storage policy will be associated with the VMDK backing your application.\nIf you have multiple vCenter Server instances in your environment, create the VM storage policy on each instance. Use the same policy name across all instances.\n\n* In the vSphere Client, on the main landing page, select `VM Storage Policies`.\n* Under `Policies and Profiles`, select `VM Storage Policies`.\n* Click `Create VM Storage Policy`.\n* Enter the policy name and description, and click Next. For the purposes of this demonstration we will name it `Space-Efficient`.\n* On the Policy structure page under Datastore-specific rules, select `Enable rules for \"vSAN\" storage` and click Next.\n* On the vSAN page, we will keep the defaults for this policy, which is `standard cluster` and `RAID-1 (Mirroring)`.\n* On the Storage compatibility page, review the list of vSAN datastores that match this policy and click Next.\n* On the Review and finish page, review the policy settings, and click Finish.\n\n![Space-Efficient Storage Policy Review](https://raw.githubusercontent.com/kubernetes/cloud-provider-vsphere/master/docs/images/space-efficient.png)\n\nYou can now inform the Kubernetes user of the storage policy name. The VM storage policy you created will be used as a part of storage class definition for dynamic volume provisioning.\n\n### Create a StorageClass\n\nAs a Kubernetes user, define and deploy the storage class that references previously created VM storage policy. We will use kubectl to perform the following steps. Generally, you provide the information to kubectl in a YAML file. kubectl converts the information to JSON when making the API request. We will now create a StorageClass YAML file that describes storage requirements for the container and references the VM storage policy to be used. 
The `csi.vsphere.vmware.com` is the name of the vSphere CSI provisioner, and is what is placed in the provisioner field in the StorageClass yaml. The following sample YAML file includes the Space-Efficient storage policy that you created earlier using the vSphere Client. The resulting persistent volume VMDK is placed on a compatible datastore with the maximum free space that satisfies the Space-Efficient storage policy requirements.\n\n```bash\n# cat mongodb-storageclass.yaml\nkind: StorageClass\napiVersion: storage.k8s.io/v1\nmetadata:\n name: mongodb-sc\n annotations:\n storageclass.kubernetes.io/is-default-class: \"true\"\nprovisioner: csi.vsphere.vmware.com\nparameters:\n storagepolicyname: \"Space-Efficient\"\n fstype: ext4\n```\n\n```bash\n# kubectl create -f mongodb-storageclass.yaml\nstorageclass.storage.k8s.io/mongodb-sc created\n```\n\n```bash\n# kubectl get storageclass mongodb-sc\nNAME PROVISIONER AGE\nmongodb-sc csi.vsphere.vmware.com 5s\n```\n\n### Create a Service\n\nAs a Kubernetes user, define and deploy a Kubernetes Service. The Service provides a networking endpoint for the application.\nThe following is a sample YAML file that defines the service for the MongoDB application.\n\n```bash\n# cat mongodb-service.yaml\napiVersion: v1\nkind: Service\nmetadata:\n name: mongodb-service\n labels:\n name: mongodb-service\nspec:\n ports:\n - port: 27017\n targetPort: 27017\n clusterIP: None\n selector:\n role: mongo\n```\n\n```bash\n# kubectl create -f mongodb-service.yaml\nservice/mongodb-service created\n```\n\n```bash\n# kubectl get svc mongodb-service\nNAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE\nmongodb-service ClusterIP None 27017/TCP 15s\n```\n\n### Create and Deploy a StatefulSet\n\nAs a Kubernetes user, define and deploy a StatefulSet that specifies the number of replicas to be used for your application.\nFirst, create secret for the key file. 
MongoDB will use this key to communicate with the internal cluster.\n\n```bash\n# openssl rand -base64 741 > key.txt\n```\n\n```bash\n# kubectl create secret generic shared-bootstrap-data --from-file=internal-auth-mongodb-keyfile=key.txt\nsecret/shared-bootstrap-data created\n```\n\nNext we need to define specifications for the containerized application in the StatefulSet YAML file . The following sample specification requests one instance of the MongoDB application, specifies the external image to be used, and references the mongodb-sc storage class that you created earlier. This storage class maps to the Space-Efficient VM storage policy that you defined previously on the vSphere Client side.\n\nNote that this manifest expects that the Kubernetes node can reach the image called `mongo:3.4`. If your Kubernetes nodes are not able to reach external repositories, then this YAML file needs to be modified to reach your local internal repo. Of course, this repo also needs to contain the Mongo image. 
We have set the number of replicas to 3, indicating that there will be 3 Pods, 3 PVCs and 3 PVs instantiated as part of this StatefulSet.\n\n```bash\n# cat mongodb-statefulset.yaml\napiVersion: apps/v1\nkind: StatefulSet\nmetadata:\n name: mongod\nspec:\n serviceName: mongodb-service\n replicas: 3\n selector:\n matchLabels:\n role: mongo\n environment: test\n replicaset: MainRepSet\n template:\n metadata:\n labels:\n role: mongo\n environment: test\n replicaset: MainRepSet\n spec:\n containers:\n - name: mongod-container\n image: mongo:3.4\n command:\n - \"numactl\"\n - \"--interleave=all\"\n - \"mongod\"\n - \"--bind_ip\"\n - \"0.0.0.0\"\n - \"--replSet\"\n - \"MainRepSet\"\n - \"--auth\"\n - \"--clusterAuthMode\"\n - \"keyFile\"\n - \"--keyFile\"\n - \"/etc/secrets-volume/internal-auth-mongodb-keyfile\"\n - \"--setParameter\"\n - \"authenticationMechanisms=SCRAM-SHA-1\"\n resources:\n requests:\n cpu: 0.2\n memory: 200Mi\n ports:\n - containerPort: 27017\n volumeMounts:\n - name: secrets-volume\n readOnly: true\n mountPath: /etc/secrets-volume\n - name: mongodb-persistent-storage-claim\n mountPath: /data/db\n volumes:\n - name: secrets-volume\n secret:\n secretName: shared-bootstrap-data\n defaultMode: 256\n volumeClaimTemplates:\n - metadata:\n name: mongodb-persistent-storage-claim\n annotations:\n volume.beta.kubernetes.io/storage-class: \"mongodb-sc\"\n spec:\n accessModes: [ \"ReadWriteOnce\" ]\n resources:\n requests:\n storage: 1Gi\n```\n\n```bash\n# kubectl create -f mongodb-statefulset.yaml\nstatefulset.apps/mongo created\n```\n\nVerify that the MongoDB application has been deployed.\nWait for pods to start running and PVCs to be created for each replica.\n\n```bash\n# kubectl get statefulset mongod\nNAME READY AGE\nmongod 3/3 96s\n```\n\n```bash\n# kubectl get pod -l role=mongo\nNAME READY STATUS RESTARTS AGE\nmongod-0 1/1 Running 0 13h\nmongod-1 1/1 Running 0 13h\nmongod-2 1/1 Running 0 13h\n```\n\n```bash\n# kubectl get pvc\nNAME STATUS VOLUME 
CAPACITY ACCESS MODES STORAGECLASS AGE\nmongodb-persistent-storage-claim-mongod-0 Bound pvc-ea98b22a-b8cf-11e9-b1d3-005056a0e4f0 1Gi RWO mongodb-sc 13h\nmongodb-persistent-storage-claim-mongod-1 Bound pvc-0267fa7d-b8d0-11e9-b1d3-005056a0e4f0 1Gi RWO mongodb-sc 13h\nmongodb-persistent-storage-claim-mongod-2 Bound pvc-24d86a37-b8d0-11e9-b1d3-005056a0e4f0 1Gi RWO mongodb-sc 13h\n```\n\n### Set up the Mongo replica set configuration\n\nTo setup the Mongo replica set configuration, we need to connect to one of the mongod container processes to configure the replica set.\nRun the following command to connect to the first container. In the shell, initiate the replica set. You can rely on the host names to be the same, due to having employed the StatefulSet.\n\n```bash\n# kubectl exec -it mongod-0 -c mongod-container bash\nroot@mongod-0:/# mongo\nMongoDB shell version v3.4.22\nconnecting to: mongodb://127.0.0.1:27017\nMongoDB server version: 3.4.22\nWelcome to the MongoDB shell.\nFor interactive help, type \"help\".\nFor more comprehensive documentation, see\n http://docs.mongodb.org/\nQuestions? Try the support group\n http://groups.google.com/group/mongodb-user\n> rs.initiate({_id: \"MainRepSet\", version: 1, members: [\n... { _id: 0, host : \"mongod-0.mongodb-service.default.svc.cluster.local:27017\" },\n... { _id: 1, host : \"mongod-1.mongodb-service.default.svc.cluster.local:27017\" },\n... { _id: 2, host : \"mongod-2.mongodb-service.default.svc.cluster.local:27017\" }\n... ]});\n{ \"ok\" : 1 }\n```\n\nThis makes mongodb-0 the primary node and other two nodes are secondary.\n\n### Verify Cloud Native Storage functionality is working in vSphere\n\nAfter your application gets deployed, its state is backed by the VMDK file associated with the specified storage policy. As a vSphere administrator, you can review the VMDK that is created for your container volume.\nIn this step, we will verify that the Cloud Native Storage feature released with vSphere 6.7U3 is working. 
To go to the CNS UI, login to the vSphere client, then navigate to Datacenter \u2192 Monitor \u2192 Cloud Native Storage \u2192 Container Volumes and observe that the newly created persistent volumes are present. These should match the `kubectl get pvc` output from earlier. You can also monitor their storage policy compliance status.\n\n![Cloud Native Storage view of the MongoDB Persistent Volumes](https://raw.githubusercontent.com/kubernetes/cloud-provider-vsphere/master/docs/images/cns-mongo-pvs-labels.png)\n\nThat completes the testing. CSI, CPI and CNS are all now working.\n", "meta": {"content_hash": "45327874560df60c2e202694a20a0fee", "timestamp": "", "source": "github", "line_count": 953, "max_line_length": 883, "avg_line_length": 44.92969569779643, "alnum_prop": 0.7326124527068055, "repo_name": "kubernetes/cloud-provider-vsphere", "id": "72cb5d96b9a2c02bb1625fe6cce8d2c24a68c374", "size": "42898", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/book/tutorials/kubernetes-on-vsphere-with-kubeadm.md", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Dockerfile", "bytes": "13339"}, {"name": "Go", "bytes": "862422"}, {"name": "Makefile", "bytes": "25413"}, {"name": "Mustache", "bytes": "1770"}, {"name": "Shell", "bytes": "39575"}]}} {"text": "\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\nAbout this guide and the Network Server documentation\r\n\r\n\r\n\r\n\r\n

About this guide and the Network Server documentation

\r\n\r\n\r\n
\r\n

This guide assumes that you are familiar with Derby features and tuning. Before reading this guide, you should first learn about basic Derby functionality\r\nby reading the Derby Developer's Guide. Also, because multi-user environments typically\r\nhave performance and tuning issues, you should read Tuning Derby.

\r\n\r\n
\r\n
\r\n
\r\n\r\n
\r\n
Related concepts
\r\n\r\n
\r\n
\r\n\r\n\r\n\r\n", "meta": {"content_hash": "c6f548d4af5e2bb9c99a2c88bc3f3e6f", "timestamp": "", "source": "github", "line_count": 59, "max_line_length": 261, "avg_line_length": 51.728813559322035, "alnum_prop": 0.713302752293578, "repo_name": "mminella/jsr-352-ri-tck", "id": "c9444e9e5e21f67be8e779765382e49fc95bdcae", "size": "3052", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "JSR352.BinaryDependencies/shipped/derby/docs/html/adminguide/cadminov38650.html", "mode": "33261", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "1743336"}, {"name": "Perl", "bytes": "80042"}, {"name": "Racket", "bytes": "180"}, {"name": "Ruby", "bytes": "1444"}, {"name": "Shell", "bytes": "45633"}]}} {"text": "// Copyright 2007-2010 Baptiste Lepilleur\n// Distributed under MIT license, or public domain if desired and\n// recognized in your jurisdiction.\n// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE\n\n#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED\n#define LIB_JSONCPP_JSON_TOOL_H_INCLUDED\n\n/* This header provides common string manipulation support, such as UTF-8,\n * portable conversion from/to string...\n *\n * It is an internal header that must not be exposed.\n */\n\nnamespace Json {\n\n/// Converts a unicode code-point to UTF-8.\nstatic inline std::string codePointToUTF8(unsigned int cp) {\n std::string result;\n\n // based on description from http://en.wikipedia.org/wiki/UTF-8\n\n if (cp <= 0x7f) {\n result.resize(1);\n result[0] = static_cast(cp);\n } else if (cp <= 0x7FF) {\n result.resize(2);\n result[1] = static_cast(0x80 | (0x3f & cp));\n result[0] = static_cast(0xC0 | (0x1f & (cp >> 6)));\n } else if (cp <= 0xFFFF) {\n result.resize(3);\n result[2] = static_cast(0x80 | (0x3f & cp));\n result[1] = 0x80 | static_cast((0x3f & (cp >> 6)));\n result[0] = 0xE0 | static_cast((0xf & (cp >> 12)));\n } else if (cp <= 0x10FFFF) {\n result.resize(4);\n result[3] = static_cast(0x80 | (0x3f & cp));\n result[2] = 
static_cast(0x80 | (0x3f & (cp >> 6)));\n result[1] = static_cast(0x80 | (0x3f & (cp >> 12)));\n result[0] = static_cast(0xF0 | (0x7 & (cp >> 18)));\n }\n\n return result;\n}\n\n/// Returns true if ch is a control character (in range [0,32[).\nstatic inline bool isControlCharacter(char ch) { return ch > 0 && ch <= 0x1F; }\n\nenum {\n /// Constant that specify the size of the buffer that must be passed to\n /// uintToString.\n uintToStringBufferSize = 3 * sizeof(LargestUInt) + 1\n};\n\n// Defines a char buffer for use with uintToString().\ntypedef char UIntToStringBuffer[uintToStringBufferSize];\n\n/** Converts an unsigned integer to string.\n * @param value Unsigned interger to convert to string\n * @param current Input/Output string buffer.\n * Must have at least uintToStringBufferSize chars free.\n */\nstatic inline void uintToString(LargestUInt value, char*& current) {\n *--current = 0;\n do {\n *--current = char(value % 10) + '0';\n value /= 10;\n } while (value != 0);\n}\n\n/** Change ',' to '.' 
everywhere in buffer.\n *\n * We had a sophisticated way, but it did not work in WinCE.\n * @see https://github.com/open-source-parsers/jsoncpp/pull/9\n */\nstatic inline void fixNumericLocale(char* begin, char* end) {\n while (begin < end) {\n if (*begin == ',') {\n *begin = '.';\n }\n ++begin;\n }\n}\n\n} // namespace Json {\n\n#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED", "meta": {"content_hash": "948136242f3847cd8316adcba9e59d2b", "timestamp": "", "source": "github", "line_count": 87, "max_line_length": 80, "avg_line_length": 31.080459770114942, "alnum_prop": 0.6479289940828402, "repo_name": "Zuppka/RLG", "id": "5e66b117b96fec78e548981c6132d972678f9696", "size": "2704", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "include/json_tool.h", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "189304"}, {"name": "C++", "bytes": "1854318"}]}} {"text": "distinct()->lists('name','id');\n }\n\n public function getName()\n {\n return $this->attributes['name'];\n }\n\n}\n", "meta": {"content_hash": "b5af477c5b65a19be5e61913fb165b32", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 67, "avg_line_length": 14.44, "alnum_prop": 0.5318559556786704, "repo_name": "kamyh/ArcNotes", "id": "893b19e22f3339012410c6abd2e36a1d5dbd8127", "size": "361", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/models/Cities.php", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "7390"}, {"name": "JavaScript", "bytes": "6436"}, {"name": "PHP", "bytes": "549089"}, {"name": "Python", "bytes": "556"}]}} {"text": "SYNONYM\n\n#### According to\nThe Catalogue of Life, 3rd January 2011\n\n#### Published in\nnull\n\n#### Original name\nnull\n\n### Remarks\nnull", "meta": {"content_hash": "15b4ea027ea00f2a061025cec15ac1ae", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.23076923076923, "alnum_prop": 0.6917293233082706, "repo_name": 
"mdoering/backbone", "id": "4ea5f07221765efd9fe7e07b3f1ae82134cfc144", "size": "186", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Liliopsida/Poales/Poaceae/Hordeum/Hordeum muticum/ Syn. Hordeum muticum andicola/README.md", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "ACCEPTED\n\n#### According to\nNUB Generator [autonym]\n\n#### Published in\nnull\n\n#### Original name\nnull\n\n### Remarks\nnull", "meta": {"content_hash": "2fbce14ec94f9576820c9b1aade4dd66", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 23, "avg_line_length": 9.076923076923077, "alnum_prop": 0.6779661016949152, "repo_name": "mdoering/backbone", "id": "da088a8bc506c8e05d499b7124aa85fdca2b43f7", "size": "173", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Gentianales/Apocynaceae/Asclepias/Asclepias ovalifolia/Asclepias ovalifolia ovalifolia/README.md", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "const prerender = require('prerender')\nconst server = prerender()\nserver.start()\n", "meta": {"content_hash": "8e4986a0b7909f3324ef33525bc55ddd", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 38, "avg_line_length": 27.0, "alnum_prop": 0.7530864197530864, "repo_name": "imuntil/nodejs", "id": "d3f0f4bf0d15f0387bac88d7c049b0614c8880ae", "size": "81", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Prerender/start/server.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "110680"}, {"name": "D", "bytes": "1339"}, {"name": "HTML", "bytes": "1534621"}, {"name": "Handlebars", "bytes": "44291"}, {"name": "Java", "bytes": "393"}, {"name": "JavaScript", "bytes": "2978825"}, {"name": "Less", "bytes": "682"}, {"name": "Pug", "bytes": "21144"}, {"name": "SCSS", "bytes": "5505"}, {"name": "Shell", "bytes": "62"}, {"name": "TypeScript", "bytes": "59149"}, 
{"name": "Vue", "bytes": "179321"}]}} {"text": "var test = require('tape');\nvar shell = require('shelljs');\nvar path = require('path');\nvar fs = require('fs');\n\ntest('bundle', function(t) {\n shell.cd(__dirname);\n var indexPath = path.resolve(__dirname, 'bundle_index.js');\n var cliPath = path.resolve(__dirname, '../runtimeify.js');\n var outPath = path.resolve(__dirname, 'initrd');\n\n if (shell.exec(cliPath + ' ' + indexPath).code !== 0) {\n shell.exit(1);\n }\n\n fs.statSync(outPath);\n shell.rm(outPath);\n t.end();\n});\n", "meta": {"content_hash": "bde7529e81d211f94392f31c7e049b35", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 61, "avg_line_length": 25.36842105263158, "alnum_prop": 0.6203319502074689, "repo_name": "runtimejs/runtimeify", "id": "3279384ba2466bb989ee7a5992234ea5258acf34", "size": "482", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/test.js", "mode": "33188", "license": "apache-2.0", "language": [{"name": "JavaScript", "bytes": "6364"}]}} {"text": "title: \"Mutators\"\ndescription: \"Reference documentation for Sensu Mutators.\"\nversion: 1.1\nweight: 7\n---\n\n# Sensu Mutators\n\n## Reference documentation\n\n- [What is a Sensu mutator?](#what-is-a-sensu-mutator)\n - [The Sensu mutator specification](#the-sensu-mutator-specification)\n - [When to use a mutator](#when-to-use-a-mutator)\n- [How do Sensu mutators work?](#how-do-sensu-mutators-work)\n- [Mutator commands](#mutator-commands)\n - [What is a mutator command?](#what-is-a-mutator-command)\n - [Mutator command arguments](#mutator-command-arguments)\n - [How and where are mutator commands executed?](#how-and-where-are-mutator-commands-executed)\n- [Mutator configuration](#mutator-configuration)\n - [Example mutator definition](#example-mutator-definition)\n - [Mutator definition specification](#mutator-definition-specification)\n - [Mutator name(s)](#mutator-names)\n - [Mutator attributes](#mutator-attributes)\n\n## What are 
Sensu mutators? {#what-are-sensu-mutators}\n\nSensu mutators are executable scripts or other programs that modify [event\ndata][1] for [Sensu event handlers][2] which may expect additional or modified\nevent data (e.g. custom attributes that are not provided by the default [event\ndata specification][3].\n\n### The Sensu mutator specification\n\n- Accept input/data via `STDIN`\n- Able to parse a JSON data payload (i.e. a [event data][1])\n- Output JSON data (the modified event data) to `STDOUT` or `STDERR`\n- Produce an exit status code to indicate state:\n - `0` indicates OK\n - exit status codes other than `0` indicates a failure\n\n### When to use a mutator\n\nMany [Sensu event handlers][2] will modify [event data][1] in the course of\nprocessing an [event][9], and in many cases this is recommended because\nmodifying the event data and performing some action in memory (in the same\nprocess) will result in better performance than [executing a mutator][5] _and_ a\nhandler (two separate processes). However, when multiple handlers require\nsimilar event data modifications, mutators provide the ability to avoid code\nduplication (DRY), and simplify\nevent handler logic.\n\n## How do Sensu mutators work?\n\nSensu mutators are applied when [event handlers][2] are configured to use a\n`mutator`. Prior to executing a Handler, the Sensu server will execute the\nconfigured `mutator`. If the mutator is successfully executed, the modified\nevent data is then provided to the handler and the handler will be executed. If\nthe mutator fails to execute for any reason, an error will be logged and the\nhandler will not be executed. The complete process may be described as follows:\n\n- When the Sensu server is processing an event, it will check for the definition\n of a `mutator`. Prior to executing each handler, the Sensu server will first\n execute the configured `mutator` (if any) for the handler\n- If the mutator is successfully executed (i.e. 
if it returns an exit status\n code of `0`), the modified event data is provided to the handler and the\n handler will be executed.\n- If the mutator fails to execute (i.e. returns a non-zero exit status code, or\n does not complete execution within the configured `timeout`), an error will be\n logged and the handler will not be executed\n\nPlease refer to the [Sensu event handler definition specification][8] for more\ninformation about applying a mutator to an event handler (see the `mutator`\nattribute).\n\n## Mutator commands\n\n### What is a mutator command?\n\nEach [Sensu mutator definition][6] defines a command to be executed. Mutator\ncommands are literally executable commands which will be executed on a [Sensu\nserver][4], run as the `sensu` user. Most mutator commands are provided by\n[Sensu plugins][7].\n\n### Mutator command arguments\n\nSensu mutator `command` attributes may include command line arguments for\ncontrolling the behavior of the `command` executable. Many [Sensu mutator\nplugins][7] provide support for command line arguments for reusability.\n\n### How and where are mutator commands executed?\n\nAs mentioned above, all mutator commands are executed by a [Sensu server][4] as\nthe `sensu` user. Commands must be executable files that are discoverable on the\nSensu server system (i.e. installed in a system `$PATH` directory).\n\n_NOTE: By default, the Sensu installer packages will modify the system `$PATH`\nfor the Sensu processes to include `/etc/sensu/plugins`. As a result, executable\nscripts (e.g. plugins) located in `/etc/sensu/plugins` will be valid commands.\nThis allows `command` attributes to use \"relative paths\" for Sensu plugin\ncommands;

e.g.: `\"command\": \"check-http.rb -u https://sensuapp.org\"`_\n\n## Mutator configuration\n\n### Example mutator definition\n\nThe following is an example Sensu mutator definition, a JSON configuration file\nlocated at `/etc/sensu/conf.d/example_mutator.json`. This mutator definition\nuses an imaginary [Sensu plugin][7] called `example_mutator.rb` to modify event\ndata prior to handling the event.\n\n~~~ json\n{\n \"mutators\": {\n \"example_mutator\": {\n \"command\": \"example_mutator.rb\"\n }\n }\n}\n~~~\n\n### Mutator definition specification\n\n#### Mutator name(s)\n\nEach mutator definition has a unique mutator name, used for the definition key.\nEvery mutator definition is within the `\"mutators\": {}` definition scope.\n\n- A unique string used to name/identify the mutator\n- Cannot contain special characters or spaces\n- Validated with [Ruby regex][10] `/^[\\w\\.-]+$/.match(\"mutator-name\")`\n\n#### Mutator attributes\n\n`command`\n: description\n : The mutator command to be executed. 
The event data is passed to the process\n via `STDIN`.\n: required\n : true\n: type\n : String\n: example\n : ~~~ shell\n \"command\": \"/etc/sensu/plugins/mutated.rb\"\n ~~~\n\n`timeout`\n: description\n : The mutator execution duration timeout in seconds (hard stop).\n: required\n : false\n: type\n : Integer\n: example\n : ~~~ shell\n \"timeout\": 30\n ~~~\n\n[1]: events.html#event-data\n[2]: handlers.html\n[3]: events.html#event-data-specification\n[4]: server.html\n[5]: #how-and-where-are-mutator-commands-executed\n[6]: #mutator-definition-specification\n[7]: plugins.html\n[8]: handlers.html#handler-definition-specification\n[9]: events.html\n[10]: http://ruby-doc.org/core-2.2.0/Regexp.html\n", "meta": {"content_hash": "0db453126667cf7c39a9fc117f5c259d", "timestamp": "", "source": "github", "line_count": 168, "max_line_length": 96, "avg_line_length": 37.220238095238095, "alnum_prop": 0.7454022069406685, "repo_name": "palourde/sensu-docs", "id": "66bb9d3b8e678bf43db952808fbb1c9ce3dc111b", "size": "6257", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/1.1/reference/mutators.md", "mode": "33188", "license": "mit", "language": [{"name": "Ruby", "bytes": "74696"}, {"name": "Shell", "bytes": "12056"}]}} {"text": "\npackage com.mgmtp.perfload.core.client.web.flow;\n\nimport static com.mgmtp.perfload.core.common.util.LtUtils.checkInterrupt;\n\nimport java.util.List;\nimport java.util.ListIterator;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.UUID;\n\nimport javax.inject.Inject;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.mgmtp.perfload.core.client.config.annotations.ExecutionId;\nimport com.mgmtp.perfload.core.client.config.scope.ExecutionScoped;\nimport com.mgmtp.perfload.core.client.runner.ErrorHandler;\nimport com.mgmtp.perfload.core.client.util.PlaceholderContainer;\nimport com.mgmtp.perfload.core.client.util.WaitingTimeManager;\nimport 
com.mgmtp.perfload.core.client.web.event.RequestFlowEvent;\nimport com.mgmtp.perfload.core.client.web.event.RequestFlowEventListener;\nimport com.mgmtp.perfload.core.client.web.request.InvalidRequestHandlerException;\nimport com.mgmtp.perfload.core.client.web.request.RequestHandler;\nimport com.mgmtp.perfload.core.client.web.response.DetailExtractor;\nimport com.mgmtp.perfload.core.client.web.response.HeaderExtractor;\nimport com.mgmtp.perfload.core.client.web.response.ResponseInfo;\nimport com.mgmtp.perfload.core.client.web.response.ResponseValidator;\nimport com.mgmtp.perfload.core.client.web.template.RequestTemplate;\nimport com.mgmtp.perfload.core.client.web.template.TemplateTransformer;\n\n/**\n * Default implementation of a {@link RequestFlowHandler}. It handles the complete logic for a run\n * of a Web load test and fires {@link RequestFlowEvent}s before and after request flows and before\n * and after requests. The \"after\" events are fired in {@code finally} blocks so that they are also\n * triggered in case of an exception. 
The exception will then be available through the\n * {@link RequestFlowEvent}.\n *\n * @author rnaegele\n */\n@ExecutionScoped\npublic final class DefaultRequestFlowHandler implements RequestFlowHandler {\n\tprivate final Logger log = LoggerFactory.getLogger(getClass());\n\n\tprivate final Map requestHandlers;\n\tprivate final List requestFlows;\n\tprivate final TemplateTransformer templateTransformer;\n\tprivate final ResponseValidator responseValidator;\n\tprivate final DetailExtractor detailExtractor;\n\tprivate final HeaderExtractor headerExtractor;\n\tprivate final WaitingTimeManager waitingTimeManager;\n\tprivate final PlaceholderContainer placeholderContainer;\n\tprivate final Set listeners;\n\tprivate final ErrorHandler errorHandler;\n\tprivate final UUID executionId;\n\n\t/**\n\t * Constructs a new instance.\n\t *\n\t * @param requestFlows\n\t * the list of {@link RequestFlow}s to be processed\n\t * @param requestHandlers\n\t * a map of {@link RequestHandler}s; must contain a request handler for each type of\n\t * request in the request flow\n\t * @param templateTransformer\n\t * the {@link TemplateTransformer} used to make request template executable\n\t * @param responseValidator\n\t * the {@link ResponseValidator} for validating the HTTP reponses\n\t * @param detailExtractor\n\t * the {@link DetailExtractor} for extracting details from the reponse bodies\n\t * @param headerExtractor\n\t * the {@link HeaderExtractor} for extracting headers from the reponses\n\t * @param waitingTimeManager\n\t * the {@link WaitingTimeManager} that introduces a waiting time as configured before\n\t * each request\n\t * @param placeholderContainer\n\t * the {@link PlaceholderContainer}\n\t * @param listeners\n\t * a set of {@link RequestFlowEventListener}s\n\t * @param errorHandler\n\t * the error handler which determines whether and exception should lead to the\n\t * abortion of the whole test\n\t * @param executionId\n\t * the execution id\n\t */\n\t@Inject\n\tpublic 
DefaultRequestFlowHandler(final List requestFlows, final Map requestHandlers,\n\t\t\tfinal TemplateTransformer templateTransformer, final ResponseValidator responseValidator,\n\t\t\tfinal DetailExtractor detailExtractor, final HeaderExtractor headerExtractor,\n\t\t\tfinal WaitingTimeManager waitingTimeManager, final PlaceholderContainer placeholderContainer,\n\t\t\tfinal Set listeners, final ErrorHandler errorHandler, @ExecutionId final UUID executionId) {\n\t\tthis.requestFlows = requestFlows;\n\t\tthis.requestHandlers = requestHandlers;\n\t\tthis.templateTransformer = templateTransformer;\n\t\tthis.responseValidator = responseValidator;\n\t\tthis.detailExtractor = detailExtractor;\n\t\tthis.headerExtractor = headerExtractor;\n\t\tthis.waitingTimeManager = waitingTimeManager;\n\t\tthis.placeholderContainer = placeholderContainer;\n\t\tthis.listeners = listeners;\n\t\tthis.errorHandler = errorHandler;\n\t\tthis.executionId = executionId;\n\t}\n\n\t@Override\n\tpublic void execute() throws Exception {\n\t\tException exception = null;\n\n\t\tfor (ListIterator it = requestFlows.listIterator(); it.hasNext();) {\n\t\t\tRequestFlow requestFlow = it.next();\n\t\t\t// 1-based index, so we use \"nextIndex()\"\n\t\t\tint flowIndex = it.nextIndex();\n\n\t\t\ttry {\n\t\t\t\t// fire event\n\t\t\t\tfireBeforeRequestFlow(flowIndex);\n\n\t\t\t\t// process requests\n\t\t\t\tfor (final RequestTemplate template : requestFlow) {\n\n\t\t\t\t\tRequestTemplate executableTemplate = null;\n\t\t\t\t\tResponseInfo responseInfo = null;\n\t\t\t\t\tUUID requestId = UUID.randomUUID();\n\t\t\t\t\ttry {\n\t\t\t\t\t\twaitingTimeManager.sleepBeforeRequest();\n\n\t\t\t\t\t\t// check for interrupt and abort if necessary\n\t\t\t\t\t\tcheckInterrupt();\n\n\t\t\t\t\t\t// fire event, also called for skipped requests, because event handler may decide whether to skip\n\t\t\t\t\t\t// must be called before the template is made executable, so parameters\n\t\t\t\t\t\t// can be put into the placeholder 
container\n\t\t\t\t\t\tfireBeforeRequest(flowIndex, template);\n\n\t\t\t\t\t\texecutableTemplate = templateTransformer.makeExecutable(template, placeholderContainer);\n\t\t\t\t\t\tif (executableTemplate.isSkipped()) {\n\t\t\t\t\t\t\tlog.info(\"Skipping request: {}\", executableTemplate);\n\t\t\t\t\t\t\tcontinue;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tlog.debug(\"Executing request template: {}\", executableTemplate);\n\t\t\t\t\t\t\n\t\t\t\t\t\t// look up request handler for the request's type\n\t\t\t\t\t\tString type = template.getType();\n\t\t\t\t\t\tRequestHandler handler = requestHandlers.get(type);\n\t\t\t\t\t\tif (handler == null) {\n\t\t\t\t\t\t\tthrow new InvalidRequestHandlerException(String.format(\"No request handler for type '%s' available.\",\n\t\t\t\t\t\t\t\t\ttype));\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tresponseInfo = handler.execute(executableTemplate, requestId);\n\t\t\t\t\t\tif (responseInfo != null) {\n\t\t\t\t\t\t\tlog.debug(responseInfo.toString());\n\n\t\t\t\t\t\t\t// process response\n\t\t\t\t\t\t\tif (executableTemplate.isValidateResponse()) {\n\t\t\t\t\t\t\t\tresponseValidator.validate(responseInfo);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\tdetailExtractor.extractDetails(responseInfo, executableTemplate.getDetailExtractions(),\n\t\t\t\t\t\t\t\t\tplaceholderContainer);\n\t\t\t\t\t\t\theaderExtractor.extractHeaders(responseInfo, executableTemplate.getHeaderExtractions(),\n\t\t\t\t\t\t\t\t\tplaceholderContainer);\n\t\t\t\t\t\t}\n\t\t\t\t\t} catch (Exception ex) {\n\t\t\t\t\t\texception = ex;\n\n\t\t\t\t\t\t// Handle error. 
Depending on the exception, the error handler may choose to abort the test.\n\t\t\t\t\t\terrorHandler.execute(ex);\n\n\t\t\t\t\t\tif (responseInfo != null) {\n\t\t\t\t\t\t\t// In case of an error we additionally log the response info at warn level\n\t\t\t\t\t\t\tlog.warn(responseInfo.toString());\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// In any case, we don't want to execute the remainder of\n\t\t\t\t\t\t// the current request flow if an exception occurred and break out of the loop.\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t} finally {\n\t\t\t\t\t\tRequestTemplate template4Event = executableTemplate != null ? executableTemplate : template;\n\t\t\t\t\t\t// ResponseInfo is null the request is skipped of if an exception occurs when the HttpClient executes\n\t\t\t\t\t\t// the request. In order to make sure that we still get an entry in the measuring log in the case of and\n\t\t\t\t\t\t// exceptiohn, we need to create one. However, it must remain null, when the request is skipped in\n\t\t\t\t\t\t// order to avoid an entry in the measuring log.\n\t\t\t\t\t\tif (responseInfo == null && executableTemplate != null && !executableTemplate.isSkipped()) {\n\t\t\t\t\t\t\tresponseInfo = new ResponseInfo.Builder()\n\t\t\t\t\t\t\t\t\t.methodType(template4Event.getType())\n\t\t\t\t\t\t\t\t\t.uri(template4Event.getUri())\n\t\t\t\t\t\t\t\t\t.uriAlias(template4Event.getUriAlias())\n\t\t\t\t\t\t\t\t\t.timestamp(System.currentTimeMillis())\n\t\t\t\t\t\t\t\t\t.executionId(executionId)\n\t\t\t\t\t\t\t\t\t.requestId(requestId)\n\t\t\t\t\t\t\t\t\t.build();\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// always fire event, including skipped requests\n\t\t\t\t\t\tfireAfterRequest(flowIndex, template4Event, exception, responseInfo);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} catch (Exception ex) {\n\t\t\t\texception = ex;\n\t\t\t\terrorHandler.execute(ex);\n\t\t\t} finally {\n\n\t\t\t\t// fire event\n\t\t\t\tfireAfterRequestFlow(flowIndex, exception);\n\t\t\t}\n\n\t\t\t// In case of an exception, we don't want to execute potential 
subsequent request flows.\n\t\t\tif (exception != null) {\n\t\t\t\tbreak;\n\t\t\t}\n\t\t}\n\t}\n\n\tprivate void fireBeforeRequestFlow(final int flowIndex) {\n\t\tRequestFlowEvent event = new RequestFlowEvent(flowIndex);\n\t\tlog.debug(\"fireBeforeRequestFlow: {}\", event);\n\t\tfor (RequestFlowEventListener listener : listeners) {\n\t\t\tlog.debug(\"Executing listener: {}\", listener);\n\t\t\tlistener.beforeRequestFlow(event);\n\t\t}\n\t}\n\n\tprivate void fireAfterRequestFlow(final int flowIndex, final Exception ex) {\n\t\tRequestFlowEvent event = new RequestFlowEvent(flowIndex, ex);\n\t\tlog.debug(\"fireAfterRequestFlow: {}\", event);\n\t\tfor (RequestFlowEventListener listener : listeners) {\n\t\t\tlog.debug(\"Executing listener: {}\", listener);\n\t\t\tlistener.afterRequestFlow(event);\n\t\t}\n\t}\n\n\tprivate void fireBeforeRequest(final int flowIndex, final RequestTemplate template) {\n\t\tRequestFlowEvent event = new RequestFlowEvent(flowIndex, template);\n\t\tlog.debug(\"fireBeforeRequestTemplate: {}\", event);\n\t\tfor (RequestFlowEventListener listener : listeners) {\n\t\t\tlog.debug(\"Executing listener: {}\", listener);\n\t\t\tlistener.beforeRequest(event);\n\t\t}\n\t}\n\n\tprivate void fireAfterRequest(final int flowIndex, final RequestTemplate template,\n\t\t\tfinal Exception ex, final ResponseInfo responseInfo) {\n\t\tRequestFlowEvent event = new RequestFlowEvent(flowIndex, template, ex, responseInfo);\n\t\tlog.debug(\"fireAfterRequestTemplate: {}\", event);\n\t\tfor (RequestFlowEventListener listener : listeners) {\n\t\t\tlog.debug(\"Executing listener: {}\", listener);\n\t\t\tlistener.afterRequest(event);\n\t\t}\n\t}\n}\n", "meta": {"content_hash": "3ec8aceb24096942fdf3d7cf0699be7c", "timestamp": "", "source": "github", "line_count": 252, "max_line_length": 122, "avg_line_length": 40.78968253968254, "alnum_prop": 0.7367448195349742, "repo_name": "mgm-tp/perfload-core", "id": "c4111ae0037ebc5d72c69b0036eaa04d012161f4", "size": "10899", "binary": 
false, "copies": "1", "ref": "refs/heads/master", "path": "perfload-client/src/main/java/com/mgmtp/perfload/core/client/web/flow/DefaultRequestFlowHandler.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "3125"}, {"name": "Java", "bytes": "623964"}, {"name": "Shell", "bytes": "2729"}]}} {"text": "declare namespace CodeceptJS {\n export interface I {\n createCitizenUser: () => string\n createSolicitorUser: () => Promise\n deleteUser: (email) => Promise\n deleteUsers: (emails) => Promise\n createClaim: (claimData: ClaimData, submitterEmail: string) => string\n linkDefendantToClaim: (referenceNumber: string, ownerEmail: string, defendantEmail: string) => void\n respondToClaim: (referenceNumber: string, ownerEmail: string, responseData: ResponseData, defendantEmail: string) => void\n\n amOnLegalAppPage: (path: string) => void\n waitForLegalAppPage: (path?: string) => void\n downloadPDF: (pdfUrl: string, sessionCookie: string) => Promise\n attachFile: (locator: string, path: string) => any\n grabAttributeFrom: (locator: string, attr: string) => any\n\n fillField: (locator: string | object, value: string) => any\n selectOption: (select: string, option: string) => any\n }\n}\n\ntype CodeceptJSHelper = {\n _before: () => void;\n _after: () => void;\n}\n\ndeclare const codecept_helper: { new(): CodeceptJSHelper }\n\ndeclare function session(selector: string, callback: Function): Promise;\ndeclare function session(selector: string, config: any, callback: Function): Promise;\n", "meta": {"content_hash": "13d0e3ccbcf6f9ad669ab0106c374736", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 125, "avg_line_length": 41.46666666666667, "alnum_prop": 0.7065916398713826, "repo_name": "hmcts/cmc-legal-rep-frontend", "id": "f89264741b284cb589edcccc79afd1c40749a966", "size": "1244", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "types/steps-override.d.ts", "mode": "33188", "license": "mit", 
"language": [{"name": "Dockerfile", "bytes": "909"}, {"name": "Groovy", "bytes": "2124"}, {"name": "HCL", "bytes": "1611"}, {"name": "JavaScript", "bytes": "16357"}, {"name": "Nunjucks", "bytes": "86720"}, {"name": "SCSS", "bytes": "10124"}, {"name": "Shell", "bytes": "4590"}, {"name": "TypeScript", "bytes": "682938"}]}} {"text": "from collections import namedtuple\nfrom datetime import datetime\n\nfrom scopus.classes import Retrieval\n\n\nclass CitationOverview(Retrieval):\n @property\n def authors(self):\n \"\"\"A list of namedtuples storing author information,\n where each namedtuple corresponds to one author.\n The information in each namedtuple is (name surname initials id url).\n All entries are strings.\n \"\"\"\n out = []\n order = 'name surname initials id url'\n auth = namedtuple('Author', order)\n for author in self._citeInfoMatrix.get('author'):\n author = {k.split(\":\", 1)[-1]: v for k, v in author.items()}\n new = auth(name=author.get('index-name'), id=author.get('authid'),\n surname=author.get('surname'),\n initials=author.get('initials'),\n url=author.get('author-url'))\n out.append(new)\n return out or None\n\n @property\n def cc(self):\n \"\"\"List of tuples of yearly number of citations\n for specified years.\"\"\"\n _years = range(self._start, self._end+1)\n try:\n return list(zip(_years, [d.get('$') for d in self._citeInfoMatrix['cc']]))\n except AttributeError: # No citations\n return list(zip(_years, [0]*len(_years)))\n\n @property\n def citationType_long(self):\n \"\"\"Type (long version) of the abstract (e.g. article, review).\"\"\"\n return self._citeInfoMatrix.get('citationType', {}).get('$')\n\n @property\n def citationType_short(self):\n \"\"\"Type (short version) of the abstract (e.g. 
ar, re).\"\"\"\n return self._citeInfoMatrix.get('citationType', {}).get('@code')\n\n @property\n def doi(self):\n \"\"\"Document Object Identifier (DOI) of the abstract.\"\"\"\n return self._identifierlegend.get('doi')\n\n @property\n def endingPage(self):\n \"\"\"Ending page.\"\"\"\n return self._citeInfoMatrix.get('endingPage')\n\n @property\n def h_index(self):\n \"\"\"h-index of ciations of the abstract (according to Scopus).\"\"\"\n return self._data['h-index']\n\n @property\n def issn(self):\n \"\"\"ISSN of the publisher.\n Note: If E-ISSN is known to Scopus, this returns both\n ISSN and E-ISSN in random order separated by blank space.\n \"\"\"\n return self._citeInfoMatrix.get('issn')\n\n @property\n def issueIdentifier(self):\n \"\"\"Issue number for abstract.\"\"\"\n return self._citeInfoMatrix.get('issueIdentifier')\n\n @property\n def lcc(self):\n \"\"\"Number of citations the abstract received\n after the specified end year.\n \"\"\"\n return self._citeInfoMatrix.get('lcc')\n\n @property\n def pcc(self):\n \"\"\"Number of citations the abstract received\n before the specified start year.\n \"\"\"\n return self._citeInfoMatrix.get('pcc')\n\n @property\n def pii(self):\n \"\"\"The Publication Item Identifier (PII) of the abstract.\"\"\"\n return self._identifierlegend.get('pii')\n\n @property\n def publicationName(self):\n \"\"\"Name of source the abstract is published in (e.g. the Journal).\"\"\"\n return self._citeInfoMatrix.get('publicationName')\n\n @property\n def scopus_id(self):\n \"\"\"The Scopus ID of the abstract. 
It is the second part of an EID.\n The Scopus ID might differ from the one provided.\n \"\"\"\n return self._identifierlegend.get('scopus_id')\n\n @property\n def startingPage(self):\n \"\"\"Starting page.\"\"\"\n return self._citeInfoMatrix.get('startingPage')\n\n @property\n def rangeCount(self):\n \"\"\"Number of citations for specified years.\"\"\"\n return self._citeInfoMatrix.get('rangeCount')\n\n @property\n def rowTotal(self):\n \"\"\"Number of citations (specified and omitted years).\"\"\"\n return self._citeInfoMatrix.get('rowTotal')\n\n @property\n def title(self):\n \"\"\"Abstract title.\"\"\"\n return self._citeInfoMatrix.get('title')\n\n @property\n def url(self):\n \"\"\"URL to Citation Overview API view of the abstract.\"\"\"\n return self._citeInfoMatrix.get('url')\n\n @property\n def volume(self):\n \"\"\"Volume for the abstract.\"\"\"\n return self._citeInfoMatrix.get('volume')\n\n def __init__(self, eid, start, end=datetime.now().year, refresh=False):\n \"\"\"Class to represent the results from a Scopus Citation Overview.\n See https://api.elsevier.com/documentation/guides/AbstractCitationViews.htm.\n\n Parameters\n ----------\n eid : str\n The EID of the abstract.\n\n start : str or int\n The first year for which the citation count should be loaded\n\n end : str or int (optional, default=datetime.now().year)\n The last year for which the citation count should be loaded.\n Default is the current year.\n\n refresh : bool (optional, default=False)\n Whether to refresh the cached file if it exists or not.\n\n Notes\n -----\n The files are cached in ~/.scopus/citation_overview/STANDARD/{eid}.\n Your API Key needs to be approved by Elsevier to access this API.\n \"\"\"\n # Variables\n self._start = int(start)\n self._end = int(end)\n view = \"STANDARD\" # In case Scopus adds different views in future\n\n # Get file content\n date = '{}-{}'.format(start, end)\n Retrieval.__init__(self, eid, 'CitationOverview', refresh, view=view,\n date=date)\n 
self._data = self._json['abstract-citations-response']\n\n # citeInfoMatrix\n m = self._data['citeInfoMatrix']['citeInfoMatrixXML']['citationMatrix']['citeInfo'][0]\n self._citeInfoMatrix = _parse_dict(m)\n # identifier-legend\n l = self._data['identifier-legend']['identifier'][0]\n self._identifierlegend = _parse_dict(l)\n # citeColumnTotalXML\n self._citeColumnTotalXML = self._data['citeColumnTotalXML'] # not used\n\n def __str__(self):\n \"\"\"Return a summary string.\"\"\"\n authors = [a.name for a in self.authors]\n if len(authors) > 1:\n authors[-1] = \" and \".join([authors[-2], authors[-1]])\n s = \"Document '{self.title}' by {authors} published in \"\\\n \"'{self.publicationName}' has the following citation trajectory \"\\\n \"for years {self._start} to {self._end}:\\n\"\\\n \"{self.cc}\\n\"\\\n \"Additionally cited {self.pcc} times before {self._start}, and \"\\\n \"{self.lcc} times after {self._end}\".format(\n self=self, authors=\", \".join(authors))\n return s\n\n\ndef _parse_dict(dct):\n \"\"\"Auxiliary function to change the keys of a dictionary.\"\"\"\n return {k.split(\":\", 1)[-1]: v for k, v in dct.items()}\n", "meta": {"content_hash": "12c6f57bb5dd4496acb1d1dbed409ae3", "timestamp": "", "source": "github", "line_count": 197, "max_line_length": 94, "avg_line_length": 34.63959390862944, "alnum_prop": 0.5943728018757327, "repo_name": "scopus-api/scopus", "id": "7cfb19dc03b943ee876da79e64ee6355ef17eec7", "size": "6824", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "scopus/abstract_citations.py", "mode": "33188", "license": "mit", "language": [{"name": "Python", "bytes": "133243"}]}} {"text": "taxer\n=====\n[![Build Status](https://travis-ci.org/teracyhq/taxer.svg?branch=develop)](https://travis-ci.org/teracyhq/taxer)\n[![Coverage Status](https://coveralls.io/repos/github/teracyhq/taxer/badge.svg?branch=develop)](https://coveralls.io/github/teracyhq/taxer?branch=develop)\n[![Code 
Climate](https://codeclimate.com/github/teracyhq/taxer/badges/gpa.svg)](https://codeclimate.com/github/teracyhq/taxer)\n\n\nuniversal tax calculator javascript library to calculate all kinds of taxes through out the world.\n\nLibrary Architecture\n--------------------\n\nIt's designed with plugin mechanism and minimalist in mind. By default:\n\n```js\nconst taxer = new Taxer();\ntaxer.use(new CustomCalctor());\nconst taxInfo = taxer.calc(countryCode, income, options);\n```\n\nin which:\n\nCustomCalctor should be a class implements Calctor interface which has:\n- isMatched(countryCode, taxableIncome, options) method: to be hooked up if it is the first to return true.\n- calc(taxableIncome, options) method: the taxInfo is calculated and returned.\n\nIf no matched calculator, an error will be thrown.\n\nFor example:\n\n```js\n\nexport default class VnCalctor {\n constructor() {\n }\n\n calc(taxableIncome, options={}) {\n return {\n taxableIncome: taxableIncome\n }\n }\n\n isMatched(countryCode, income, options) {\n if (typeof countryCode === 'string') {\n countryCode = countryCode.toLowerCase();\n }\n return ['vn', 'vnm', 704, 'vietnam', 'viet nam'].indexOf(countryCode) > -1;\n }\n\n // required by exector under the hood, usually ignored by calctors\n exec() {\n return undefined;\n }\n}\n```\n\nFor easier implementation, we should extend the base class Calctor, as the following:\n\n```js\n\nimport { Calctor } from 'taxer';\n\n\nexport default class VnCalctor extends Calctor {\n get currency() {\n return 'VND';\n }\n\n get supportedCountryCodes() {\n return ['vn', 'vnm', 704, 'vietnam', 'viet nam'];\n }\n\n doMonthlyGrossPayrollCalc(income, options) {\n return monthlyPayrollProgressiveCalctor.calc(income, options);\n }\n\n doYearlyGrossPayrollCalc(income, options) {\n return yearlyPayrollProgressiveCalctor.calc(income, options);\n }\n\n doMonthlyNetPayrollCalc(income, options) {\n return monthlyPayrollProgressiveCalctor.calc(income, options);\n }\n\n 
doYearlyNetPayrollCalc(income, options) {\n return yearlyPayrollProgressiveCalctor.calc(income, options);\n }\n}\n\n```\n\nThat's how the library architecture works.\n\n\nHow to use\n----------\n\n1. Configure\n\n 1.1. From the default taxer with built-in tax calculators:\n\n ```js\n const taxer = defaultTaxer();\n // add more custom calculator\n taxer.use(new CustomCalctor(options));\n ```\n\n 1.2. From scratch\n\n ```js\n const taxer = new Taxer();\n taxer.use(VnCalctor());\n taxer.use(UsaCalctor());\n taxer.use(SgCalctor());\n taxer.use(CustomCalctor(options));\n ```\n\n2. Use\n\n```js\nconst taxInfo = taxer.calc(countryCode, income, options);\nconsole.log(taxInfo);\n```\n\n\nHow to develop\n--------------\n\nThis is the minimalist plugin architecture inspired by express.js and koa.js a lot.\nLet's keep it as minimal and lightweight as possible.\n\nClone this repository and:\n\n```\n$ npm install\n$ npm run test\n```\n\nOr with Docker:\n\n```\n$ docker-compose up\n```\n\nHow to contribute\n-----------------\n\nBy writing custom tax plugins to create a good solid universal tax system throughout the world.\n\nFollow Teracy workflow: http://dev.teracy.org/docs/workflow.html\n\n\nReferences\n----------\n\nThese are related similar projects we should take a look:\n\n- https://github.com/rkh/income-tax\n\n- https://www.npmjs.com/package/uk-income-tax\n\n\nLicense\n-------\nMIT license. 
See LICENSE file.\n", "meta": {"content_hash": "95ed2cf3f49d345c7c8a7023a5203579", "timestamp": "", "source": "github", "line_count": 165, "max_line_length": 154, "avg_line_length": 21.903030303030302, "alnum_prop": 0.7047592695074709, "repo_name": "teracyhq/taxer", "id": "17aeb699df5e3c2473606bd3e901bad19c310fed", "size": "3614", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "README.md", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "70665"}]}} {"text": "ACCEPTED\n\n#### According to\nInternational Plant Names Index\n\n#### Published in\nnull\n\n#### Original name\nnull\n\n### Remarks\nnull", "meta": {"content_hash": "4694f1d12a3d5887668ae1f505b24542", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "7837baa18f220933aadbcb5818aa6a8008db5d31", "size": "171", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Caryophyllales/Polygonaceae/Rumex/Rumex hagensis/README.md", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "/**\n * @author Nidin Vinayakan\n */\nmodule nid{\n export function saveAs(data:any,name:string=\"Unnamed\"){\n var blob = new Blob([data],{type: 'application/octet-binary'});\n var url = URL.createObjectURL(blob);\n var save_link:any = document.createElementNS(\"http://www.w3.org/1999/xhtml\", \"a\");\n save_link.href = url;\n save_link.download = name;\n var event:any = document.createEvent(\"MouseEvents\");\n event.initMouseEvent(\n \"click\", true, false, null, 0, 0, 0, 0, 0\n , false, false, false, false, 0, null\n );\n save_link.dispatchEvent(event);\n }\n}\n", "meta": {"content_hash": "0513ceb82eee2d854878ef2ed195398b", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 90, "avg_line_length": 35.22222222222222, "alnum_prop": 0.5883280757097792, 
"repo_name": "nidin/TS-ImageLibrary", "id": "5fa5845cf6a2df0a488087ae19a39f3f2e1e5a96", "size": "634", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/nid/utils/FileUtils.ts", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "1087"}, {"name": "JavaScript", "bytes": "268980"}, {"name": "TypeScript", "bytes": "44439"}]}} {"text": "\n\nexport * from \"./src/CodeCompletionCore\";\nexport * from \"./src/SymbolTable\";\n", "meta": {"content_hash": "48e7fecea4e513a3ee50133c1f55da34", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 41, "avg_line_length": 19.75, "alnum_prop": 0.6962025316455697, "repo_name": "mike-lischke/antlr4-c3", "id": "a6567adcd01e3762430347e01f420034f4d80e89", "size": "212", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "index.ts", "mode": "33188", "license": "mit", "language": [{"name": "ANTLR", "bytes": "30037"}, {"name": "C#", "bytes": "43645"}, {"name": "C++", "bytes": "18356"}, {"name": "Java", "bytes": "39569"}, {"name": "TypeScript", "bytes": "125622"}]}} {"text": "var dialog = function(opt){\n if(typeof opt.content == \"string\" || opt.content instanceof jQuery){\n $(\"#dialog .content\").html(opt.content);\n }\n else{\n $(\"#dialog .content\").html(content());\n }\n\n $(\"#dialogBackground\").css(\"display\", \"block\");\n\n $(\"#dialogConfirm\").click(function(){\n if(opt.confirm != undefined &&\n typeof opt.confirm == \"function\"){\n if(!opt.confirm()){\n return;\n }\n }\n\n $(\"#dialogBackground\").css(\"display\", \"none\");\n $(this).unbind(\"click\");\n });\n\n $(\"#dialogCancel\").click(function(){\n if(opt.cancel != undefined &&\n typeof opt.cancel == \"function\"){\n opt.cancel();\n }\n\n $(\"#dialogBackground\").css(\"display\", \"none\");\n $(this).unbind(\"click\");\n });\n}\n\nvar logAppend = function(text){\n $(\"#logArea\").append(text + \"
\");\n document.getElementById('logArea').scrollTop = document.getElementById('logArea').scrollHeight;\n}\n\n\nvar warnning = function(content){\n if(typeof content == \"string\" || content instanceof jQuery){\n $(\"#warnning .content\").html(content);\n }\n else{\n $(\"#warnning .content\").html(content());\n }\n\n $(\"#warnningBackground\").css(\"display\", \"block\");\n\n $(\"#warnningConfirm\").click(function(){\n $(\"#warnningBackground\").css(\"display\", \"none\");\n $(this).unbind(\"click\");\n });\n}", "meta": {"content_hash": "94ce361cdedd7087198a6c752a211320", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 99, "avg_line_length": 26.555555555555557, "alnum_prop": 0.5355648535564853, "repo_name": "liuzip/delve", "id": "b3c2fd0e32acfa57ad0b8e6ef8f9e467c52e3eb3", "size": "1434", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "assets/js/common.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "3136"}, {"name": "HTML", "bytes": "10701"}, {"name": "JavaScript", "bytes": "564079"}]}} {"text": "Google Code Veterans 2013\n=========================\n\n## Problems\n* Hedgemony\n* Baby Height\n* Ocean View\n", "meta": {"content_hash": "0e93a685b21de319fc3c517657a37733", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 25, "avg_line_length": 14.857142857142858, "alnum_prop": 0.5576923076923077, "repo_name": "jdavis/code-jelly", "id": "5652f923b321da9865e40e5bef3c91c0ef1b9570", "size": "104", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "veterans2013/README.md", "mode": "33188", "license": "mit", "language": [{"name": "C++", "bytes": "9699"}]}} {"text": "// ===\n// \t\tDaisy Chain!\n//\t\t\t\t\t\t===\n\n// * unhappy daisies in Opera Browser due to rendering glitch\n\n// properties\n// var\n//width, height,\n\n// plot\npi = Math.PI;\n//horizon,\n//block,\n\n// palette\n//sky;\n\n// body width, 3:1 ratio\nc.width = width = b.clientWidth;\nc.height = 
height = width / 3;\n\n// set plot\nhorizon = height * 0.2;\nblock = 10;\n\n// set sky drop\nsky = a.createLinearGradient(0, 0, 0, height);\nsky.addColorStop(0, '#0FF');\nsky.addColorStop(1, '#00F');\n\n// set fill to sky\na.fillStyle = sky;\na.fillRect(0, 0, width, height);\n\n/**\n *\tGenerate random number.\n *\t@param {Number} min Minimum number to generate\n *\t@param {Number} max Maximum number to generate\n */\nfunction random_(min, max) {\n\t// _.random()\n\treturn min + Math.floor(Math.random() * (max - min + 1));\n}\n\n/**\n *\tIntelligently render on framerate intervals. (reduces lag)\n *\t@param {Function} frame Callback function to generate frame\n */\nfunction render_(frame) {\n\t// requestAnimationFrame shim (@paul_irish)\n\t(window.requestAnimationFrame ||\n\t\twindow.webkitRequestAnimationFrame ||\n\t\twindow.mozRequestAnimationFrame ||\n\t\tfunction(fn){\n\t\t\twindow.setTimeout(fn, 1000 / 60);\n\t\t})(frame);\n}\n\n/**\n *\tBlossoms a daisy!\n */\nfunction blossom() {\n\t// set scope\n\tvar multiplier, posMultiplier,\n\t\tstartX, startY,\n\t\tstopX, stopY,\n\t\tctrlX, ctrlY,\n\t\tdeltaX, deltaY,\n\t\tdiffX, diffY,\n\t\tposX, posY;\n\n\t// set random multiplier, determines size\n\tposMultiplier = 0;\n\tmultiplier = random_(5, (horizon * 3) / block);\n\n\t// set where to start\n\tposX = startX = random_(block, width - block);\n\tposY = startY = height - horizon + block;\n\n\t// set where to end\n\tstopX = random_(startX - multiplier, startX + multiplier);\n\tstopY = startY - (block * multiplier);\n\n\t// set gradient for quadratic curve\n\tctrlX = random_(startX - multiplier, stopX + multiplier);\n\tctrlY = random_(startY, stopY);\n\n\t// determine velocity in x and y directions\n\tdeltaX = Math.abs(stopX - startX);\n\tdeltaY = Math.abs(stopY - startY);\n\n\t// velocity, where diff = distance per frame\n\tdiffX = diffY = 1; (deltaX > deltaY) ? 
diffY = deltaY / deltaX : diffX = deltaX / deltaY;\n\n\t/**\n\t *\tRenders daisy flower.\n\t */\n\tfunction flower() {\n\t\t// render stamen\n\t\ta.beginPath();\n\t\ta.fillStyle = '#FF0';\n\t\ta.arc(stopX, stopY, (block * posMultiplier)/pi/1.5/1.5, 0, pi*2);\n\t\ta.fill();\n\t\t//a.restore();\n\n\t\t// render petals (unicode chars ftw)\n\t\ta.fillStyle = '#FFF';\n\t\ta.font = (block * posMultiplier) + \"pt serif\";\n\t\ta.fillText(\"\\u273f\", stopX - (block * posMultiplier)/2, stopY + (block * posMultiplier)/2, block * posMultiplier);\n\n\t\t// progress position\n\t\tposMultiplier++;\n\n\t\t// loop until flower is fully rendered\n\t\tif(posMultiplier !== multiplier) render_(flower);\n\t}\n\n\t/**\n\t *\tRenders daisy stalk.\n\t */\n\tfunction stalk() {\n\t\t// render quadratic curve\n\t\ta.beginPath();\n\t\ta.lineWidth = block;\n\t\ta.strokeStyle = '#000';\n\t\ta.moveTo(startX, startY);\n\t\ta.quadraticCurveTo(ctrlX, ctrlY, posX, posY);\n\t\ta.stroke();\n\t\t//a.restore();\n\n\t\t// set ground drop (maintains position above stalk)\n\t\ta.fillStyle = '#080';\n\t\ta.fillRect(0, height - horizon, width, horizon);\n\n\t\t// progress position\n\t\tposX += diffX;\n\t\tposY -= diffY;\n\n\t\t// loop, or move on to the flower\n\t\t(posY !== stopY) ? render_(stalk) : flower();\n\t}\n\n\t// render only when coordinates are different, else try again\n\t// fixes rendering bug where flowers would appear without stalk\n\t(stopX !== startX) ? 
stalk() : blossom();\n\n\t// and we're done!\n\t// return;\n}\n\n// generate random number of daisies, *spor*adically between 0.5s and 8s\n// excuse the pun...\nblossom();\nfor(var x=0, limit=random_(0, 25); x\n\n/* jshint esversion:9, node:true, strict:implied */\n\nconst common = require('./common.js'),\n utility = require('./utility.js'),\n promiseWrap = require('./promiseWrap.js'),\n urljoin = require('url-join'),\n sprintf = require('sprintf-js').sprintf,\n request = require('request'),\n dateFormat = require('dateformat');\n\nfunction utcOffset_apigeeTimeFormat(date) {\n var s = dateFormat(date, \"isoUtcDateTime\");\n s = s.slice(0, -4);\n return s.slice(-5);\n}\n\nfunction getTimeRange(start, end) {\n start = dateFormat(start, 'mm/dd/yyyy') + ' ' + utcOffset_apigeeTimeFormat(start);\n end = dateFormat(end, 'mm/dd/yyyy') + ' ' + utcOffset_apigeeTimeFormat(end);\n return start + '~' + end;\n}\n\nfunction Stat(conn) {this.conn = conn;}\n\nStat.prototype.get = promiseWrap(function(options, cb) {\n // GET \"$mgmtserver/v1/o/$ORG/e/$ENV/stats/apis?select=sum(message_count)&timeRange=01/01/2018%2000:00~08/01/2018%2000:00&timeUnit=month\"\n\n // var options = {\n // environment : 'test',\n // dimension: 'apis',\n // metric: 'sum(message_count)',\n // startTime: startTime,\n // endTime : endTime,\n // timeUnit : 'month',\n // limit : 1024,\n // optimize : true/false,\n // cacheCheck : fn\n // };\n\n if ( ! 
cb) { cb = options; options = {}; }\n var conn = this.conn;\n var env = options.environmentName || options.environment;\n if (!env) {\n throw new Error(\"missing environment name\");\n }\n if (!options.dimension) {\n throw new Error(\"missing dimension\");\n }\n if (!options.metric) {\n throw new Error(\"missing metric\");\n }\n common.insureFreshToken(conn, function(requestOptions) {\n var query = sprintf('?select=%s&timeUnit=%s&timeRange=%s',\n options.metric,\n options.timeUnit,\n getTimeRange(options.startTime, options.endTime));\n\n if (options.limit) {\n query += '&limit=' + options.limit;\n }\n if (options.optimize) {\n query += '&_optimize=js';\n }\n if (options.metric.indexOf('percentile')>=0) {\n query += '&t=agg_percentile';\n }\n if (options.filter) {\n // filter=(apiproxy%20eq%20%%27${APIPROXY}%27)\n query += '&filter=' + options.filter;\n }\n requestOptions.url =\n urljoin(conn.urlBase, 'environments', env, 'stats', options.dimension) + query;\n\n if (conn.verbosity>0) {\n utility.logWrite(sprintf('GET %s', requestOptions.url));\n }\n\n // it takes a long time to retrieve some stats data. 
So let's\n // allow the use of a cache via an upcall.\n if (typeof options.cacheCheck == 'function') {\n let uniquifier = sprintf('%s-%s-%s-%s-%s-%s-%s',\n conn.orgname, env, options.dimension, options.metric,\n dateFormat(options.startTime, 'yyyymmdd'),\n dateFormat(options.endTime, 'yyyymmdd'), options.timeUnit);\n let cacheResponse = options.cacheCheck(requestOptions.url, uniquifier);\n if (cacheResponse) {\n if (cacheResponse.data) {\n return cb(null, {data: JSON.parse(cacheResponse.data) });\n }\n else if (cacheResponse.cachefile) {\n return request.get(requestOptions, common.callback(conn, [200], function(e, data){\n cb(e, {cachefile: cacheResponse.cachefile, data:data});\n }));\n }\n }\n }\n return request.get(requestOptions, common.callback(conn, [200], function(e, data){\n cb(e, {data:data});\n }));\n });\n});\n\nmodule.exports = Stat;\n", "meta": {"content_hash": "50b2a67220284b88f4324d19c0c92a4a", "timestamp": "", "source": "github", "line_count": 109, "max_line_length": 139, "avg_line_length": 34.22018348623853, "alnum_prop": 0.5761394101876676, "repo_name": "DinoChiesa/apigee-edge-js", "id": "052d5489fbb513f7a5c69c79d9f297277b8aa5a3", "size": "3730", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "lib/stat.js", "mode": "33188", "license": "apache-2.0", "language": [{"name": "JavaScript", "bytes": "411640"}, {"name": "XSLT", "bytes": "2212"}]}} {"text": "\n\n \n \n \n classical-realizability: Not compatible \ud83d\udc7c\n \n \n \n \n \n \n \n \n \n \n
\n \n
\n
\n
\n \u00ab Up\n

\n classical-realizability\n \n 8.5.0\n Not compatible \ud83d\udc7c\n \n

\n

\ud83d\udcc5 (2022-10-22 11:23:20 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed # Synopsis\nbase-bigarray       base\nbase-num            base        Num library distributed with the OCaml compiler\nbase-threads        base\nbase-unix           base\ncamlp5              7.14        Preprocessor-pretty-printer of OCaml\nconf-findutils      1           Virtual package relying on findutils\nconf-perl           2           Virtual package relying on perl\ncoq                 8.7.1+2     Formal proof management system\nnum                 0           The Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.04.2      The OCaml compiler (virtual package)\nocaml-base-compiler 4.04.2      Official 4.04.2 release\nocaml-config        1           OCaml Switch Configuration\nocamlfind           1.9.5       A library manager for OCaml\n# opam file:\nopam-version: "2.0"\nmaintainer: "matej.kosik@inria.fr"\nhomepage: "https://github.com/coq-contribs/classical-realizability"\nlicense: "BSD"\nbuild: [make "-j%{jobs}%"]\ninstall: [make "install"]\nremove: ["rm" "-R" "%{lib}%/coq/user-contrib/ClassicalRealizability"]\ndepends: [\n  "ocaml"\n  "coq" {>= "8.5" & < "8.6~"}\n]\ntags: [ "keyword:classical realizability" "keyword:krivine's realizability" "keyword:primitive datatype" "keyword:non determinism" "keyword:quote" "keyword:axiom of countable choice" "keyword:real numbers" "category:Mathematics/Logic/Foundations" ]\nauthors: [ "Lionel Rieg <lionel.rieg@ens-lyon.org>" ]\nbug-reports: "https://github.com/coq-contribs/classical-realizability/issues"\ndev-repo: "git+https://github.com/coq-contribs/classical-realizability.git"\nsynopsis: "Krivine's classical realizability"\ndescription: """\nThe aim of this Coq library is to provide a framework for checking\nproofs in Krivine's classical realizability for second-order Peano arithmetic.\nIt is designed to be as extensible as the original theory by Krivine and to\nsupport on-the-fly extensions by new instructions 
with their evaluation\nrules."""\nflags: light-uninstall\nurl {\n  src:\n    "https://github.com/coq-contribs/classical-realizability/archive/v8.5.0.tar.gz"\n  checksum: "md5=7e6fb42bd18a9d7282d8d694d322f9a6"\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install \ud83c\udfdc\ufe0f

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-classical-realizability.8.5.0 coq.8.7.1+2
\n
Return code
\n
5120
\n
Output
\n
[NOTE] Package coq is already installed (current version is 8.7.1+2).\nThe following dependencies couldn't be met:\n  - coq-classical-realizability -> coq < 8.6~ -> ocaml < 4.03.0\n      base of this switch (use `--unlock-base' to force)\nYour request can't be satisfied:\n  - No available version of coq satisfies the constraints\nNo solution found, exiting\n
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
opam remove -y coq; opam install -y --show-action --unlock-base coq-classical-realizability.8.5.0
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Install \ud83d\ude80

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Installation size

\n

No files were installed.

\n

Uninstall \ud83e\uddf9

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub \u00a9 Guillaume Claret \ud83d\udc23\n

\n
\n
\n \n \n \n\n", "meta": {"content_hash": "32178ad3e89633f7ce45dbc1ccb06dad", "timestamp": "", "source": "github", "line_count": 171, "max_line_length": 332, "avg_line_length": 44.228070175438596, "alnum_prop": 0.5651196615099828, "repo_name": "coq-bench/coq-bench.github.io", "id": "a67cb9af7ad5c8170da3f92608554c4cfda2ec05", "size": "7588", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.04.2-2.0.5/released/8.7.1+2/classical-realizability/8.5.0.html", "mode": "33188", "license": "mit", "language": []}} {"text": "\n\n#import \n#import \n#import \"FBFetchedAppSettings.h\"\n\n@class FBRequest;\n@class FBSession;\n\n@protocol FBGraphObject;\n\ntypedef enum FBAdvertisingTrackingStatus {\n AdvertisingTrackingAllowed,\n AdvertisingTrackingDisallowed,\n AdvertisingTrackingUnspecified\n} FBAdvertisingTrackingStatus;\n\n@interface FBUtility : NSObject\n\n+ (NSDictionary*)queryParamsDictionaryFromFBURL:(NSURL*)url;\n+ (NSDictionary*)dictionaryByParsingURLQueryPart:(NSString *)encodedString;\n+ (NSString *)stringBySerializingQueryParameters:(NSDictionary *)queryParameters;\n+ (NSString *)stringByURLDecodingString:(NSString*)escapedString;\n+ (NSString*)stringByURLEncodingString:(NSString*)unescapedString;\n+ (id)graphObjectInArray:(NSArray*)array withSameIDAs:(id)item;\n\n+ (unsigned long)currentTimeInMilliseconds;\n+ (NSTimeInterval)randomTimeInterval:(NSTimeInterval)minValue withMaxValue:(NSTimeInterval)maxValue;\n+ (void)centerView:(UIView*)view tableView:(UITableView*)tableView;\n+ (NSString *)stringFBIDFromObject:(id)object;\n+ (NSString *)stringAppBaseUrlFromAppId:(NSString *)appID urlSchemeSuffix:(NSString *)urlSchemeSuffix;\n+ (NSDate*)expirationDateFromExpirationTimeIntervalString:(NSString*)expirationTime;\n+ (NSDate*)expirationDateFromExpirationUnixTimeString:(NSString*)expirationTime;\n+ (NSBundle *)facebookSDKBundle;\n+ (NSString *)localizedStringForKey:(NSString *)key\n withDefault:(NSString *)value;\n+ (NSString 
*)localizedStringForKey:(NSString *)key\n withDefault:(NSString *)value\n inBundle:(NSBundle *)bundle;\n// Returns YES when the bundle identifier is for one of the native facebook apps\n+ (BOOL)isFacebookBundleIdentifier:(NSString *)bundleIdentifier;\n\n+ (BOOL)isPublishPermission:(NSString*)permission;\n+ (BOOL)areAllPermissionsReadPermissions:(NSArray*)permissions;\n+ (NSArray*)addBasicInfoPermission:(NSArray*)permissions;\n+ (void)fetchAppSettings:(NSString *)appID\n callback:(void (^)(FBFetchedAppSettings *, NSError *))callback;\n+ (FBFetchedAppSettings *)fetchedAppSettings;\n+ (NSString *)attributionID;\n+ (NSString *)advertiserID;\n+ (FBAdvertisingTrackingStatus)advertisingTrackingStatus;\n+ (void)updateParametersWithEventUsageLimits:(NSMutableDictionary *)parameters;\n\n// Encode a data structure in JSON, any errors will just be logged.\n+ (NSString *)simpleJSONEncode:(id)data;\n+ (id)simpleJSONDecode:(NSString *)jsonEncoding;\n+ (NSString *)simpleJSONEncode:(id)data\n error:(NSError **)error\n writingOptions:(NSJSONWritingOptions)writingOptions;\n+ (id)simpleJSONDecode:(NSString *)jsonEncoding\n error:(NSError **)error;\n+ (BOOL) isRetinaDisplay;\n+ (NSString *)newUUIDString;\n+ (BOOL)isRegisteredURLScheme:(NSString *)urlScheme;\n\n+ (NSString *) buildFacebookUrlWithPre:(NSString*)pre;\n+ (NSString *) buildFacebookUrlWithPre:(NSString*)pre\n withPost:(NSString *)post;\n\n@end\n \n#define FBConditionalLog(condition, desc, ...) 
\\\ndo { \\\n if (!(condition)) {\t\\\n NSString *msg = [NSString stringWithFormat:(desc), ##__VA_ARGS__]; \\\n NSLog(@\"FBConditionalLog: %@\", msg); \\\n } \\\n} while(NO)\n \n#define FB_BASE_URL @\"facebook.com\"\n", "meta": {"content_hash": "af69a6a40fff78a1b6be8183fbde563d", "timestamp": "", "source": "github", "line_count": 80, "max_line_length": 102, "avg_line_length": 41.0375, "alnum_prop": 0.7401766676819982, "repo_name": "bliustar/couponquest", "id": "f8e9a455c850ec7a394c32558c1a36edab59cb45", "size": "3881", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Pods/Facebook-iOS-SDK/src/FBUtility.h", "mode": "33188", "license": "mit", "language": [{"name": "AppleScript", "bytes": "18419"}, {"name": "C", "bytes": "270586"}, {"name": "C++", "bytes": "6727"}, {"name": "CSS", "bytes": "6111"}, {"name": "JavaScript", "bytes": "111"}, {"name": "M", "bytes": "163592"}, {"name": "Objective-C", "bytes": "7524991"}, {"name": "Ruby", "bytes": "13807"}, {"name": "Shell", "bytes": "40797"}]}} {"text": "\n\n \n \n \n historical-examples: Not compatible \ud83d\udc7c\n \n \n \n \n \n \n \n \n \n \n
\n \n
\n
\n
\n \u00ab Up\n

\n historical-examples\n \n 8.7.0\n Not compatible \ud83d\udc7c\n \n

\n

\ud83d\udcc5 (2022-11-21 00:31:44 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed # Synopsis\nbase-bigarray       base\nbase-num            base        Num library distributed with the OCaml compiler\nbase-ocamlbuild     base        OCamlbuild binary and libraries distributed with the OCaml compiler\nbase-threads        base\nbase-unix           base\ncamlp5              7.14        Preprocessor-pretty-printer of OCaml\nconf-findutils      1           Virtual package relying on findutils\nconf-perl           2           Virtual package relying on perl\ncoq                 8.6         Formal proof management system\nnum                 0           The Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.02.3      The OCaml compiler (virtual package)\nocaml-base-compiler 4.02.3      Official 4.02.3 release\nocaml-config        1           OCaml Switch Configuration\nocamlfind           1.9.5       A library manager for OCaml\n# opam file:\nopam-version: "2.0"\nmaintainer: "Hugo.Herbelin@inria.fr"\nhomepage: "https://github.com/coq-contribs/historical-examples"\nlicense: "LGPL 2.1"\nbuild: [make "-j%{jobs}%"]\ninstall: [make "install"]\nremove: ["rm" "-R" "%{lib}%/coq/user-contrib/HistoricalExamples"]\ndepends: [\n  "ocaml"\n  "coq" {>= "8.7" & < "8.8~"}\n]\ntags: [ "keyword: Newman's lemma" "keyword: Tarski's fixpoint theorem" "keyword: line formatting" "keyword: binary-search paradigm" "keyword: square root approximation" "keyword: Calculus of Constructions" "keyword: history of Coq" "category: Miscellaneous/Coq Use Examples" ]\nauthors: [ "G\u00e9rard Huet" "Christine Paulin" ]\nbug-reports: "https://github.com/coq-contribs/historical-examples/issues"\ndev-repo: "git+https://github.com/coq-contribs/historical-examples.git"\nsynopsis: "Historical examples developed in the (pure) Calculus of Constructions"\ndescription: """\nThis is a collection of historical examples developed in\nsystem CoC that implemented Coquand's Calculus of 
Constructions.\nNewman.v and Tarski.v originate in version 1.10, Manna.v and\nFormat.v are from version 4.3. Their evolution to the Calculus of\nInductive Constructions (up to Coq V6.3) are in MannaCIC.v and\nFormatCIC.v. (Collection by Hugo Herbelin.)"""\nflags: light-uninstall\nurl {\n  src:\n    "https://github.com/coq-contribs/historical-examples/archive/v8.7.0.tar.gz"\n  checksum: "md5=b77b1bc10c081e5fce0ce9fb46e7a661"\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install \ud83c\udfdc\ufe0f

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-historical-examples.8.7.0 coq.8.6
\n
Return code
\n
5120
\n
Output
\n
[NOTE] Package coq is already installed (current version is 8.6).\nThe following dependencies couldn't be met:\n  - coq-historical-examples -> coq >= 8.7 -> ocaml >= 4.05.0\n      base of this switch (use `--unlock-base' to force)\nYour request can't be satisfied:\n  - No available version of coq satisfies the constraints\nNo solution found, exiting\n
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
opam remove -y coq; opam install -y --show-action --unlock-base coq-historical-examples.8.7.0
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Install \ud83d\ude80

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Installation size

\n

No files were installed.

\n

Uninstall \ud83e\uddf9

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub \u00a9 Guillaume Claret \ud83d\udc23\n

\n
\n
\n \n \n \n\n", "meta": {"content_hash": "db15d63af738afb6716f3fed684f578b", "timestamp": "", "source": "github", "line_count": 173, "max_line_length": 364, "avg_line_length": 44.68208092485549, "alnum_prop": 0.5670116429495472, "repo_name": "coq-bench/coq-bench.github.io", "id": "a1f2f1b4873915d54307d2751605a717d19f08e6", "size": "7756", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.02.3-2.0.6/released/8.6/historical-examples/8.7.0.html", "mode": "33188", "license": "mit", "language": []}} {"text": "\n\n \n \n \"\u700f\u89bd\u4e3b\u9801\"\n \"\u5411\u4e0a\u700f\u89bd\"\n \"\u66f4\u591a\u9078\u9805\"\n \"\u5b8c\u6210\"\n \"\u986f\u793a\u5168\u90e8\"\n \"\u9078\u64c7\u61c9\u7528\u7a0b\u5f0f\"\n \"\u6e05\u9664\u67e5\u8a62\"\n \"\u641c\u5c0b\u67e5\u8a62\"\n \"\u641c\u5c0b\"\n \"\u63d0\u4ea4\u67e5\u8a62\"\n \"\u8a9e\u97f3\u641c\u5c0b\"\n \"\u5206\u4eab\u5c0d\u8c61\"\n \"\u8207\u300c%s\u300d\u5206\u4eab\"\n", "meta": {"content_hash": "3f9e163499f8e614270043234dc8f1ea", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 198, "avg_line_length": 85.88888888888889, "alnum_prop": 0.7509702457956016, "repo_name": "orlyngerano/searchmedicine", "id": "64ebf00c15a4b4d8ef6557b0e32bbd795413bf04", "size": "1648", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "uMedInfo/build/intermediates/res/debug/values-zh-rHK/values-zh-rHK.xml", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "485262"}]}} {"text": "\n\n \n \n \n dictionaries: 18 s \ud83c\udfc6\n \n \n \n \n \n \n \n \n \n \n
\n \n
\n
\n
\n \u00ab Up\n

\n dictionaries\n \n 8.7.0\n 18 s \ud83c\udfc6\n \n

\n

\ud83d\udcc5 (2022-03-24 00:13:48 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed # Synopsis\nbase-bigarray       base\nbase-threads        base\nbase-unix           base\ncamlp5              7.14        Preprocessor-pretty-printer of OCaml\nconf-findutils      1           Virtual package relying on findutils\nconf-perl           2           Virtual package relying on perl\ncoq                 8.7.0       Formal proof management system\nnum                 1.4         The legacy Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.07.1      The OCaml compiler (virtual package)\nocaml-base-compiler 4.07.1      Official release 4.07.1\nocaml-config        1           OCaml Switch Configuration\nocamlfind           1.9.3       A library manager for OCaml\n# opam file:\nopam-version: "2.0"\nmaintainer: "Hugo.Herbelin@inria.fr"\nhomepage: "https://github.com/coq-contribs/dictionaries"\nlicense: "LGPL 2.1"\nbuild: [make "-j%{jobs}%"]\ninstall: [make "install"]\nremove: ["rm" "-R" "%{lib}%/coq/user-contrib/Dictionaries"]\ndepends: [\n  "ocaml"\n  "coq" {>= "8.7" & < "8.8~"}\n]\ntags: [\n  "keyword: modules"\n  "keyword: functors"\n  "keyword: search trees"\n  "category: Computer Science/Data Types and Data Structures"\n  "category: Miscellaneous/Extracted Programs/Data structures"\n  "date: 2003-02-6"\n]\nauthors: [ "Pierre Cast\u00e9ran <casteran@labri.fr>" ]\nbug-reports: "https://github.com/coq-contribs/dictionaries/issues"\ndev-repo: "git+https://github.com/coq-contribs/dictionaries.git"\nsynopsis: "Dictionaries (with modules)"\ndescription: """\nThis file contains a specification for dictionaries, and\nan implementation using binary search trees. Coq's module system,\nwith module types and functors, is heavily used. 
It can be considered\nas a certified version of an example proposed by Paulson in Standard ML.\nA detailed description (in French) can be found in the chapter 11 of\nThe Coq'Art, the book written by Yves Bertot and Pierre Cast\u00e9ran\n(please follow the link http://coq.inria.fr/doc-eng.html)"""\nflags: light-uninstall\nurl {\n  src: "https://github.com/coq-contribs/dictionaries/archive/v8.7.0.tar.gz"\n  checksum: "md5=1e1e78b0a76827b75de4f43ec8602c1d"\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install \ud83c\udfdc\ufe0f

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-dictionaries.8.7.0 coq.8.7.0
\n
Return code
\n
0
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
opam list; echo; ulimit -Sv 4000000; timeout 4h opam install -y --deps-only coq-dictionaries.8.7.0 coq.8.7.0
\n
Return code
\n
0
\n
Duration
\n
11 s
\n
\n

Install \ud83d\ude80

\n
\n
Command
\n
opam list; echo; ulimit -Sv 16000000; timeout 4h opam install -y -v coq-dictionaries.8.7.0 coq.8.7.0
\n
Return code
\n
0
\n
Duration
\n
18 s
\n
\n

Installation size

\n

Total: 320 K

\n
    \n
  • 193 K ../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Dictionaries/dict.vo
  • \n
  • 94 K ../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Dictionaries/dict.glob
  • \n
  • 33 K ../ocaml-base-compiler.4.07.1/lib/coq/user-contrib/Dictionaries/dict.v
  • \n
\n

Uninstall \ud83e\uddf9

\n
\n
Command
\n
opam remove -y coq-dictionaries.8.7.0
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub \u00a9 Guillaume Claret \ud83d\udc23\n

\n
\n
\n \n \n \n\n", "meta": {"content_hash": "9ab734753a1548b92a97ce193d2beae4", "timestamp": "", "source": "github", "line_count": 174, "max_line_length": 159, "avg_line_length": 43.298850574712645, "alnum_prop": 0.556941863551898, "repo_name": "coq-bench/coq-bench.github.io", "id": "a90ec1506139dd590b76b2326309e3fdb4f4ad78", "size": "7561", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.07.1-2.0.6/released/8.7.0/dictionaries/8.7.0.html", "mode": "33188", "license": "mit", "language": []}} {"text": "\ufeffusing System.Collections;\nusing System.Collections.Generic;\nusing UnityEngine;\n\npublic class DebugLogPositionInformation : MonoBehaviour {\n\n [SerializeField]\n private string ObjectName = \"\";\n [SerializeField]\n private Vector3 RectTransformPosition = Vector3.zero;\n [SerializeField]\n private float TimeSinceStart = 0.0f;\n [SerializeField, Range(0, 1000000)]\n private float ComparisonThreshold = 500.0f;\n\n Vector3 PreviousRectTransformPosition = Vector3.zero;\n\n // Use this for initialization\n void Start () {\n\t\t\n\t}\n\t\n\t// Update is called once per frame\n\tvoid Update () {\n ObjectName = gameObject.name;\n PreviousRectTransformPosition = RectTransformPosition;\n RectTransformPosition = GetComponent().anchoredPosition;\n TimeSinceStart = Time.unscaledTime;\n\n CompareCurrentAndPreviousRectTransformPositions();\n\t}\n\n void CompareCurrentAndPreviousRectTransformPositions()\n {\n if(Mathf.Abs(RectTransformPosition.x - PreviousRectTransformPosition.x) > ComparisonThreshold ||\n Mathf.Abs(RectTransformPosition.y - PreviousRectTransformPosition.y) > ComparisonThreshold ||\n Mathf.Abs(RectTransformPosition.z - PreviousRectTransformPosition.z) > ComparisonThreshold)\n {\n Debug.Log(\"Position drastically changed from <\" + PreviousRectTransformPosition + \"> to <\" + RectTransformPosition + \"> at: <\" + Time.unscaledTime + \">.\");\n }\n }\n}\n", "meta": {"content_hash": "ed929c523db0a213362286cdb4dd86ba", 
"timestamp": "", "source": "github", "line_count": 42, "max_line_length": 167, "avg_line_length": 34.92857142857143, "alnum_prop": 0.7143830947511929, "repo_name": "Alfabits/Pip-Pup-Source-Backup", "id": "94b3c6b5fcc451512399d0869c840482ab34d18c", "size": "1469", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Scripts/Debugging/DebugLogPositionInformation.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "191984"}]}} {"text": "@implementation RVMutableDictionary\n\n-(instancetype) init {\n self = [super init];\n if(self) {\n _backingDict = [[NSMutableDictionary alloc] init];\n }\n return self;\n}\n\n-(void) setObject:(id)anObject forKey:(id)aKey {\n if(anObject == nil) {\n return;\n }\n [_backingDict setObject:anObject forKey:aKey];\n}\n\n-(id) valueForKey:(NSString *) key {\n return [_backingDict objectForKey:key];\n}\n\n- (void)setValue:(id)value forKey:(NSString *)key{\n [_backingDict setObject:value forKey:key];\n}\n\n//Dont override. We do want the app to error out if there is a missing key\n//- (id)valueForUndefinedKey:(NSString *)key {\n//\n//}\n\n- (void)setValue:(id)value forUndefinedKey:(NSString *)key{\n [_backingDict setObject:value forKey:key];\n}\n\n#pragma mark - passing messages to the backing dictionary\n- (void)forwardInvocation:(NSInvocation *)anInvocation\n{\n if ([_backingDict respondsToSelector:\n [anInvocation selector]])\n [anInvocation invokeWithTarget:_backingDict];\n else\n [super forwardInvocation:anInvocation];\n}\n\n- (NSMethodSignature *)methodSignatureForSelector:(SEL)aSelector {\n return [NSMutableDictionary instanceMethodSignatureForSelector:aSelector];\n}\n\n#pragma mark - remaing KVO compliant. 
Invisibly ignoring this class\n- (void)addObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath options:(NSKeyValueObservingOptions)options context:(void *)context {\n [_backingDict addObserver:observer forKeyPath:keyPath options:options context:context];\n}\n\n- (void)removeObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath context:(void *)context {\n [_backingDict removeObserver:observer forKeyPath:keyPath context:context];\n}\n\n- (void)removeObserver:(NSObject *)observer forKeyPath:(NSString *)keyPath {\n [_backingDict removeObserver:observer forKeyPath: keyPath];\n}\n\n-(NSString *) description {\n return [_backingDict description];\n}\n\n@end\n", "meta": {"content_hash": "3ae7ee87f8c58ea0846bb47b1abe6a50", "timestamp": "", "source": "github", "line_count": 66, "max_line_length": 141, "avg_line_length": 29.303030303030305, "alnum_prop": 0.7233712512926577, "repo_name": "aaronSig/rivet", "id": "a08a8f7ab37aa9b8cc4afe73a46d18da7ae7a656", "size": "2296", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Rivet/RVMutableDictionary.m", "mode": "33188", "license": "mit", "language": [{"name": "Objective-C", "bytes": "24549"}, {"name": "Ruby", "bytes": "6100"}]}} {"text": "StroikaRoot=$(abspath ../../../../../../)/\n\nifneq ($(CONFIGURATION),)\n\t#no error if missing cuz could be doing make clobber\n\t-include $(StroikaRoot)IntermediateFiles/$(CONFIGURATION)/Configuration.mk\nendif\n\ninclude $(StroikaRoot)ScriptsLib/Makefile-Common.mk\ninclude $(StroikaRoot)ScriptsLib/SharedMakeVariables-Default.mk\n\nSrcDir\t= $(StroikaRoot)Library/Sources/Stroika/Frameworks/SystemPerformance/Support/\nObjDir\t=\t$(StroikaRoot)IntermediateFiles/$(CONFIGURATION)/Library/Frameworks/SystemPerformance/Support/\n\nifeq ($(WIN_USE_PROGRAM_DATABASE),1)\nCXXFLAGS+=-Fd$(call FUNCTION_CONVERT_FILEPATH_TO_COMPILER_NATIVE,$(StroikaRoot)Builds/$(CONFIGURATION)/Stroika-Frameworks.pdb)\nendif\n\n\nvpath %cpp $(SrcDir)\n\nObjs\t=\t\\\n\n\nifneq ($(findstring 
Windows,$(TARGET_PLATFORMS)),)\nObjs+=\t$(ObjDir)WMICollector${OBJ_SUFFIX}\nendif\n\ninclude $(StroikaRoot)ScriptsLib/SharedBuildRules-Default.mk\n\nall:\t$(Objs)\n\n", "meta": {"content_hash": "9407a90a5290fa8528827bedf1a69352", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 126, "avg_line_length": 28.93548387096774, "alnum_prop": 0.770345596432553, "repo_name": "SophistSolutions/Stroika", "id": "4304dff40e9711abd881197b35dbe3326c40b894", "size": "897", "binary": false, "copies": "1", "ref": "refs/heads/v2.1-Release", "path": "Library/Sources/Stroika/Frameworks/SystemPerformance/Support/Makefile", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "438"}, {"name": "C", "bytes": "15598"}, {"name": "C++", "bytes": "12952163"}, {"name": "Dockerfile", "bytes": "38254"}, {"name": "Makefile", "bytes": "299984"}, {"name": "Perl", "bytes": "52501"}, {"name": "R", "bytes": "29685"}, {"name": "Shell", "bytes": "81240"}]}} {"text": "\n\n\n \n Nubi - Place to learn, way to heaven\n \n \n \n \n \n\n \n \n \n
\n
\n {{ message }}\n
\n \n
\n
\n {{msg.id}}: {{msg.text}}\n
\n
\n
\n
\n
\n \n \n\n\n", "meta": {"content_hash": "016cd4166851c0a29e46c56318d748c9", "timestamp": "", "source": "github", "line_count": 72, "max_line_length": 108, "avg_line_length": 35.638888888888886, "alnum_prop": 0.41504286827747466, "repo_name": "nevaku/kurniakue", "id": "c1fddfd7854036c4244440c7bdccd95d47a78c02", "size": "2566", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "kurse/src/main/webapp/nubi.html", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "2850"}, {"name": "CSS", "bytes": "1394"}, {"name": "HTML", "bytes": "23766"}, {"name": "Java", "bytes": "743067"}, {"name": "JavaScript", "bytes": "47267"}, {"name": "PHP", "bytes": "1021"}]}} {"text": "\n\n\n\t\n\t\t\n\t\t\t\n\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\t\n\t\t\t\t\n\t\t\t\n\t\t\n\t\t\n\t\t\t\n\t\t\n\t\t\n\t\t\t\n\t\t\n\t\n\n", "meta": {"content_hash": "d81b1eb2822bc24de68518d7b5a4a6a9", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 68, "avg_line_length": 28.0, "alnum_prop": 0.6488095238095238, "repo_name": "TheCoSMoCompany/biopredyn", "id": "17535b5e576ec46677ba11487ae6f18a49e3210a", "size": "672", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "Prototype/src/libsbml-5.10.0/src/sbml/validator/test/test-data/sbml-unit-constraints/20701-pass-00-15.xml", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "C", "bytes": "3535918"}, {"name": "C++", "bytes": "26120778"}, {"name": "CMake", "bytes": "455400"}, {"name": "CSS", "bytes": "49020"}, {"name": "Gnuplot", "bytes": "206"}, {"name": "HTML", "bytes": "193068"}, {"name": "Java", "bytes": "66517"}, {"name": "JavaScript", "bytes": "3847"}, {"name": "Makefile", "bytes": "30905"}, {"name": "Perl", "bytes": "3018"}, {"name": "Python", "bytes": "7891301"}, {"name": "Shell", "bytes": "247654"}, {"name": "TeX", "bytes": "22566"}, {"name": "XSLT", "bytes": "55564"}]}} {"text": "\n\n\n\n\nUses of Package org.eclipse.emf.cdo.common.admin (CDO 
Model Repository Documentation)\n\n\n\n\n\n\n\n\n\n
\n
    \n
  • Prev
  • \n
  • Next
  • \n
\n\n\n
\n\n
\n\n\n
\n\n
\n

Uses of Package
org.eclipse.emf.cdo.common.admin

\n
\n
\n\n
\n\n\n
\n
    \n
  • Prev
  • \n
  • Next
  • \n
\n\n\n
\n\n
\n\n\n
\n\n

Copyright (c) 2011-2015 Eike Stepper (Berlin, Germany) and others.

\n\n\n", "meta": {"content_hash": "8c93c173c927df36696dec6c331ff3bc", "timestamp": "", "source": "github", "line_count": 221, "max_line_length": 330, "avg_line_length": 44.55656108597285, "alnum_prop": 0.6509596831522291, "repo_name": "kribe48/wasp-mbse", "id": "fadde866ac62a4737e451d1f307a64d73404c347", "size": "9847", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "WASP-turtlebot-DSL/.metadata/.plugins/org.eclipse.pde.core/Eclipse Application/org.eclipse.osgi/142/0/.cp/javadoc/org/eclipse/emf/cdo/common/admin/package-use.html", "mode": "33188", "license": "mit", "language": [{"name": "CMake", "bytes": "6732"}, {"name": "CSS", "bytes": "179891"}, {"name": "GAP", "bytes": "163745"}, {"name": "HTML", "bytes": "1197667"}, {"name": "Java", "bytes": "1369191"}, {"name": "JavaScript", "bytes": "1555"}, {"name": "Python", "bytes": "11033"}, {"name": "Roff", "bytes": "303"}, {"name": "Xtend", "bytes": "13981"}]}} {"text": "\n\n\n\n\n\nClusterPassivationStoreConsumer (BOM: * : All 2.4.0.Final API)\n\n\n\n\n\n\n\n\n
\n\n\n\n\n\n\n\n
Thorntail API, 2.4.0.Final
\n
\n
\n\n\n\n
\n\n
\n
\n
    \n
  • Summary: 
  • \n
  • Nested | 
  • \n
  • Field | 
  • \n
  • Constr | 
  • \n
  • Method
  • \n
\n
    \n
  • Detail: 
  • \n
  • Field | 
  • \n
  • Constr | 
  • \n
  • Method
  • \n
\n
\n\n\n
\n\n\n
\n
org.wildfly.swarm.config.ejb3
\n

Interface ClusterPassivationStoreConsumer<T extends ClusterPassivationStore<T>>

\n
\n
\n
\n
    \n
  • \n
    \n
    Functional Interface:
    \n
    This is a functional interface and can therefore be used as the assignment target for a lambda expression or method reference.
    \n
    \n
    \n
    \n
    @FunctionalInterface\npublic interface ClusterPassivationStoreConsumer<T extends ClusterPassivationStore<T>>
    \n
  • \n
\n
\n
\n\n
\n
\n\n
\n
\n\n\n
\n\n\n\n\n\n\n\n
Thorntail API, 2.4.0.Final
\n
\n
\n\n\n\n
\n\n
\n
\n
    \n
  • Summary: 
  • \n
  • Nested | 
  • \n
  • Field | 
  • \n
  • Constr | 
  • \n
  • Method
  • \n
\n
    \n
  • Detail: 
  • \n
  • Field | 
  • \n
  • Constr | 
  • \n
  • Method
  • \n
\n
\n\n\n
\n\n

Copyright © 2019 JBoss by Red Hat. All rights reserved.

\n\n\n", "meta": {"content_hash": "23a40d844598ca106cb55992db8a162b", "timestamp": "", "source": "github", "line_count": 248, "max_line_length": 732, "avg_line_length": 47.068548387096776, "alnum_prop": 0.6682086867129272, "repo_name": "wildfly-swarm/wildfly-swarm-javadocs", "id": "502b0b4e3260af1ed1411b9f43ef8f2ae16f3149", "size": "11673", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "2.4.0.Final/apidocs/org/wildfly/swarm/config/ejb3/ClusterPassivationStoreConsumer.html", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "using System;\nusing NUnit.Framework;\nusing Moq;\nusing Rothko.Model;\nusing Rothko.Commands;\nusing Rothko.Interfaces.Services;\nusing System.Collections.Generic;\nusing Rothko.Enumerators;\n\nnamespace Rothko.Tests.Commands\n{\n\t[TestFixture]\n\tpublic class AttackCommandTests\n\t{\n\t\tMock _RandomNumberGeneratorServiceMock;\n\t\tIRandomNumberGeneratorService _RandomNumberGeneratorService;\n\t\tUnit _AttackingUnit;\n\t\tUnit _DefendingUnit;\n\t\tUnit _CarriedUnit;\n\t\tTile _AttackingUnitTile;\n\t\tTile _DefendingUnitTile;\n\t\tDefenseMultiplier _DefenseMultiplierDefendingUnit;\n\t\tAttackMultiplier _AttackMultiplier;\n\t\tList _Tiles;\n\t\tList _DefenseMultipliers;\n\t\tList _AttackMultipliers;\n\t\tMap _Map;\n\t\tAttackCommand _Command;\n\n\t\t[SetUp]\n\t\tpublic void SetUp()\n\t\t{\n\t\t\t_RandomNumberGeneratorServiceMock = new Mock();\n\t\t\t_RandomNumberGeneratorService = _RandomNumberGeneratorServiceMock.Object;\n\n\t\t\t_AttackingUnit = new Unit()\n\t\t\t{\n\t\t\t\tID = 1,\n\t\t\t\tIDUnitType = 1,\n\t\t\t\tHealthPoints = 10,\n\t\t\t\tX = 1,\n\t\t\t\tY = 1,\n\t\t\t\tState = UnitState.Attacking\n\t\t\t};\n\n\t\t\t_DefendingUnit = new Unit()\n\t\t\t{\n\t\t\t\tID = 2,\n\t\t\t\tIDUnitType = 2,\n\t\t\t\tHealthPoints = 10,\n\t\t\t\tX = 2,\n\t\t\t\tY = 1,\n\t\t\t\tNumberOfUnitsBeingCarried = 1,\n\t\t\t\tNumberOfUnitsThatCanBeCarried = 1\n\t\t\t};\n\n\t\t\t_CarriedUnit = new Unit() { ID = 3, 
HealthPoints = 10, IDUnitCarrier = _DefendingUnit.ID };\n\n\t\t\t_AttackingUnitTile = new Tile()\n\t\t\t{\n\t\t\t\tID = 1,\n\t\t\t\tIDTileType = 1,\n\t\t\t\tX = _AttackingUnit.X,\n\t\t\t\tY = _AttackingUnit.Y\n\t\t\t};\n\n\t\t\t_DefendingUnitTile = new Tile()\n\t\t\t{\n\t\t\t\tID = 2,\n\t\t\t\tIDTileType = 2,\n\t\t\t\tX = _DefendingUnit.X,\n\t\t\t\tY = _DefendingUnit.Y\n\t\t\t};\n\n\t\t\t_AttackMultiplier = new AttackMultiplier()\n\t\t\t{\n\t\t\t\tAttackingUnitIDUnitType = _AttackingUnit.IDUnitType,\n\t\t\t\tDefendingUnitIDUnitType = _DefendingUnit.IDUnitType\n\t\t\t};\n\n\t\t\t_DefenseMultiplierDefendingUnit = new DefenseMultiplier()\n\t\t\t{\n\t\t\t\tIDTileType = 2,\n\t\t\t\tIDUnitType = 2\n\t\t\t};\n\n\t\t\t_Tiles = new List();\n\n\t\t\t_Tiles.Add (_AttackingUnitTile);\n\t\t\t_Tiles.Add (_DefendingUnitTile);\n\n\t\t\t_Map = new Map();\n\n\t\t\t_Map.Units.Add (_AttackingUnit);\n\t\t\t_Map.Units.Add (_DefendingUnit);\n\t\t\t_Map.Units.Add (_CarriedUnit);\n\n\t\t\t_Map.Tiles = _Tiles;\n\n\t\t\t_Map.IDUnit_Selected = 1;\n\n\t\t\t_DefenseMultipliers = new List();\n\n\t\t\t_DefenseMultipliers.Add (_DefenseMultiplierDefendingUnit);\n\n\t\t\t_AttackMultipliers = new List();\n\n\t\t\t_AttackMultipliers.Add (_AttackMultiplier);\n\n\t\t\t_Command = new AttackCommand(\n\t\t\t\t_RandomNumberGeneratorService,\n\t\t\t\t_Map, _Map.Units, _Tiles, \n\t\t\t\t_AttackMultipliers,\n\t\t\t\t_DefenseMultipliers, _DefendingUnit.X, _DefendingUnit.Y);\n\t\t}\n\n\t\t[Test]\n\t\t[TestCase(5, 10, 1, 1, 1, 5, 10, Result = 1)]\n\t\t[TestCase(5, 10, 1, 1, 1, 4, 10, Result = 1)]\n\t\t[TestCase(5, 10, 1, 1, 1, 3, 10, Result = 2)]\n\t\t[TestCase(5, 10, 1, 1, 1, 2, 10, Result = 3)]\n\t\t[TestCase(5, 10, 1, 1, 1, 1, 10, Result = 5)]\n\t\t[TestCase(1, 10, 1, 1, 1, 5, 10, Result = 0)]\n\t\t[TestCase(2, 10, 1, 1, 1, 5, 10, Result = 0)]\n\t\t[TestCase(3, 10, 1, 1, 1, 5, 10, Result = 1)]\n\t\t[TestCase(4, 10, 1, 1, 1, 5, 10, Result = 1)]\n\t\t[TestCase(5, 9, 1, 1, 1, 5, 10, Result = 1)]\n\t\t[TestCase(5, 
8, 1, 1, 1, 5, 10, Result = 1)]\n\t\t[TestCase(5, 7, 1, 1, 1, 5, 10, Result = 1)]\n\t\t[TestCase(5, 6, 1, 1, 1, 5, 10, Result = 1)]\n\t\t[TestCase(5, 5, 1, 1, 1, 5, 10, Result = 1)]\n\t\t[TestCase(5, 4, 1, 1, 1, 5, 10, Result = 0)]\n\t\t[TestCase(5, 3, 1, 1, 1, 5, 10, Result = 0)]\n\t\t[TestCase(5, 2, 1, 1, 1, 5, 10, Result = 0)]\n\t\t[TestCase(5, 1, 1, 1, 1, 5, 10, Result = 0)]\n\t\t[TestCase(5, 10, 1.5, 1, 1, 5, 10, Result = 2)]\n\t\t[TestCase(5, 10, 1.5, 1, 1, 4, 10, Result = 2)]\n\t\t[TestCase(5, 10, 1.5, 1, 1, 3, 10, Result = 3)]\n\t\t[TestCase(5, 10, 1.5, 1, 1, 2, 10, Result = 4)]\n\t\t[TestCase(5, 10, 1.5, 1, 1, 1, 10, Result = 8)]\n\t\tpublic int TestDefendingUnitHealthPointsAfterAttack(\n\t\t\tint attackingUnitAttackPoints,\n\t\t\tint attackingUnitHealthPoints,\n\t\t\tdecimal attackingMultiplier,\n\t\t\tdecimal terrainDefenseMultiplier,\n\t\t\tdecimal luckMultiplier,\n\t\t\tint defendingUnitDefensePoints,\n\t\t\tint defendingUnitHealthPoints)\n\t\t{\n\t\t\t_AttackingUnit.AttackPoints = attackingUnitAttackPoints;\n\t\t\t_AttackingUnit.HealthPoints = attackingUnitHealthPoints;\n\n\t\t\t_AttackMultiplier.Multiplier = attackingMultiplier;\n\t\t\t_DefenseMultiplierDefendingUnit.Multiplier = terrainDefenseMultiplier;\n\n\t\t\t_DefendingUnit.DefensePoints = defendingUnitDefensePoints;\n\t\t\t_DefendingUnit.HealthPoints = defendingUnitHealthPoints;\n\n\t\t\t_RandomNumberGeneratorServiceMock\n\t\t\t\t\t.Setup(r => r.GetRandomNumber(It.IsAny(), It.IsAny()))\n\t\t\t\t\t.Returns((int)(luckMultiplier * 100));\n\n\t\t\t_Command.Execute();\n\n\t\t\treturn defendingUnitHealthPoints - _DefendingUnit.HealthPoints;\n\t\t}\n\n\t\t[Test]\n\t\tpublic void IfUnitBeingAttackedGetsKilledAndIsCarryingOtherUnitsThoseUnitsShouldAlsoBeKilled()\n\t\t{\n\t\t\tSetUpKillingAttackCommand();\n\n\t\t\t_Command.Execute();\n\n\t\t\tAssert.IsTrue(_DefendingUnit.HealthPoints <= 0);\n\t\t\tAssert.AreEqual(0, _CarriedUnit.HealthPoints);\n\t\t\tAssert.AreEqual(UnitState.Destroyed, 
_CarriedUnit.State);\n\t\t}\n\n\t\t[Test]\n\t\tpublic void IfUnitGetsHealthBellowZeroShouldHaveItsStateChangedToBeingDestroyed()\n\t\t{\n\t\t\tSetUpKillingAttackCommand();\n\n\t\t\t_Command.Execute();\n\n\t\t\tAssert.AreEqual(UnitState.BeingDestroyed, _DefendingUnit.State);\n\t\t}\n\n\t\t[Test]\n\t\tpublic void ExecuteCommandShouldSetAttackingUnitStateToIdle ()\n\t\t{\n\t\t\tSetUpKillingAttackCommand();\n\n\t\t\t_Command.Execute();\n\n\t\t\tAssert.AreEqual(UnitState.Idle, _AttackingUnit.State);\n\t\t}\n\n\t\tprivate void SetUpKillingAttackCommand()\n\t\t{\n\t\t\t_AttackingUnit.AttackPoints = 9999;\n\t\t\t_AttackingUnit.HealthPoints = 9999;\n\n\t\t\t_AttackMultiplier.Multiplier = 1;\n\t\t\t_DefenseMultiplierDefendingUnit.Multiplier = 1;\n\n\t\t\t_DefendingUnit.DefensePoints = 1;\n\t\t\t_DefendingUnit.HealthPoints = 1;\n\n\t\t\t_RandomNumberGeneratorServiceMock\n\t\t\t\t\t.Setup(r => r.GetRandomNumber(It.IsAny(), It.IsAny()))\n\t\t\t\t\t.Returns(100);\n\t\t}\n\n\t\t[Test]\n\t\tpublic void ExecuteCommandShouldSetGameStateToIdle()\n\t\t{\n\t\t\tSetUpKillingAttackCommand();\n\n\t\t\t_Map.GameState = GameState.UnitAttacking;\n\n\t\t\t_Command.Execute();\n\n\t\t\tAssert.AreEqual(GameState.Idle, _Map.GameState);\n\t\t}\n\n\t\t[Test]\n\t\tpublic void UnitsBeingCarriedShouldNotBeSelectedAsDefendingUnit()\n\t\t{\n\t\t\tSetUpKillingAttackCommand();\n\n\t\t\t_Map.Units.Remove (_DefendingUnit);\n\t\t\t_Map.Units.Add (_DefendingUnit);\n\t\t\t\n\t\t\t_CarriedUnit.X = _DefendingUnit.X;\n\t\t\t_CarriedUnit.Y = _DefendingUnit.Y;\n\n\t\t\t_Command.Execute();\n\n\t\t\tAssert.IsFalse(_DefendingUnit.Active);\n\t\t}\n\t}\n}\n\n", "meta": {"content_hash": "6d4e799356a3df83d65f3e57e5f0f516", "timestamp": "", "source": "github", "line_count": 245, "max_line_length": 96, "avg_line_length": 26.30204081632653, "alnum_prop": 0.6814090626939789, "repo_name": "t-recx/Rothko", "id": "f0f3e6bdeb1854fb44abdbad2b1ed45de4c80b69", "size": "6444", "binary": false, "copies": "1", "ref": 
"refs/heads/master", "path": "Rothko.Tests/Commands/AttackCommandTests.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "832326"}]}} {"text": "\ufeffusing DragonSpark.Text;\nusing Markdig;\nusing Markdig.Renderers;\nusing SmartFormat;\nusing System.IO;\nusing System.Text;\n\nnamespace DragonSpark.Application.Messaging;\n\npublic class MarkdownEmailTemplate : IFormatter where T : notnull\n{\n\treadonly string _template;\n\n\tprotected MarkdownEmailTemplate(byte[] data) : this(Encoding.UTF8.GetString(data)) {}\n\n\tprotected MarkdownEmailTemplate(string template) => _template = template;\n\n\tpublic string Get(T parameter)\n\t{\n\t\tvar content = Smart.Format(_template, parameter);\n\t\tvar pipeline = new MarkdownPipelineBuilder().Build();\n\t\tvar markdown = Markdown.Parse(content, pipeline);\n\t\tusing var writer = new StringWriter();\n\t\tvar renderer = new HtmlRenderer(writer);\n\n\t\tpipeline.Setup(renderer);\n\t\trenderer.Render(markdown);\n\n\t\tvar result = writer.ToString();\n\t\treturn result;\n\t}\n}", "meta": {"content_hash": "a90e8fffaeade1285e68fb308a0a7eee", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 86, "avg_line_length": 26.6875, "alnum_prop": 0.7330210772833724, "repo_name": "DragonSpark/Framework", "id": "7d2a367e52dc451c643279466c2677a8a03c0455", "size": "856", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "DragonSpark.Application/Messaging/MarkdownEmailTemplate.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "2079497"}, {"name": "CSS", "bytes": "673"}, {"name": "HTML", "bytes": "103546"}, {"name": "JavaScript", "bytes": "1311"}, {"name": "TypeScript", "bytes": "2495"}]}} {"text": "/* @flow */\nimport React from 'react';\nimport Icon from 'mineral-ui/Icon';\n\nimport type { IconProps } from 'mineral-ui/Icon/types';\n\n/* eslint-disable prettier/prettier */\nexport default function IconTrendingFlat(props: IconProps) {\n const iconProps = 
{\n rtl: true,\n ...props\n };\n\n return (\n \n \n \n \n \n );\n}\n\nIconTrendingFlat.displayName = 'IconTrendingFlat';\nIconTrendingFlat.category = 'action';\n", "meta": {"content_hash": "604ed3a973322110cb450160ad49f68d", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 60, "avg_line_length": 20.833333333333332, "alnum_prop": 0.642, "repo_name": "mineral-ui/mineral-ui", "id": "13fda35f9455c000ba3c83ee973168843dcaf16d", "size": "500", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packages/mineral-ui-icons/src/IconTrendingFlat.js", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "9584"}, {"name": "HTML", "bytes": "6548"}, {"name": "JavaScript", "bytes": "2408689"}]}} {"text": "package centreon::common::emc::navisphere::mode::faults;\n\nuse base qw(centreon::plugins::mode);\n\nuse strict;\nuse warnings;\n\nsub new {\n my ($class, %options) = @_;\n my $self = $class->SUPER::new(package => __PACKAGE__, %options);\n bless $self, $class;\n \n $options{options}->add_options(arguments =>\n { \n });\n\n return $self;\n}\n\nsub check_options {\n my ($self, %options) = @_;\n $self->SUPER::init(%options);\n}\n\nsub run {\n my ($self, %options) = @_;\n my $clariion = $options{custom};\n \n my $response = $clariion->execute_command(cmd => 'faults -list', secure_only => 1);\n chomp $response;\n \n if ($response =~ /The array is operating normally/msg) {\n $self->{output}->output_add(severity => 'ok',\n short_msg => 'The array is operating normally');\n } else {\n $self->{output}->output_add(long_msg => $response);\n $self->{output}->output_add(severity => 'critical',\n short_msg => 'Problem detected (see detailed output for more details');\n }\n \n $self->{output}->display();\n $self->{output}->exit();\n}\n\n1;\n\n__END__\n\n=head1 MODE\n\nDetect faults on the array.\n\n=over 8\n\n=back\n\n=cut\n", "meta": {"content_hash": "10a72fe84164bebf091b4d7bd81810c1", "timestamp": "", "source": "github", 
"line_count": 57, "max_line_length": 107, "avg_line_length": 22.50877192982456, "alnum_prop": 0.5276695245518317, "repo_name": "centreon/centreon-plugins", "id": "c82a0c097bb01cc0e6f09fb6bcb4c57221adacab", "size": "2043", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "centreon/common/emc/navisphere/mode/faults.pm", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "719"}, {"name": "Perl", "bytes": "21731182"}]}} {"text": "ACCEPTED\n\n#### According to\nIndex Fungorum\n\n#### Published in\nnull\n\n#### Original name\nOpegrapha urosperma var. substellata Redinger\n\n### Remarks\nnull", "meta": {"content_hash": "37112b011a3115e21bd00371621d378e", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 45, "avg_line_length": 11.538461538461538, "alnum_prop": 0.7333333333333333, "repo_name": "mdoering/backbone", "id": "1c839667d5252f95ecd899d67bf20cbdafd4c512", "size": "219", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Fungi/Ascomycota/Arthoniomycetes/Arthoniales/Roccellaceae/Opegrapha/Opegrapha urosperma/Opegrapha urosperma substellata/README.md", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "var autoSignIn = function(mode) {\n if (cmapiAvailable) {\n return navigator.credentials.get({\n // TODO 8-4: Reflect a silent access\n password: true\n }).then(function(cred) {\n if (cred) {\n var form = new FormData();\n var csrf_token = document.querySelector('#csrf_token').value;\n form.append('csrf_token', csrf_token);\n\n switch (cred.type) {\n case 'password':\n form.append('email', cred.id);\n form.append('password', cred.password);\n return fetch('/auth/password', {\n method: 'POST',\n credentials: 'include',\n body: form\n });\n }\n return Promise.reject();\n } else {\n return Promise.reject();\n }\n }).then(function(res) {\n if (res.status === 200) {\n return Promise.resolve();\n } else {\n return Promise.reject();\n }\n });\n } else {\n return 
Promise.reject();\n }\n};\n// TODO 7-1: Sign-In a user upon landing the page\n", "meta": {"content_hash": "7de7adefe33127422255c46659aa8c8c", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 69, "avg_line_length": 27.56756756756757, "alnum_prop": 0.5205882352941177, "repo_name": "googlecodelabs/credential-management-api", "id": "56766ba8e89debefc7bb9ccff55fccb65acec421", "size": "1020", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "step06/static/scripts/auto.js", "mode": "33188", "license": "apache-2.0", "language": [{"name": "HTML", "bytes": "5384419"}, {"name": "JavaScript", "bytes": "47506"}, {"name": "Python", "bytes": "59500"}]}} {"text": "// Copyright 2016-2022, University of Colorado Boulder\n\n/**\n * Property whose value must be true or false. Truthy/falsy values are invalid.\n *\n * @author Sam Reid (PhET Interactive Simulations)\n * @author Chris Malley (PixelZoom, Inc.)\n */\n\nimport optionize, { EmptySelfOptions } from '../../phet-core/js/optionize.js';\nimport StrictOmit from '../../phet-core/js/types/StrictOmit.js';\nimport BooleanIO from '../../tandem/js/types/BooleanIO.js';\nimport axon from './axon.js';\nimport Property, { PropertyOptions } from './Property.js';\n\ntype SelfOptions = EmptySelfOptions;\n\n// client cannot specify superclass options that are controlled by BooleanProperty\nexport type BooleanPropertyOptions = SelfOptions & StrictOmit, 'isValidValue' | 'valueType' | 'phetioValueType'>;\n\nexport default class BooleanProperty extends Property {\n\n public constructor( value: boolean, providedOptions?: BooleanPropertyOptions ) {\n\n // Fill in superclass options that are controlled by BooleanProperty.\n const options = optionize>()( {\n valueType: 'boolean',\n phetioValueType: BooleanIO\n }, providedOptions );\n\n super( value, options );\n }\n\n public toggle(): void {\n this.value = !this.value;\n }\n}\n\naxon.register( 'BooleanProperty', BooleanProperty );", "meta": {"content_hash": 
"e8a23d1cddb84f274fec18ffa493257c", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 138, "avg_line_length": 34.743589743589745, "alnum_prop": 0.7357933579335794, "repo_name": "phetsims/axon", "id": "bdb01e38dcb468ddb5fc90c93960c53ce807de2a", "size": "1355", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "js/BooleanProperty.ts", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "4146"}, {"name": "JavaScript", "bytes": "22970"}, {"name": "TypeScript", "bytes": "329832"}]}} {"text": "FROM balenalib/jetson-nano-fedora:33-build\n\nENV NODE_VERSION 15.14.0\nENV YARN_VERSION 1.22.4\n\nRUN for key in \\\n\t6A010C5166006599AA17F08146C2130DFD2497F5 \\\n\t; do \\\n\t\tgpg --keyserver pgp.mit.edu --recv-keys \"$key\" || \\\n\t\tgpg --keyserver keyserver.pgp.com --recv-keys \"$key\" || \\\n\t\tgpg --keyserver ha.pool.sks-keyservers.net --recv-keys \"$key\" ; \\\n\tdone \\\n\t&& curl -SLO \"http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-arm64.tar.gz\" \\\n\t&& echo \"6d5e0074fe4a45d444bc581aa1fd7ce7081b8491b0f785414a6e5cc30c42854a node-v$NODE_VERSION-linux-arm64.tar.gz\" | sha256sum -c - \\\n\t&& tar -xzf \"node-v$NODE_VERSION-linux-arm64.tar.gz\" -C /usr/local --strip-components=1 \\\n\t&& rm \"node-v$NODE_VERSION-linux-arm64.tar.gz\" \\\n\t&& curl -fSLO --compressed \"https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz\" \\\n\t&& curl -fSLO --compressed \"https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc\" \\\n\t&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \\\n\t&& mkdir -p /opt/yarn \\\n\t&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \\\n\t&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \\\n\t&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \\\n\t&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \\\n\t&& npm config set unsafe-perm true -g --unsafe-perm \\\n\t&& rm -rf 
/tmp/*\n\nCMD [\"echo\",\"'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs\"]\n\n RUN curl -SLO \"https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh\" \\\n && echo \"Running test-stack@node\" \\\n && chmod +x test-stack@node.sh \\\n && bash test-stack@node.sh \\\n && rm -rf test-stack@node.sh \n\nRUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \\nArchitecture: ARM v8 \\nOS: Fedora 33 \\nVariant: build variant \\nDefault variable(s): UDEV=off \\nThe following software stack is preinstalled: \\nNode.js v15.14.0, Yarn v1.22.4 \\nExtra features: \\n- Easy way to install packages with `install_packages ` command \\n- Run anywhere with cross-build feature (for ARM only) \\n- Keep the container idling with `balena-idle` command \\n- Show base image details with `balena-info` command' > /.balena/messages/image-info\n\nRUN echo $'#!/bin/sh.real\\nbalena-info\\nrm -f /bin/sh\\ncp /bin/sh.real /bin/sh\\n/bin/sh \"$@\"' > /bin/sh-shim \\\n\t&& chmod +x /bin/sh-shim \\\n\t&& cp /bin/sh /bin/sh.real \\\n\t&& mv /bin/sh-shim /bin/sh", "meta": {"content_hash": "eb72559500ecf971f1c4e10f93954ff3", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 691, "avg_line_length": 66.65853658536585, "alnum_prop": 0.7083790706183681, "repo_name": "nghiant2710/base-images", "id": "8bf87e3dd50f9b2e6725744ad4056b6ff0cc30cf", "size": "2754", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "balena-base-images/node/jetson-nano/fedora/33/15.14.0/build/Dockerfile", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Dockerfile", "bytes": "144558581"}, {"name": "JavaScript", 
"bytes": "16316"}, {"name": "Shell", "bytes": "368690"}]}} {"text": "\n\n#include \n\n#include \"iup.h\"\n\n#include \"iup_object.h\"\n#include \"iup_attrib.h\"\n#include \"iup_str.h\"\n#include \"iup_dialog.h\"\n\n#include \"iup_drv.h\"\n#include \"iupwin_drv.h\"\n#include \"iupwin_str.h\"\n\n\nstatic void winMessageDlgHelpCallback(HELPINFO* HelpInfo)\n{\n Ihandle* ih = (Ihandle*)HelpInfo->dwContextId;\n Icallback cb = (Icallback)IupGetCallback(ih, \"HELP_CB\");\n if (cb && cb(ih) == IUP_CLOSE)\n {\n if (iupStrEqualNoCase(iupAttribGetStr(ih, \"BUTTONS\"), \"OK\")) /* only one button */\n EndDialog((HWND)HelpInfo->hItemHandle, IDOK);\n else\n EndDialog((HWND)HelpInfo->hItemHandle, IDCANCEL);\n }\n}\n\nstatic int winMessageDlgPopup(Ihandle* ih, int x, int y)\n{\n InativeHandle* parent = iupDialogGetNativeParent(ih);\n int result, num_but = 2;\n DWORD dwStyle = MB_TASKMODAL;\n char *icon, *buttons;\n (void)x;\n (void)y;\n\n /* if parent is used then it will be modal only relative to it */\n /* if (!parent)\n parent = GetActiveWindow(); */\n\n icon = iupAttribGetStr(ih, \"DIALOGTYPE\");\n if (iupStrEqualNoCase(icon, \"ERROR\"))\n dwStyle |= MB_ICONERROR;\n else if (iupStrEqualNoCase(icon, \"WARNING\"))\n dwStyle |= MB_ICONWARNING;\n else if (iupStrEqualNoCase(icon, \"INFORMATION\"))\n dwStyle |= MB_ICONINFORMATION;\n else if (iupStrEqualNoCase(icon, \"QUESTION\"))\n dwStyle |= MB_ICONQUESTION;\n\n buttons = iupAttribGetStr(ih, \"BUTTONS\");\n if (iupStrEqualNoCase(buttons, \"OKCANCEL\"))\n dwStyle |= MB_OKCANCEL;\n else if (iupStrEqualNoCase(buttons, \"YESNO\"))\n dwStyle |= MB_YESNO;\n else\n {\n dwStyle |= MB_OK;\n num_but = 1;\n }\n\n if (IupGetCallback(ih, \"HELP_CB\"))\n dwStyle |= MB_HELP;\n\n if (num_but == 2 && iupAttribGetInt(ih, \"BUTTONDEFAULT\") == 2)\n dwStyle |= MB_DEFBUTTON2;\n else\n dwStyle |= MB_DEFBUTTON1;\n\n {\n MSGBOXPARAMS MsgBoxParams;\n MsgBoxParams.cbSize = sizeof(MSGBOXPARAMS);\n MsgBoxParams.hwndOwner = parent;\n MsgBoxParams.hInstance = 
NULL;\n MsgBoxParams.lpszText = iupwinStrToSystem(iupAttribGet(ih, \"VALUE\"));\n MsgBoxParams.lpszCaption = iupwinStrToSystem(iupAttribGet(ih, \"TITLE\"));\n MsgBoxParams.dwStyle = dwStyle;\n MsgBoxParams.lpszIcon = NULL;\n MsgBoxParams.dwContextHelpId = (DWORD_PTR)ih;\n MsgBoxParams.lpfnMsgBoxCallback = (MSGBOXCALLBACK)winMessageDlgHelpCallback;\n MsgBoxParams.dwLanguageId = MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT);\n\n result = MessageBoxIndirect(&MsgBoxParams);\n }\n\n if (result == 0)\n {\n iupAttribSet(ih, \"BUTTONRESPONSE\", NULL);\n return IUP_ERROR;\n }\n\n if (result == IDNO || result == IDCANCEL)\n iupAttribSet(ih, \"BUTTONRESPONSE\", \"2\");\n else\n iupAttribSet(ih, \"BUTTONRESPONSE\", \"1\");\n\n return IUP_NOERROR;\n}\n\nvoid iupdrvMessageDlgInitClass(Iclass* ic)\n{\n ic->DlgPopup = winMessageDlgPopup;\n}\n\n/* \nIn Windows it will always sound a beep. The beep is different for each dialog type.\n*/\n", "meta": {"content_hash": "cc08d3cb78c48be79fdb4b402c86c4d8", "timestamp": "", "source": "github", "line_count": 109, "max_line_length": 86, "avg_line_length": 26.08256880733945, "alnum_prop": 0.6820260288427717, "repo_name": "sanikoyes/iup", "id": "3090312abfb4d6ad094d86fad44862978fc88022", "size": "2945", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/win/iupwin_messagedlg.c", "mode": "33188", "license": "mit", "language": [{"name": "ApacheConf", "bytes": "34"}, {"name": "Batchfile", "bytes": "10717"}, {"name": "C", "bytes": "9868012"}, {"name": "C++", "bytes": "6978963"}, {"name": "CSS", "bytes": "14107"}, {"name": "HTML", "bytes": "2240422"}, {"name": "Lua", "bytes": "682394"}, {"name": "Makefile", "bytes": "139721"}, {"name": "Shell", "bytes": "12420"}]}} {"text": "Sync Facebook Events Calendar to Google Calendar.\n\nFor Facebook and Google Calendar users\n\nWho want to be super organised and not miss out on events or double book themselves\n\nFacebookGoogleCalendarSync\n\nIs a gem\n\nThat imports Facebook events into 
Google Calendar.\n\nUnlike the existing \"import the iCal URL provided by Facebook\" solution\n\nThis gem allows the user to delete events that they are not interested in without going to Facebook to click \"Not Going\",\n\nWhile also allowing synchronisation to be reliably and regularly scheduled* using cron or similar.\n\nIt also displays the details of the \"private\" Facebook events which would otherwise be hidden by Google Calendar.\n\n*Google Calendar updates external calendars at unpredictable times, far too rarely (often more than 24 hours between updates) and doesn't allow manual refreshes. It also doesn't notify you when it has been unable to update your calendar, so you don't know when you're looking at an out of date version.\n\n## Installation\n\n $ gem install facebook-google-calendar-sync\n\nCreate a Goggle permissions file to allow the gem to access your Google Calendar. This command will open a browser window. (Have found this did not work in jruby, tested succesfully in MRI ruby-1.9.3-p392)\n\n $ bundle exec google-api oauth-2-login --scope=https://www.googleapis.com/auth/calendar --client-id=436161995365.apps.googleusercontent.com --client-secret=WgTEjg-b8rXCRL28hweLcSuV\n\nYou will now have a .google-api.yaml file in your home directory. Note: the gem can only access your calendar data if it has this file. The synchronisation process will run locally on your computer - no external service will have access to your Google Calendar.\n\nFor more information on the last step see https://developers.google.com/google-apps/calendar/firstapp#register and the Ruby tab on https://developers.google.com/google-apps/calendar/instantiate\n\n## Usage\n\nYou can find your Facebook iCal URL by going to your Events page, and clicking the cog icon and selecting Export. 
Copy the URL from the \"upcoming events\" link, and change the \"webcal://\" prefix to \"http://\".\n\nTo run:\n\n $ bundle exec facebook-google-calendar-sync -f \"http://www.facebook.com/ical/u.php?uid=12345&key=67890\"\n\nIf your Google API YAML file isn't stored at ~/.google-api.yaml, you can specify the location using the command line option \"-c\"\n\nBy default, your events will be synchronised to a calendar called \"My Facebook Events\". If this does not exist, it will be created using the timezone of your primary calendar. You can specify the name of the calendar (which may be a pre-existing one) using the command line option \"-n\"\n\n## Long term usage\n\nYou will probably want to set this up as a cron job or similar. See the examples directory for more information.\n\n## Known issues\n\nWhen a Facebook event does not have a location, the time in the iCal export will be up to 2 days ahead of the actual date displayed in Facebook. This behaviour can also be observed in the Android mobile client. This may be because the timezone is incorrectly set when there is no location.\n\nIf a Facebook event has synchronised to your Google calendar then deleted, if the synchronisation process attempts (incorrectly) to add it again, Google Calendar will throw an exception saying that an event with this identifier already exists.\n\n## Contributing\n\n1. Fork it\n2. Create your feature branch (`git checkout -b my-new-feature`)\n3. Commit your changes (`git commit -am 'Add some feature'`)\n4. Push to the branch (`git push origin my-new-feature`)\n5. 
Create new Pull Request\n\n## TODO\n\n* Tests....\n* Work out if there is a way to fix the event date when there is no location.\n", "meta": {"content_hash": "ceb8b37f67696d23c8179c5f440009d2", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 302, "avg_line_length": 54.0735294117647, "alnum_prop": 0.7813434865379385, "repo_name": "bethesque/facebook-google-calendar-sync", "id": "13e25798c6504df93563fb1cbab6e02899efb3d9", "size": "3707", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [{"name": "Ruby", "bytes": "21511"}]}} {"text": "\n\n## Agivest \nAgivest is a populous investment platform with a Crowd Investment system that allows everyone to invest from a hundred thousand rupiah.\n```\nSelect the comodities you want invest\nInput nominal invest\nTransfer Money\n```\n## Architecture\nMVC (Model View Controller) - Code Igniter\n\n## Library Used\n- Admin LTE\n\n## Running\n- Clone this repository\n- Activate your xamp / wamp\n- Open your browser and type localhost/agivest\n\n## Preview\ncheck website : -\n", "meta": {"content_hash": "3f573c569132827bbb5e554a793c0432", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 135, "avg_line_length": 26.318181818181817, "alnum_prop": 0.768566493955095, "repo_name": "kahell/agivest", "id": "c3fbb88080027a0368cab380375da7c762891627", "size": "589", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "808277"}, {"name": "HTML", "bytes": "12170319"}, {"name": "JavaScript", "bytes": "2243760"}, {"name": "PHP", "bytes": "2923840"}, {"name": "Shell", "bytes": "4440"}]}} {"text": "\r\nusing namespace std;\r\n\r\nMiddleEarSimpleFilter::MiddleEarSimpleFilter()\r\n{\r\n\tmemset(this, 0, sizeof(MiddleEarSimpleFilter));\r\n\tmLogger = new 
Logger();\r\n\tSetModuleName(\"MiddleEarSimpleFilter\");\r\n}\r\n\r\nMiddleEarSimpleFilter::~MiddleEarSimpleFilter()\r\n{\r\n\tif (mModuleName != NULL)\r\n\t\tdelete [] mModuleName;\r\n}\r\n\r\nint MiddleEarSimpleFilter::ReadParameters(char *ParameterFileName)\r\n{\r\n\tif (mModuleName == NULL)\r\n\t\treturn ReadParameters(ParameterFileName, \"MiddleEarSimpleFilter\");\r\n\telse\r\n\t\treturn ReadParameters(ParameterFileName, mModuleName);\r\n}\r\n\r\nint MiddleEarSimpleFilter::ReadParameters(char *ParameterFileName, char *SectionName)\r\n{\r\n\tCParameterFile theParamFile(ParameterFileName);\r\n\tParameterStatus Status;\r\n\r\n\tmLogger->Log(\" ReadParameters: %s \\\"%s\\\"\", mModuleName, ParameterFileName);\r\n\r\n\t// Number of channels and Frame Size are passed as parameters to Start, see that function for details\r\n\tStatus = theParamFile.GetParameter(SectionName, \"SampleRate_Hz\", mSampleRate_Hz, 0);\r\n\tStatus = theParamFile.GetParameter(SectionName, \"HighpassCornerFreq_Hz\", mHighpassCornerFreq_Hz, 500.0);\r\n\tStatus = theParamFile.GetParameter(SectionName, \"HighpassFilterOrder\", mHighpassFilterOrder, 2);\r\n\tStatus = theParamFile.GetParameter(SectionName, \"Gain\", mGain, 30.0);\r\n\treturn 1;\r\n}\r\n\r\nint MiddleEarSimpleFilter::Start(int NumInputs, EarlabDataStreamType InputTypes[EarlabMaxIOStreamCount], int InputSize[EarlabMaxIOStreamCount][EarlabMaxIOStreamDimensions], \r\n\t\t\tint NumOutputs, EarlabDataStreamType OutputTypes[EarlabMaxIOStreamCount], int OutputSize[EarlabMaxIOStreamCount][EarlabMaxIOStreamDimensions],\r\n\t\t\tunsigned long OutputElementCounts[EarlabMaxIOStreamCount])\r\n{\r\n\tint i;\r\n\r\n\tmLogger->Log(\" Start: %s\", mModuleName);\r\n\t// Perform some validation on my parameters to make sure I can handle the requested input and output streams...\r\n\tif (NumInputs != 1)\r\n\t\tthrow EarlabException(\"%s: Currently this module can only handle one input stream. 
Sorry!\", mModuleName);\r\n\r\n\tif (NumOutputs != 1)\r\n\t\tthrow EarlabException(\"%s: Currently this module can only handle one output stream. Sorry!\", mModuleName);\r\n\r\n\tif (InputTypes[0] != WaveformData)\r\n\t\tthrow EarlabException(\"%s: Currently this module can only handle waveform input data streams. Sorry!\", mModuleName);\r\n\r\n\tif (OutputTypes[0] != WaveformData)\r\n\t\tthrow EarlabException(\"%s: Currently this module can only handle waveform output data streams. Sorry!\", mModuleName);\r\n\r\n\tif (InputSize[0][0] != OutputSize[0][0])\r\n\t\tthrow EarlabException(\"%s: Input and output frame lengths must be identical. Sorry!\", mModuleName);\r\n\r\n\tif (InputSize[0][1] != 0)\r\n\t\tthrow EarlabException(\"%s: Input data must be one-dimensional array. Sorry!\", mModuleName);\r\n\r\n\tif (OutputSize[0][1] != 0)\r\n\t\tthrow EarlabException(\"%s: Output signal must be one-dimensional array. Sorry!\", mModuleName);\r\n\r\n\tOutputElementCounts[0] = OutputSize[0][0];\r\n\r\n\tmFrameSize_Samples = OutputSize[0][0];\r\n\tmHighpassFilter = new FirstOrderHighpass[mHighpassFilterOrder];\r\n\tfor (i = 0; i < mHighpassFilterOrder; i++)\r\n\t{\r\n\t\tmHighpassFilter[i].SetSampleRate_Hz(mSampleRate_Hz);\r\n\t\tmHighpassFilter[i].SetCornerFrequency_Hz(mHighpassCornerFreq_Hz);\r\n\t}\r\n\treturn 1;\r\n}\r\n\r\nint MiddleEarSimpleFilter::Advance(EarlabDataStream *InputStream[EarlabMaxIOStreamCount], EarlabDataStream *OutputStream[EarlabMaxIOStreamCount])\r\n{\r\n int i, j;\r\n\tdouble CurSample;\r\n\tFloatMatrixN *Input, *Output;\r\n\r\n\tmLogger->Log(\" Advance: %s\", mModuleName);\r\n\r\n\tInput = ((EarlabWaveformStream *)InputStream[0])->GetData();\t// Only supporting one output at the present moment\r\n\tOutput = ((EarlabWaveformStream *)OutputStream[0])->GetData();\t// Only supporting one output at the present moment\r\n\r\n\tif (Input->Rank(0) != mFrameSize_Samples)\r\n\t\tthrow EarlabException(\"%s: Input size mismatch with Start()\", 
mModuleName);\r\n\tif (Output->Rank(0) != mFrameSize_Samples)\r\n\t\tthrow EarlabException(\"%s: Output size mismatch with Start()\", mModuleName);\r\n\r\n for (j = 0; j < mFrameSize_Samples; j++)\r\n {\r\n\t\tCurSample = Input->Data(j);\r\n\t\tfor (i = 0; i < mHighpassFilterOrder; i++)\r\n\t\t\tCurSample = mHighpassFilter[i].Filter(CurSample);\r\n Output->Data(j) = (float)(CurSample * mGain);\r\n }\r\n\r\n return j;\r\n}\r\n\r\nint MiddleEarSimpleFilter::Stop(void)\r\n{\r\n\tmLogger->Log(\" Stop: %s\", mModuleName);\r\n\treturn 1;\r\n}\r\n\r\nint MiddleEarSimpleFilter::Unload(void)\r\n{\r\n\tmLogger->Log(\" Unload: %s\", mModuleName);\r\n\treturn 1;\r\n}\r\n\r\nvoid MiddleEarSimpleFilter::SetModuleName(char *ModuleName)\r\n{\r\n\tif (mModuleName != NULL)\r\n\t\tdelete [] mModuleName;\r\n\r\n\tmModuleName = new char[strlen(ModuleName) + 1];\r\n\tstrcpy(mModuleName, ModuleName);\r\n}\r\n\r\nvoid MiddleEarSimpleFilter::SetLogger(Logger *TheLogger)\r\n{\r\n\tif (mLogger != NULL)\r\n\t\tdelete mLogger;\r\n\tmLogger = TheLogger;\r\n}\r\n", "meta": {"content_hash": "e243bbabd83dae712c3addf03d058f57", "timestamp": "", "source": "github", "line_count": 134, "max_line_length": 175, "avg_line_length": 35.54477611940298, "alnum_prop": 0.726222968717195, "repo_name": "AuditoryBiophysicsLab/EarLab", "id": "c9b235585df0551e3d40148cbbb708f0924894e5", "size": "4992", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tags/release-2.0/Modules/MiddleEarSimpleFilter/MiddleEarSimpleFilter.cpp", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "2897"}, {"name": "C", "bytes": "2948189"}, {"name": "C#", "bytes": "2669637"}, {"name": "C++", "bytes": "13833969"}, {"name": "HTML", "bytes": "100731"}, {"name": "Inno Setup", "bytes": "76845"}, {"name": "MATLAB", "bytes": "155995"}, {"name": "Makefile", "bytes": "414201"}]}} {"text": "\n\n#ifndef msvolume_h\n#define msvolume_h\n\n#include \"msfilter.h\"\n\n/**\n * The Volume MSFilter can do:\n * 
\t- measurements of the input signal power, returned in dbm0 or linear scale\n * \t- apply a gain to the input signal and output this amplified signal to its output.\n * By default gain is 1, in which case the filter does not modify the signal (and even does not\n * copy the buffers, just post them on its output queue.\n**/\n\n\n/*returns a volume meter in db0 (max=0 db0)*/\n#define MS_VOLUME_GET\t\tMS_FILTER_METHOD(MS_VOLUME_ID,0,float)\n/*returns a volume in linear scale between 0 and 1 */\n#define MS_VOLUME_GET_LINEAR\t\tMS_FILTER_METHOD(MS_VOLUME_ID,1,float)\n/* set a gain */\n#define MS_VOLUME_SET_GAIN\t\tMS_FILTER_METHOD(MS_VOLUME_ID,2,float)\n\n#define MS_VOLUME_GET_EA_STATE\t\tMS_FILTER_METHOD(MS_VOLUME_ID,3, int)\n\n#define MS_VOLUME_SET_PEER\t\tMS_FILTER_METHOD(MS_VOLUME_ID,4, MSFilter )\n\n#define MS_VOLUME_SET_EA_THRESHOLD\tMS_FILTER_METHOD(MS_VOLUME_ID,5,float)\n\n#define MS_VOLUME_SET_EA_SPEED\t\tMS_FILTER_METHOD(MS_VOLUME_ID,6,float)\n\n#define MS_VOLUME_SET_EA_FORCE\t\tMS_FILTER_METHOD(MS_VOLUME_ID,7,float)\n\n#define MS_VOLUME_ENABLE_AGC\t\tMS_FILTER_METHOD(MS_VOLUME_ID,8,int)\n\n#define MS_VOLUME_ENABLE_NOISE_GATE\tMS_FILTER_METHOD(MS_VOLUME_ID,9,int)\n\n#define MS_VOLUME_SET_NOISE_GATE_THRESHOLD\tMS_FILTER_METHOD(MS_VOLUME_ID,10,float)\n\n#define MS_VOLUME_SET_EA_SUSTAIN\tMS_FILTER_METHOD(MS_VOLUME_ID,11,int)\n\n#define MS_VOLUME_SET_NOISE_GATE_FLOORGAIN MS_FILTER_METHOD(MS_VOLUME_ID,12,float)\n\n/* set a gain in db */\n#define MS_VOLUME_SET_DB_GAIN\t\tMS_FILTER_METHOD(MS_VOLUME_ID,13,float)\n\n/* get a linear gain */\n#define MS_VOLUME_GET_GAIN\t\tMS_FILTER_METHOD(MS_VOLUME_ID,14,float)\n\nextern MSFilterDesc ms_volume_desc;\n\n#endif\n", "meta": {"content_hash": "fdbb7dd4986ee23e8f5ece23b3b04bb1", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 95, "avg_line_length": 33.01923076923077, "alnum_prop": 0.7396622015142691, "repo_name": "Huawei/eSDK_eLTE_SDK_Windows", "id": "da20da48627d51b0867b95decaadb9f1e67e4c15", 
"size": "2536", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/platform/SDK/include/mediastreamer2/include/mediastreamer2/msvolume.h", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "45600"}, {"name": "C", "bytes": "1941730"}, {"name": "C++", "bytes": "9810262"}, {"name": "CMake", "bytes": "3613"}, {"name": "Makefile", "bytes": "209539"}, {"name": "Objective-C", "bytes": "250083"}, {"name": "Protocol Buffer", "bytes": "9363"}]}} {"text": "\r\n#ifndef __RenderTargetListener_H__\r\n#define __RenderTargetListener_H__\r\n\r\n\r\n#include \"OgrePrerequisites.h\"\r\n\r\nnamespace Ogre {\r\n\r\n\t/** \\addtogroup Core\r\n\t* @{\r\n\t*/\r\n\t/** \\addtogroup RenderSystem\r\n\t* @{\r\n\t*/\r\n\t/** Struct containing information about a RenderTarget event.\r\n */\r\n struct RenderTargetEvent\r\n {\r\n /// The source of the event being raised\r\n RenderTarget* source;\r\n };\r\n\r\n /** Struct containing information about a RenderTarget Viewport-specific event.\r\n */\r\n struct RenderTargetViewportEvent\r\n {\r\n /// The source of the event being raised\r\n Viewport* source;\r\n };\r\n\r\n /** A interface class defining a listener which can be used to receive\r\n notifications of RenderTarget events.\r\n @remarks\r\n A 'listener' is an interface designed to be called back when\r\n particular events are called. This class defines the\r\n interface relating to RenderTarget events. In order to receive\r\n notifications of RenderTarget events, you should create a subclass of\r\n RenderTargetListener and override the methods for which you would like\r\n to customise the resulting processing. You should then call\r\n RenderTarget::addListener passing an instance of this class.\r\n There is no limit to the number of RenderTarget listeners you can register,\r\n allowing you to register multiple listeners for different purposes.\r\n

\r\n RenderTarget events occur before and after the target is updated as a whole,\r\n and before and after each viewport on that target is updated. Each RenderTarget\r\n holds it's own set of listeners, but you can register the same listener on\r\n multiple render targets if you like since the event contains details of the\r\n originating RenderTarget.\r\n */\r\n class _OgreExport RenderTargetListener\r\n {\r\n /*\r\n Note that this could have been an abstract class, but I made\r\n the explicit choice not to do this, because I wanted to give\r\n people the option of only implementing the methods they wanted,\r\n rather than having to create 'do nothing' implementations for\r\n those they weren't interested in. As such this class follows\r\n the 'Adapter' classes in Java rather than pure interfaces.\r\n */\r\n public:\r\n\t\tvirtual ~RenderTargetListener() {}\r\n /** Called just before a RenderTarget is about to be rendered into.\r\n @remarks\r\n This event is raised just before any of the viewports on the target\r\n are rendered to. You can perform manual rendering operations here if\r\n you want, but please note that if the Viewport objects attached to this\r\n target are set up to clear the background, you will lose whatever you \r\n render. If you want some kind of backdrop in this event\r\n you should turn off background clearing off on the viewports, and either\r\n clear the viewports yourself in this event handler before doing your rendering\r\n or just render over the top if you don't need to.\r\n */\r\n virtual void preRenderTargetUpdate(const RenderTargetEvent& evt)\r\n { (void)evt; }\r\n\r\n /** Called just after a RenderTarget has been rendered to.\r\n @remarks\r\n This event is called just after all the viewports attached to the target\r\n in question have been rendered to. 
You can perform your own manual rendering\r\n commands in this event handler if you like, these will be composited with\r\n the contents of the target already there (depending on the material settings \r\n you use etc).\r\n */\r\n virtual void postRenderTargetUpdate(const RenderTargetEvent& evt)\r\n { (void)evt; }\r\n\r\n /* Called just before a Viewport on a RenderTarget is to be updated.\r\n @remarks\r\n This method is called before each viewport on the RenderTarget is\r\n rendered to. You can use this to perform per-viewport settings changes,\r\n such as showing / hiding particular overlays.\r\n */\r\n virtual void preViewportUpdate(const RenderTargetViewportEvent& evt)\r\n { (void)evt; }\r\n\r\n /* Called just after a Viewport on a RenderTarget is to be updated.\r\n @remarks\r\n This method is called after each viewport on the RenderTarget is\r\n rendered to. \r\n */\r\n virtual void postViewportUpdate(const RenderTargetViewportEvent& evt)\r\n { (void)evt; }\r\n\r\n\t\t/** Called to notify listener that a Viewport has been added to the \r\n\t\t\ttarget in question.\r\n\t\t*/\r\n\t\tvirtual void viewportAdded(const RenderTargetViewportEvent& evt)\r\n { (void)evt; }\r\n\t\t/** Called to notify listener that a Viewport has been removed from the \r\n\t\t\ttarget in question.\r\n\t\t*/\r\n\t\tvirtual void viewportRemoved(const RenderTargetViewportEvent& evt)\r\n { (void)evt; }\r\n };\r\n\t/** @} */\r\n\t/** @} */\r\n}\r\n\r\n#endif\r\n", "meta": {"content_hash": "c160bbe7f01882dffc30934d275802ea", "timestamp": "", "source": "github", "line_count": 120, "max_line_length": 91, "avg_line_length": 42.391666666666666, "alnum_prop": 0.6457637114212699, "repo_name": "Misterblue/LookingGlass-Viewer", "id": "2e26f7cd85e5da24b3f50113bb41182822f67082", "size": "6476", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/Ogre/include/OgreRenderTargetListener.h", "mode": "33261", "license": "bsd-3-clause", "language": [{"name": "C", "bytes": "2087188"}, 
{"name": "C#", "bytes": "1035483"}, {"name": "C++", "bytes": "15139935"}, {"name": "JavaScript", "bytes": "7396"}, {"name": "Objective-C", "bytes": "6832"}, {"name": "Perl", "bytes": "2099"}, {"name": "Rust", "bytes": "1342"}, {"name": "Shell", "bytes": "15994"}]}} {"text": "require File.expand_path('../boot', __FILE__)\n\nrequire 'rails/all'\n\nBundler.require(*Rails.groups)\nrequire \"breakpoint_rails\"\n\nmodule Dummy\n class Application < Rails::Application\n # Settings in config/environments/* take precedence over those specified here.\n # Application configuration should go into files in config/initializers\n # -- all .rb files in that directory are automatically loaded.\n\n # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.\n # Run \"rake -D time\" for a list of tasks for finding time zone names. Default is UTC.\n # config.time_zone = 'Central Time (US & Canada)'\n\n # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.\n # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]\n # config.i18n.default_locale = :de\n\n # Do not swallow errors in after_commit/after_rollback callbacks.\n config.active_record.raise_in_transactional_callbacks = true\n end\nend\n\n", "meta": {"content_hash": "b3f876e4b12166bbf9cbc3f92d5bfff1", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 99, "avg_line_length": 39.46153846153846, "alnum_prop": 0.7173489278752436, "repo_name": "Stex/breakpoint_rails", "id": "d50222e947e79d0f9d6a602b047fb3b0dd4b1839", "size": "1026", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/dummy/config/application.rb", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "36700"}, {"name": "HTML", "bytes": "4883"}, {"name": "JavaScript", "bytes": "596"}, {"name": "Ruby", "bytes": "17259"}]}} {"text": "import Display from \"./Display\"\nimport Editor from \"./Editor\"\nimport 
RenderMixin from \"../utility/RenderMixin\"\nvar ButtonInput = ReactBootstrap.ButtonInput\nvar Sample = React.createClass({\n mixins: [RenderMixin], \n propTypes: {\n path: React.PropTypes.string.isRequired,\n code: React.PropTypes.string,\n title: React.PropTypes.string.isRequired,\n desc: React.PropTypes.string.isRequired,\n isEditorVisible: React.PropTypes.bool\n },\n render: function() {\n console.log(\"Sample:render\", arguments);\n var props = this.props;\n var path = props.path;\n var isEditorVisible = props.isEditorVisible;\n return (\n
\n

{props.title}

\n

{props.desc}

\n \n { isEditorVisible? null: props.showEditor(path)} value=\"Show Code\" /> }\n \n { isEditorVisible? props.hideEditor(path)} value=\"Hide Code\" /> : null}\n

\n
\n );\n },\n // componentWillReceiveProps: function() {\n // console.log(\"Sample:componentWillReceiveProps\", arguments);\n // },\n // componentDidUpdate: function() {\n // console.log(\"Sample:componentDidUpdate\", arguments);\n // }\n});\n\nexport default Sample", "meta": {"content_hash": "706a41f7884de0cee7742ef6206b3506", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 176, "avg_line_length": 45.05128205128205, "alnum_prop": 0.6317586795674445, "repo_name": "elfandsummer/boilerplate", "id": "d213c2fe2c1107a7f522c9462281b786d67ef1ad", "size": "1757", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/components/Sample.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "1517"}, {"name": "HTML", "bytes": "1536"}, {"name": "JavaScript", "bytes": "27778"}]}} {"text": "\ufeffusing System;\nusing System.Collections.Generic;\nusing System.Linq;\nusing Microsoft.AspNet.Identity;\nusing Microsoft.AspNet.Identity.EntityFramework;\nusing Microsoft.Owin;\nusing Microsoft.Owin.Security.Cookies;\nusing Microsoft.Owin.Security.Google;\nusing Microsoft.Owin.Security.OAuth;\nusing Owin;\nusing TestWebApiApp.Providers;\nusing TestWebApiApp.Models;\n\nnamespace TestWebApiApp\n{\n public partial class Startup\n {\n public static OAuthAuthorizationServerOptions OAuthOptions { get; private set; }\n\n public static string PublicClientId { get; private set; }\n\n // For more information on configuring authentication, please visit http://go.microsoft.com/fwlink/?LinkId=301864\n public void ConfigureAuth(IAppBuilder app)\n {\n // Configure the db context and user manager to use a single instance per request\n app.CreatePerOwinContext(ApplicationDbContext.Create);\n app.CreatePerOwinContext(ApplicationUserManager.Create);\n\n // Enable the application to use a cookie to store information for the signed in user\n // and to use a cookie to temporarily store information about a user logging in with a third party 
login provider\n app.UseCookieAuthentication(new CookieAuthenticationOptions());\n app.UseExternalSignInCookie(DefaultAuthenticationTypes.ExternalCookie);\n\n // Configure the application for OAuth based flow\n PublicClientId = \"self\";\n OAuthOptions = new OAuthAuthorizationServerOptions\n {\n TokenEndpointPath = new PathString(\"/Token\"),\n Provider = new ApplicationOAuthProvider(PublicClientId),\n AuthorizeEndpointPath = new PathString(\"/api/Account/ExternalLogin\"),\n AccessTokenExpireTimeSpan = TimeSpan.FromDays(14),\n AllowInsecureHttp = true\n };\n\n // Enable the application to use bearer tokens to authenticate users\n app.UseOAuthBearerTokens(OAuthOptions);\n\n // Uncomment the following lines to enable logging in with third party login providers\n //app.UseMicrosoftAccountAuthentication(\n // clientId: \"\",\n // clientSecret: \"\");\n\n //app.UseTwitterAuthentication(\n // consumerKey: \"\",\n // consumerSecret: \"\");\n\n //app.UseFacebookAuthentication(\n // appId: \"\",\n // appSecret: \"\");\n\n //app.UseGoogleAuthentication(new GoogleOAuth2AuthenticationOptions()\n //{\n // ClientId = \"\",\n // ClientSecret = \"\"\n //});\n }\n }\n}\n", "meta": {"content_hash": "9ec934dfa0d13ffa58838a7443b03f1a", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 125, "avg_line_length": 39.86764705882353, "alnum_prop": 0.649206934710439, "repo_name": "idoychinov/Telerik_Academy_Homework", "id": "fc099f7cba0cb6b149835f8e45e331aa794d023f", "size": "2713", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ASP.NETWeb Forms/1.IntroductionToAspNet/WebApps/TestWebApiApp/App_Start/Startup.Auth.cs", "mode": "33188", "license": "mit", "language": [{"name": "ASP", "bytes": "224148"}, {"name": "C#", "bytes": "3952663"}, {"name": "CSS", "bytes": "3475942"}, {"name": "CoffeeScript", "bytes": "4453"}, {"name": "JavaScript", "bytes": "8996873"}, {"name": "Pascal", "bytes": "14823"}, {"name": "PowerShell", "bytes": "1717649"}, {"name": 
"Puppet", "bytes": "404631"}, {"name": "Shell", "bytes": "315"}, {"name": "TypeScript", "bytes": "11219"}, {"name": "XSLT", "bytes": "2081"}]}} {"text": "package com.xcode.mobile.smilealarm;\n\nimport android.app.Application;\nimport android.test.ApplicationTestCase;\n\n/**\n * Testing Fundamentals\n */\npublic class ApplicationTest extends ApplicationTestCase {\n public ApplicationTest() {\n super(Application.class);\n }\n}", "meta": {"content_hash": "b21a9600719a2c1d4e73911d8f2e36bc", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 93, "avg_line_length": 27.53846153846154, "alnum_prop": 0.7513966480446927, "repo_name": "anhnguyenbk/XCODE-MOBILE-APPS-DEV", "id": "70ec47085cc929fc009a0a2fd6b91a228c7705c0", "size": "358", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SmileAlarm/app/src/androidTest/java/com/xcode/mobile/smilealarm/ApplicationTest.java", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "5543"}, {"name": "Java", "bytes": "219057"}, {"name": "JavaScript", "bytes": "662"}]}} {"text": "\npackage com.twitter.zipkin.builder\n\nimport com.twitter.finagle.stats.{OstrichStatsReceiver, StatsReceiver}\nimport com.twitter.logging.config._\nimport com.twitter.logging.{ConsoleHandler, Logger, LoggerFactory}\nimport com.twitter.ostrich.admin._\nimport java.net.{InetAddress, InetSocketAddress}\nimport scala.util.matching.Regex\n\n/**\n * Base builder for a Zipkin service\n */\ncase class ZipkinServerBuilder(\n serverPort : Int,\n adminPort : Int,\n serverAddress : InetAddress = InetAddress.getByAddress(Array[Byte](0,0,0,0)),\n loggers : List[LoggerFactory] = List(LoggerFactory(level = Some(Level.DEBUG), handlers = List(ConsoleHandler()))),\n adminStatsNodes : List[StatsFactory] = List(StatsFactory(reporters = List(TimeSeriesCollectorFactory()))),\n adminStatsFilters : List[Regex] = List.empty,\n statsReceiver : StatsReceiver = new OstrichStatsReceiver\n ) extends Builder[(RuntimeEnvironment) => 
Unit] {\n\n def serverPort(p: Int) : ZipkinServerBuilder = copy(serverPort = p)\n def adminPort(p: Int) : ZipkinServerBuilder = copy(adminPort = p)\n def serverAddress(a: InetAddress) : ZipkinServerBuilder = copy(serverAddress = a)\n def loggers(l: List[LoggerFactory]) : ZipkinServerBuilder = copy(loggers = l)\n def statsReceiver(s: StatsReceiver) : ZipkinServerBuilder = copy(statsReceiver = s)\n\n def addLogger(l: LoggerFactory) : ZipkinServerBuilder = copy(loggers = loggers :+ l)\n def addAdminStatsNode(n: StatsFactory): ZipkinServerBuilder = copy(adminStatsNodes = adminStatsNodes :+ n)\n def addAdminStatsFilter(f: Regex) : ZipkinServerBuilder = copy(adminStatsFilters = adminStatsFilters :+ f)\n\n private lazy val adminServiceFactory: AdminServiceFactory =\n AdminServiceFactory(\n httpPort = adminPort,\n statsNodes = adminStatsNodes,\n statsFilters = adminStatsFilters\n )\n\n lazy val socketAddress = new InetSocketAddress(serverAddress, serverPort)\n\n var adminHttpService: Option[AdminHttpService] = None\n\n def apply() = (runtime: RuntimeEnvironment) => {\n Logger.configure(loggers)\n adminHttpService = Some(adminServiceFactory(runtime))\n }\n }\n", "meta": {"content_hash": "0719ee0a862370c19d8fb48e77c5a744", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 138, "avg_line_length": 47.97959183673469, "alnum_prop": 0.6741811994895789, "repo_name": "coursera/zipkin", "id": "e0b70118772b4d9252bd767cfdcdb36b2215aecf", "size": "2947", "binary": false, "copies": "1", "ref": "refs/heads/coursera-zipkin", "path": "zipkin-query-service/src/main/scala/com/twitter/zipkin/builder/ZipkinServerBuilder.scala", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "10180"}, {"name": "HTML", "bytes": "18713"}, {"name": "Java", "bytes": "37218"}, {"name": "JavaScript", "bytes": "322607"}, {"name": "Scala", "bytes": "422991"}, {"name": "Shell", "bytes": "3977"}, {"name": "Thrift", "bytes": "7894"}]}} {"text": "package 
matchers\n\nimport (\n\t\"fmt\"\n\t\"reflect\"\n\n\t\"github.com/mysza/go-service-template/Godeps/_workspace/src/github.com/onsi/gomega/format\"\n)\n\ntype AssignableToTypeOfMatcher struct {\n\tExpected interface{}\n}\n\nfunc (matcher *AssignableToTypeOfMatcher) Match(actual interface{}) (success bool, err error) {\n\tif actual == nil || matcher.Expected == nil {\n\t\treturn false, fmt.Errorf(\"Refusing to compare to .\\nBe explicit and use BeNil() instead. This is to avoid mistakes where both sides of an assertion are erroneously uninitialized.\")\n\t}\n\n\tactualType := reflect.TypeOf(actual)\n\texpectedType := reflect.TypeOf(matcher.Expected)\n\n\treturn actualType.AssignableTo(expectedType), nil\n}\n\nfunc (matcher *AssignableToTypeOfMatcher) FailureMessage(actual interface{}) string {\n\treturn format.Message(actual, fmt.Sprintf(\"to be assignable to the type: %T\", matcher.Expected))\n}\n\nfunc (matcher *AssignableToTypeOfMatcher) NegatedFailureMessage(actual interface{}) string {\n\treturn format.Message(actual, fmt.Sprintf(\"not to be assignable to the type: %T\", matcher.Expected))\n}\n", "meta": {"content_hash": "f5c410e8325c697313f45b4aff3e6566", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 194, "avg_line_length": 34.67741935483871, "alnum_prop": 0.7646511627906977, "repo_name": "mysza/go-service-template", "id": "f6d4c8b05877dcdedcaa13f476846f81fbdddd79", "size": "1075", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Godeps/_workspace/src/github.com/onsi/gomega/matchers/assignable_to_type_of_matcher.go", "mode": "33188", "license": "mit", "language": [{"name": "Go", "bytes": "429"}, {"name": "Makefile", "bytes": "799"}]}} {"text": "\n\n\n\n \n\n Getting Started\n\n \n \n\n \n\n \n \n \n\n \n \n\n \n \n\n\n \n \n\n\n\n
\n \n
\n
\n

\n Social Hacking with ML\n

\n
\n

Devananda van der Veen

\n

twitter: @devananda

\n
\n

devananda.github.io/talks/

\n
\n
\n

Trends...

\n
    \n
  • 1999 - \"Peer to Peer\" networking
  • \n
  • 2003 - LAMP stack
  • \n
  • 2008 - Cloud
  • \n
  • 2016 - Machine Learning
  • \n
\n
\n
\n \n

Cloud vs Machine Learning

\n
\n
\n

What can it do?

\n
\n
\n \n
\n
\n

... and then what?

\n
\n
\n

FireWise

\n
\n
\n https://cloud.google.com/public-datasets/\n
\n
\n

$?

\n
\n
\n

Get involved

\n
\n
\n
\n\n \n \n \n\n\n", "meta": {"content_hash": "fbd073ac3cfff277d1d8ba2b7103607f", "timestamp": "", "source": "github", "line_count": 122, "max_line_length": 134, "avg_line_length": 40.114754098360656, "alnum_prop": 0.5657948508377605, "repo_name": "devananda/talks", "id": "9af03f9def530e6e42346ad2f09ad23e7827719c", "size": "4894", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "social-hacking-with-ml.html", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "201296"}, {"name": "HTML", "bytes": "225649"}, {"name": "JavaScript", "bytes": "254227"}]}} {"text": "\n\n \n \n \n maths: Not compatible \ud83d\udc7c\n \n \n \n \n \n \n \n \n \n \n
\n \n
\n
\n
\n \u00ab Up\n

\n maths\n \n 8.10.0\n Not compatible \ud83d\udc7c\n \n

\n

\ud83d\udcc5 (2022-06-19 05:53:44 UTC)

\n

Context

\n
# Packages matching: installed\n# Name              # Installed  # Synopsis\nbase-bigarray       base\nbase-num            base         Num library distributed with the OCaml compiler\nbase-threads        base\nbase-unix           base\ncamlp4              4.05+1       Camlp4 is a system for writing extensible parsers for programming languages\nconf-findutils      1            Virtual package relying on findutils\ncoq                 8.5.2~camlp4 Formal proof management system\nnum                 0            The Num library for arbitrary-precision integer and rational arithmetic\nocaml               4.05.0       The OCaml compiler (virtual package)\nocaml-base-compiler 4.05.0       Official 4.05.0 release\nocaml-config        1            OCaml Switch Configuration\nocamlbuild          0.14.1       OCamlbuild is a build system with builtin rules to easily build most OCaml projects\n# opam file:\nopam-version: "2.0"\nmaintainer: "Hugo.Herbelin@inria.fr"\nhomepage: "https://github.com/coq-contribs/maths"\nlicense: "LGPL 2.1"\nbuild: [make "-j%{jobs}%"]\ninstall: [make "install"]\nremove: ["rm" "-R" "%{lib}%/coq/user-contrib/Maths"]\ndepends: [\n  "ocaml"\n  "coq" {>= "8.10" & < "8.11~"}\n]\ntags: [\n  "keyword: mathematics"\n  "category: Mathematics/Arithmetic and Number Theory/Number theory"\n]\nauthors: [\n  "Jean-Christophe Filli\u00e2tre"\n]\nbug-reports: "https://github.com/coq-contribs/maths/issues"\ndev-repo: "git+https://github.com/coq-contribs/maths.git"\nsynopsis: "Basic mathematics"\ndescription: """\nBasic mathematics (gcd, primality, etc.) from\nFrench ``Mathematiques Superieures'' (first year of preparation to\nhigh schools)"""\nflags: light-uninstall\nurl {\n  src: "https://github.com/coq-contribs/maths/archive/v8.10.0.tar.gz"\n  checksum: "md5=0bb016a4357e219f2099e242366e71bf"\n}\n
\n

Lint

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
\n

Dry install \ud83c\udfdc\ufe0f

\n

Dry install with the current Coq version:

\n
\n
Command
\n
opam install -y --show-action coq-maths.8.10.0 coq.8.5.2~camlp4
\n
Return code
\n
5120
\n
Output
\n
[NOTE] Package coq is already installed (current version is 8.5.2~camlp4).\nThe following dependencies couldn't be met:\n  - coq-maths -> coq >= 8.10\nYour request can't be satisfied:\n  - No available version of coq satisfies the constraints\nNo solution found, exiting\n
\n
\n

Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:

\n
\n
Command
\n
opam remove -y coq; opam install -y --show-action --unlock-base coq-maths.8.10.0
\n
Return code
\n
0
\n
\n

Install dependencies

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Install \ud83d\ude80

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Duration
\n
0 s
\n
\n

Installation size

\n

No files were installed.

\n

Uninstall \ud83e\uddf9

\n
\n
Command
\n
true
\n
Return code
\n
0
\n
Missing removes
\n
\n none\n
\n
Wrong removes
\n
\n none\n
\n
\n
\n
\n
\n
\n
\n

\n Sources are on GitHub \u00a9 Guillaume Claret \ud83d\udc23\n

\n
\n
\n \n \n \n\n", "meta": {"content_hash": "02cfa1100842832565a40ea18f5843ad", "timestamp": "", "source": "github", "line_count": 171, "max_line_length": 159, "avg_line_length": 40.538011695906434, "alnum_prop": 0.54399884593191, "repo_name": "coq-bench/coq-bench.github.io", "id": "877305d35e80b58eb9d8462a60af22c7891b3386", "size": "6958", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.05.0-2.0.1/released/8.5.2~camlp4/maths/8.10.0.html", "mode": "33188", "license": "mit", "language": []}} {"text": "// Created by Andrea Arteaga, MeteoSwiss\r\n// Email: andyspiros@gmail.com\r\n// January 2013\r\n\r\n#pragma once\r\n\r\n#include \"SavePoint.h\"\r\n\r\nclass Serializer;\r\n\r\nclass SerializerOutput\r\n{\r\npublic:\r\n /**\r\n * Default constructor\r\n */\r\n SerializerOutput() { }\r\n\r\n /**\r\n * Copy constructor\r\n */\r\n SerializerOutput(const SerializerOutput& other)\r\n {\r\n *this = other;\r\n }\r\n\r\n /**\r\n * Assignment operator\r\n */\r\n SerializerOutput& operator=(const SerializerOutput& other)\r\n {\r\n pSerializer_ = other.pSerializer_;\r\n savePoint_ = other.savePoint_;\r\n return *this;\r\n }\r\n inline void Init(Serializer& serializer, std::string savePointName);\r\n\r\n inline void set_SavePoint(const SavePoint& other) { savePoint_ = other; }\r\n\r\n inline SerializerOutput& operator<< (const MetaInfo& info);\r\n\r\n template\r\n inline SerializerOutput& operator<< (const TDataField& field);\r\n\r\nprivate:\r\n SavePoint savePoint_;\r\n Serializer* pSerializer_;\r\n};\r\n\r\n\r\n", "meta": {"content_hash": "4561ea1c1321f29eb8166410449be86e", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 77, "avg_line_length": 20.8, "alnum_prop": 0.6182692307692308, "repo_name": "bcumming/mini-stencil", "id": "91d2b75d9e79fcd19737b34b71a588c0457eb6d8", "size": "1040", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/serialize/src/SerializerOutput.h", "mode": "33188", 
"license": "bsd-3-clause", "language": [{"name": "C", "bytes": "617485"}, {"name": "C++", "bytes": "1517839"}, {"name": "FORTRAN", "bytes": "310206"}, {"name": "Objective-C", "bytes": "12105"}, {"name": "Python", "bytes": "81906"}]}} {"text": "leakless\n========\n\nMiscellaneous process related resource management modules\n\n\nAPI Document\n------------\n\nsee: [Edoc](doc/README.md)\n", "meta": {"content_hash": "39cae2f424da051aaf4cbf61b2900399", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 57, "avg_line_length": 13.3, "alnum_prop": 0.6691729323308271, "repo_name": "sile/leakless", "id": "f0ca93e9cb76d5527484e694146074f0496c2c62", "size": "133", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [{"name": "Erlang", "bytes": "33606"}, {"name": "Makefile", "bytes": "1140"}]}} {"text": "package awsiam\n\nimport (\n\t\"encoding/json\"\n\t\"errors\"\n\n\t\"code.cloudfoundry.org/lager\"\n\t\"github.com/aws/aws-sdk-go/aws\"\n\t\"github.com/aws/aws-sdk-go/aws/awserr\"\n\t\"github.com/aws/aws-sdk-go/service/iam\"\n)\n\ntype UserPolicy struct {\n\tVersion string `json:\"Version\"`\n\tID string `json:\"Id\"`\n\tStatements []UserPolicyStatement `json:\"Statement\"`\n}\n\ntype UserPolicyStatement struct {\n\tSID string `json:\"Sid\"`\n\tEffect string `json:\"Effect\"`\n\tAction string `json:\"Action\"`\n\tResource string `json:\"Resource\"`\n}\n\ntype IAMUser struct {\n\tiamsvc *iam.IAM\n\tlogger lager.Logger\n}\n\nfunc NewIAMUser(\n\tiamsvc *iam.IAM,\n\tlogger lager.Logger,\n) *IAMUser {\n\treturn &IAMUser{\n\t\tiamsvc: iamsvc,\n\t\tlogger: logger.Session(\"iam-user\"),\n\t}\n}\n\nfunc (i *IAMUser) Describe(userName string) (UserDetails, error) {\n\tuserDetails := UserDetails{\n\t\tUserName: userName,\n\t}\n\n\tgetUserInput := &iam.GetUserInput{\n\t\tUserName: aws.String(userName),\n\t}\n\ti.logger.Debug(\"get-user\", lager.Data{\"input\": 
getUserInput})\n\n\tgetUserOutput, err := i.iamsvc.GetUser(getUserInput)\n\tif err != nil {\n\t\ti.logger.Error(\"aws-iam-error\", err)\n\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\treturn userDetails, errors.New(awsErr.Code() + \": \" + awsErr.Message())\n\t\t}\n\t\treturn userDetails, err\n\t}\n\ti.logger.Debug(\"get-user\", lager.Data{\"output\": getUserOutput})\n\n\tuserDetails.UserARN = aws.StringValue(getUserOutput.User.Arn)\n\tuserDetails.UserID = aws.StringValue(getUserOutput.User.UserId)\n\n\treturn userDetails, nil\n}\n\nfunc (i *IAMUser) Create(userName string) (string, error) {\n\tcreateUserInput := &iam.CreateUserInput{\n\t\tUserName: aws.String(userName),\n\t}\n\ti.logger.Debug(\"create-user\", lager.Data{\"input\": createUserInput})\n\n\tcreateUserOutput, err := i.iamsvc.CreateUser(createUserInput)\n\tif err != nil {\n\t\ti.logger.Error(\"aws-iam-error\", err)\n\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\treturn \"\", errors.New(awsErr.Code() + \": \" + awsErr.Message())\n\t\t}\n\t\treturn \"\", err\n\t}\n\ti.logger.Debug(\"create-user\", lager.Data{\"output\": createUserOutput})\n\n\treturn aws.StringValue(createUserOutput.User.Arn), nil\n}\n\nfunc (i *IAMUser) Delete(userName string) error {\n\tdeleteUserInput := &iam.DeleteUserInput{\n\t\tUserName: aws.String(userName),\n\t}\n\ti.logger.Debug(\"delete-user\", lager.Data{\"input\": deleteUserInput})\n\n\tdeleteUserOutput, err := i.iamsvc.DeleteUser(deleteUserInput)\n\tif err != nil {\n\t\ti.logger.Error(\"aws-iam-error\", err)\n\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\treturn errors.New(awsErr.Code() + \": \" + awsErr.Message())\n\t\t}\n\t\treturn err\n\t}\n\ti.logger.Debug(\"delete-user\", lager.Data{\"output\": deleteUserOutput})\n\n\treturn nil\n}\n\nfunc (i *IAMUser) ListAccessKeys(userName string) ([]string, error) {\n\tvar accessKeys []string\n\n\tlistAccessKeysInput := &iam.ListAccessKeysInput{\n\t\tUserName: 
aws.String(userName),\n\t}\n\ti.logger.Debug(\"list-access-keys\", lager.Data{\"input\": listAccessKeysInput})\n\n\tlistAccessKeysOutput, err := i.iamsvc.ListAccessKeys(listAccessKeysInput)\n\tif err != nil {\n\t\ti.logger.Error(\"aws-iam-error\", err)\n\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\treturn accessKeys, errors.New(awsErr.Code() + \": \" + awsErr.Message())\n\t\t}\n\t\treturn accessKeys, err\n\t}\n\ti.logger.Debug(\"list-access-keys\", lager.Data{\"output\": listAccessKeysOutput})\n\n\tfor _, accessKey := range listAccessKeysOutput.AccessKeyMetadata {\n\t\taccessKeys = append(accessKeys, aws.StringValue(accessKey.AccessKeyId))\n\t}\n\n\treturn accessKeys, nil\n}\n\nfunc (i *IAMUser) CreateAccessKey(userName string) (string, string, error) {\n\tcreateAccessKeyInput := &iam.CreateAccessKeyInput{\n\t\tUserName: aws.String(userName),\n\t}\n\ti.logger.Debug(\"create-access-key\", lager.Data{\"input\": createAccessKeyInput})\n\n\tcreateAccessKeyOutput, err := i.iamsvc.CreateAccessKey(createAccessKeyInput)\n\tif err != nil {\n\t\ti.logger.Error(\"aws-iam-error\", err)\n\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\treturn \"\", \"\", errors.New(awsErr.Code() + \": \" + awsErr.Message())\n\t\t}\n\t\treturn \"\", \"\", err\n\t}\n\ti.logger.Debug(\"create-access-key\", lager.Data{\"output\": createAccessKeyOutput})\n\n\treturn aws.StringValue(createAccessKeyOutput.AccessKey.AccessKeyId), aws.StringValue(createAccessKeyOutput.AccessKey.SecretAccessKey), nil\n}\n\nfunc (i *IAMUser) DeleteAccessKey(userName string, accessKeyID string) error {\n\tdeleteAccessKeyInput := &iam.DeleteAccessKeyInput{\n\t\tUserName: aws.String(userName),\n\t\tAccessKeyId: aws.String(accessKeyID),\n\t}\n\ti.logger.Debug(\"delete-access-key\", lager.Data{\"input\": deleteAccessKeyInput})\n\n\tdeleteAccessKeyOutput, err := i.iamsvc.DeleteAccessKey(deleteAccessKeyInput)\n\tif err != nil {\n\t\ti.logger.Error(\"aws-iam-error\", err)\n\t\tif awsErr, ok := err.(awserr.Error); ok 
{\n\t\t\treturn errors.New(awsErr.Code() + \": \" + awsErr.Message())\n\t\t}\n\t\treturn err\n\t}\n\ti.logger.Debug(\"delete-access-key\", lager.Data{\"output\": deleteAccessKeyOutput})\n\n\treturn nil\n}\n\nfunc (i *IAMUser) CreatePolicy(policyName string, effect string, action string, resource string) (string, error) {\n\tpolicyDocument, err := i.buildUserPolicy(policyName, effect, action, resource)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\tcreatePolicyInput := &iam.CreatePolicyInput{\n\t\tPolicyName: aws.String(policyName),\n\t\tPolicyDocument: aws.String(policyDocument),\n\t}\n\ti.logger.Debug(\"create-policy\", lager.Data{\"input\": createPolicyInput})\n\n\tcreatePolicyOutput, err := i.iamsvc.CreatePolicy(createPolicyInput)\n\tif err != nil {\n\t\ti.logger.Error(\"aws-iam-error\", err)\n\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\treturn \"\", errors.New(awsErr.Code() + \": \" + awsErr.Message())\n\t\t}\n\t\treturn \"\", err\n\t}\n\ti.logger.Debug(\"create-policy\", lager.Data{\"output\": createPolicyOutput})\n\n\treturn aws.StringValue(createPolicyOutput.Policy.Arn), nil\n}\n\nfunc (i *IAMUser) DeletePolicy(policyARN string) error {\n\tdeletePolicyInput := &iam.DeletePolicyInput{\n\t\tPolicyArn: aws.String(policyARN),\n\t}\n\ti.logger.Debug(\"delete-policy\", lager.Data{\"input\": deletePolicyInput})\n\n\tdeletePolicyOutput, err := i.iamsvc.DeletePolicy(deletePolicyInput)\n\tif err != nil {\n\t\ti.logger.Error(\"aws-iam-error\", err)\n\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\treturn errors.New(awsErr.Code() + \": \" + awsErr.Message())\n\t\t}\n\t\treturn err\n\t}\n\ti.logger.Debug(\"delete-policy\", lager.Data{\"output\": deletePolicyOutput})\n\n\treturn nil\n}\n\nfunc (i *IAMUser) ListAttachedUserPolicies(userName string) ([]string, error) {\n\tvar userPolicies []string\n\n\tlistAttachedUserPoliciesInput := &iam.ListAttachedUserPoliciesInput{\n\t\tUserName: 
aws.String(userName),\n\t}\n\ti.logger.Debug(\"list-attached-user-policies\", lager.Data{\"input\": listAttachedUserPoliciesInput})\n\n\tlistAttachedUserPoliciesOutput, err := i.iamsvc.ListAttachedUserPolicies(listAttachedUserPoliciesInput)\n\tif err != nil {\n\t\ti.logger.Error(\"aws-iam-error\", err)\n\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\treturn userPolicies, errors.New(awsErr.Code() + \": \" + awsErr.Message())\n\t\t}\n\t\treturn userPolicies, err\n\t}\n\ti.logger.Debug(\"list-attached-user-policies\", lager.Data{\"output\": listAttachedUserPoliciesOutput})\n\n\tfor _, userPolicy := range listAttachedUserPoliciesOutput.AttachedPolicies {\n\t\tuserPolicies = append(userPolicies, aws.StringValue(userPolicy.PolicyArn))\n\t}\n\n\treturn userPolicies, nil\n}\n\nfunc (i *IAMUser) AttachUserPolicy(userName string, policyARN string) error {\n\tattachUserPolicyInput := &iam.AttachUserPolicyInput{\n\t\tPolicyArn: aws.String(policyARN),\n\t\tUserName: aws.String(userName),\n\t}\n\ti.logger.Debug(\"attach-user-policy\", lager.Data{\"input\": attachUserPolicyInput})\n\n\tattachUserPolicyOutput, err := i.iamsvc.AttachUserPolicy(attachUserPolicyInput)\n\tif err != nil {\n\t\ti.logger.Error(\"aws-iam-error\", err)\n\t\tif awsErr, ok := err.(awserr.Error); ok {\n\t\t\treturn errors.New(awsErr.Code() + \": \" + awsErr.Message())\n\t\t}\n\t\treturn err\n\t}\n\ti.logger.Debug(\"attach-user-policy\", lager.Data{\"output\": attachUserPolicyOutput})\n\n\treturn nil\n}\n\nfunc (i *IAMUser) DetachUserPolicy(userName string, policyARN string) error {\n\tdetachUserPolicyInput := &iam.DetachUserPolicyInput{\n\t\tPolicyArn: aws.String(policyARN),\n\t\tUserName: aws.String(userName),\n\t}\n\ti.logger.Debug(\"detach-user-policy\", lager.Data{\"input\": detachUserPolicyInput})\n\n\tdetachUserPolicyOutput, err := i.iamsvc.DetachUserPolicy(detachUserPolicyInput)\n\tif err != nil {\n\t\ti.logger.Error(\"aws-iam-error\", err)\n\t\tif awsErr, ok := err.(awserr.Error); ok 
{\n\t\t\treturn errors.New(awsErr.Code() + \": \" + awsErr.Message())\n\t\t}\n\t\treturn err\n\t}\n\ti.logger.Debug(\"detach-user-policy\", lager.Data{\"output\": detachUserPolicyOutput})\n\n\treturn nil\n}\n\nfunc (i *IAMUser) buildUserPolicy(policyID string, effect string, action string, resource string) (string, error) {\n\tuserPolicy := UserPolicy{\n\t\tVersion: \"2012-10-17\",\n\t\tID: policyID,\n\t\tStatements: []UserPolicyStatement{\n\t\t\tUserPolicyStatement{\n\t\t\t\tSID: \"1\",\n\t\t\t\tEffect: effect,\n\t\t\t\tAction: action,\n\t\t\t\tResource: resource,\n\t\t\t},\n\t\t\tUserPolicyStatement{\n\t\t\t\tSID: \"2\",\n\t\t\t\tEffect: effect,\n\t\t\t\tAction: action,\n\t\t\t\tResource: resource + \"/*\",\n\t\t\t},\n\t\t},\n\t}\n\n\tpolicy, err := json.Marshal(userPolicy)\n\tif err != nil {\n\t\treturn \"\", err\n\t}\n\n\treturn string(policy), nil\n}\n", "meta": {"content_hash": "012bdc53874db2f6be588cb6028dcbf7", "timestamp": "", "source": "github", "line_count": 304, "max_line_length": 139, "avg_line_length": 29.713815789473685, "alnum_prop": 0.7079597033100853, "repo_name": "jmcarp/s3-broker", "id": "93fb4e93663e94e491e74787fe4a57e5fa19e481", "size": "9033", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "awsiam/iam_user.go", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Go", "bytes": "61277"}, {"name": "Shell", "bytes": "7278"}]}} {"text": " ILineParser.Parse(string s)\n {\n return s.Split(Separator, int.MaxValue, StringSplitOptions.None);\n }\n }\n}", "meta": {"content_hash": "28521ab9590d115155657130252a534a", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 77, "avg_line_length": 25.6875, "alnum_prop": 0.6861313868613139, "repo_name": "tanglebones/ch-testing", "id": "9ee20b4060b4932893501e2a28328329e257dc88", "size": "411", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "T2/Component/LineParser.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", 
"bytes": "41263"}]}} {"text": "import android.content.ContentResolver;\r\nimport android.content.ContentValues;\r\nimport android.content.Context;\r\nimport android.database.Cursor;\r\nimport android.database.DatabaseUtils;\r\nimport android.database.sqlite.SQLiteDatabase;\r\nimport android.net.Uri;\r\nimport android.util.Log;\r\nimport com.google.android.libraries.social.autobackup.MediaRecordEntry;\r\nimport java.io.IOException;\r\n\r\npublic final class hbm\r\n{\r\n private static final String a = MediaRecordEntry.a.a;\r\n private static final String b;\r\n private static final String c;\r\n private static final String d;\r\n private static final String e;\r\n \r\n static\r\n {\r\n String str1 = String.valueOf(\"upload_account_id = -1 AND _id > ? AND media_url NOT IN ( SELECT media_url FROM \");\r\n String str2 = a;\r\n String str3 = String.valueOf(\"upload_account_id\");\r\n String str4 = String.valueOf(\"bucket_id\");\r\n String str5 = String.valueOf(\"bucket_id\");\r\n String str6 = String.valueOf(\"bucket_id\");\r\n String str7 = String.valueOf(\"exclude_bucket\");\r\n b = 57 + String.valueOf(str1).length() + String.valueOf(str2).length() + String.valueOf(str3).length() + String.valueOf(str4).length() + String.valueOf(str5).length() + String.valueOf(str6).length() + String.valueOf(str7).length() + str1 + str2 + \" WHERE \" + str3 + \" = ? )\" + \" AND (\" + str4 + \" IS NULL OR \" + str5 + \" NOT IN ( SELECT \" + str6 + \" FROM \" + str7 + \" ))\";\r\n String str8 = String.valueOf(MediaRecordEntry.a.a);\r\n String str9 = String.valueOf(\"upload_account_id = ? AND upload_state = 100\");\r\n c = 28 + String.valueOf(str8).length() + String.valueOf(str9).length() + \"SELECT count(*) FROM \" + str8 + \" WHERE \" + str9;\r\n String str10 = a;\r\n String str11 = String.valueOf(\"upload_account_id = ? 
AND ( upload_state = 100 OR upload_state = 200 ) AND upload_reason = ?\");\r\n d = 28 + String.valueOf(str10).length() + String.valueOf(str11).length() + \"SELECT COUNT(*) FROM \" + str10 + \" WHERE \" + str11;\r\n String str12 = String.valueOf(\"upload_account_id = -1 AND bucket_id = ? AND media_url NOT IN ( SELECT media_url FROM \");\r\n String str13 = a;\r\n String str14 = String.valueOf(\"upload_account_id\");\r\n e = 13 + String.valueOf(str12).length() + String.valueOf(str13).length() + String.valueOf(str14).length() + str12 + str13 + \" WHERE \" + str14 + \" = ? )\";\r\n }\r\n \r\n private static int a(SQLiteDatabase paramSQLiteDatabase, int paramInt1, int paramInt2)\r\n {\r\n Cursor localCursor = a(paramSQLiteDatabase, paramInt1, -1L, 500);\r\n int i = 0;\r\n try\r\n {\r\n while (localCursor.moveToNext())\r\n {\r\n MediaRecordEntry localMediaRecordEntry = MediaRecordEntry.a(localCursor);\r\n localMediaRecordEntry.id = 0L;\r\n localMediaRecordEntry.mUploadAccountId = paramInt1;\r\n localMediaRecordEntry.mUploadReason = paramInt2;\r\n localMediaRecordEntry.mUploadState = 100;\r\n MediaRecordEntry.a.a(paramSQLiteDatabase, localMediaRecordEntry);\r\n i++;\r\n }\r\n return i;\r\n }\r\n finally\r\n {\r\n localCursor.close();\r\n }\r\n }\r\n \r\n static Cursor a(SQLiteDatabase paramSQLiteDatabase, int paramInt1, long paramLong, int paramInt2)\r\n {\r\n String str1 = a;\r\n String[] arrayOfString1 = MediaRecordEntry.a.b;\r\n String str2 = b;\r\n String[] arrayOfString2 = new String[2];\r\n arrayOfString2[0] = Long.toString(paramLong);\r\n arrayOfString2[1] = Integer.toString(paramInt1);\r\n return paramSQLiteDatabase.query(true, str1, arrayOfString1, str2, arrayOfString2, null, null, \"_id ASC\", Integer.toString(paramInt2));\r\n }\r\n \r\n public static void a(SQLiteDatabase paramSQLiteDatabase, int paramInt, String paramString)\r\n {\r\n if (paramInt == -1) {}\r\n int i;\r\n do\r\n {\r\n return;\r\n i = 0;\r\n int j;\r\n do\r\n {\r\n j = b(paramSQLiteDatabase, 
paramInt, paramString);\r\n if (Log.isLoggable(\"iu.UploadsManager\", 4)) {\r\n new StringBuilder(56).append(\"ADD; medias added in batch: \").append(j).append(\"; iu: \").append(paramInt);\r\n }\r\n i += j;\r\n } while (j > 0);\r\n } while (!Log.isLoggable(\"iu.UploadsManager\", 4));\r\n new StringBuilder(60).append(\"ADD; complete; total scheduled: \").append(i).append(\"; iu: \").append(paramInt);\r\n }\r\n \r\n public static void a(SQLiteDatabase paramSQLiteDatabase, String paramString)\r\n {\r\n paramSQLiteDatabase.delete(a, \"upload_account_id != -1 AND bucket_id = ? AND upload_state != 400\", new String[] { paramString });\r\n }\r\n \r\n public static void a(hci paramhci, int paramInt)\r\n {\r\n if (paramInt == -1) {\r\n return;\r\n }\r\n String[] arrayOfString = new String[1];\r\n arrayOfString[0] = Integer.toString(paramInt);\r\n paramhci.getWritableDatabase().delete(a, \"upload_account_id = ? AND upload_state = 100\", arrayOfString);\r\n }\r\n \r\n static void a(hci paramhci, int paramInt1, int paramInt2)\r\n {\r\n if ((paramInt2 != 40) && (paramInt2 != 30)) {\r\n throw new IllegalArgumentException(\"only REASON_UPLOAD_ALL and REASON_INSTANT_UPLOAD supported\");\r\n }\r\n SQLiteDatabase localSQLiteDatabase = paramhci.getWritableDatabase();\r\n String str = a;\r\n String[] arrayOfString = new String[2];\r\n arrayOfString[0] = Integer.toString(paramInt1);\r\n arrayOfString[1] = Integer.toString(paramInt2);\r\n localSQLiteDatabase.delete(str, \"upload_account_id = ? 
AND ( upload_state = 100 OR upload_state = 200 ) AND upload_reason = ?\", arrayOfString);\r\n }\r\n \r\n static boolean a(ContentResolver paramContentResolver, Uri paramUri)\r\n {\r\n String str1 = efj.a(paramContentResolver, paramUri, \"_data\");\r\n if (str1 != null)\r\n {\r\n int i = str1.lastIndexOf('.');\r\n if (i >= 0) {}\r\n for (String str2 = str1.substring(i + 1); (!\"jpg\".equalsIgnoreCase(str2)) && (!\"jpeg\".equalsIgnoreCase(str2)); str2 = \"\") {\r\n return false;\r\n }\r\n for (;;)\r\n {\r\n try\r\n {\r\n hxp localhxp = new hxp();\r\n try\r\n {\r\n localhxp.a(str1);\r\n int j = hxp.h;\r\n localhya = localhxp.a(j, localhxp.d(j));\r\n if (localhya != null) {\r\n continue;\r\n }\r\n localObject = null;\r\n if ((localObject == null) || (!((String)localObject).contains(\"Google\"))) {\r\n break label290;\r\n }\r\n if (!Log.isLoggable(\"iu.UploadsManager\", 4)) {\r\n break label292;\r\n }\r\n String str4 = String.valueOf(localObject);\r\n if (str4.length() == 0) {\r\n continue;\r\n }\r\n \"*** Found Google EXIF tag; value: \".concat(str4);\r\n }\r\n catch (IOException localIOException)\r\n {\r\n if (!Log.isLoggable(\"iu.UploadsManager\", 4)) {\r\n break label294;\r\n }\r\n }\r\n new StringBuilder(37 + String.valueOf(str1).length()).append(\"INFO: \").append(str1).append(\" does not contain any EXIF data\");\r\n }\r\n catch (Throwable localThrowable)\r\n {\r\n hya localhya;\r\n Object localObject;\r\n if (!Log.isLoggable(\"iu.UploadsManager\", 4)) {\r\n continue;\r\n }\r\n String str3 = String.valueOf(localThrowable);\r\n new StringBuilder(27 + String.valueOf(str1).length() + String.valueOf(str3).length()).append(\"INFO: \").append(str1).append(\" error getting EXIF; \").append(str3);\r\n return false;\r\n }\r\n localObject = localhya.a();\r\n }\r\n new String(\"*** Found Google EXIF tag; value: \");\r\n }\r\n else\r\n {\r\n label290:\r\n return false;\r\n }\r\n label292:\r\n return true;\r\n label294:\r\n return false;\r\n }\r\n \r\n static boolean 
a(Context paramContext)\r\n {\r\n Cursor localCursor = ((hci)mbb.a(paramContext, hci.class)).getReadableDatabase().query(true, a, MediaRecordEntry.a.b, \"upload_reason = 30 AND upload_state = 400\", null, null, null, null, \"1\");\r\n try\r\n {\r\n boolean bool = localCursor.moveToFirst();\r\n return bool;\r\n }\r\n finally\r\n {\r\n localCursor.close();\r\n }\r\n }\r\n \r\n public static boolean a(Context paramContext, int paramInt1, int paramInt2)\r\n {\r\n hci localhci = (hci)mbb.a(paramContext, hci.class);\r\n String str1;\r\n String[] arrayOfString;\r\n if (paramInt1 == -1)\r\n {\r\n str1 = \"upload_account_id != -1 AND upload_state = 200\";\r\n arrayOfString = null;\r\n }\r\n for (;;)\r\n {\r\n if (Log.isLoggable(\"iu.UploadsManager\", 4))\r\n {\r\n String str2 = a;\r\n String str3 = 28 + String.valueOf(str2).length() + String.valueOf(str1).length() + \"SELECT COUNT(*) FROM \" + str2 + \" WHERE \" + str1;\r\n long l = DatabaseUtils.longForQuery(localhci.getReadableDatabase(), str3, arrayOfString);\r\n new StringBuilder(40).append(\"num queued entries: \").append(l);\r\n }\r\n ContentValues localContentValues = new ContentValues(1);\r\n localContentValues.put(\"upload_state\", Integer.valueOf(100));\r\n int i = localhci.getWritableDatabase().update(a, localContentValues, str1, arrayOfString);\r\n if (Log.isLoggable(\"iu.UploadsManager\", 4)) {\r\n new StringBuilder(32).append(\"num updated entries: \").append(i);\r\n }\r\n if (i <= 0) {\r\n break;\r\n }\r\n return true;\r\n str1 = \"upload_account_id = ? 
AND upload_state = 200\";\r\n arrayOfString = new String[1];\r\n arrayOfString[0] = Integer.toString(paramInt1);\r\n }\r\n return false;\r\n }\r\n \r\n public static boolean a(Context paramContext, ContentResolver paramContentResolver, SQLiteDatabase paramSQLiteDatabase, ContentValues paramContentValues, String paramString, long paramLong, Uri paramUri, boolean paramBoolean1, boolean paramBoolean2)\r\n {\r\n hba localhba = (hba)mbb.a(paramContext, hba.class);\r\n String str1 = paramUri.toString();\r\n paramContentValues.clear();\r\n paramContentValues.putNull(\"album_id\");\r\n paramContentValues.putNull(\"event_id\");\r\n paramContentValues.put(\"upload_account_id\", Integer.valueOf(-1));\r\n paramContentValues.put(\"bucket_id\", paramString);\r\n paramContentValues.put(\"is_image\", Boolean.valueOf(paramBoolean1));\r\n paramContentValues.put(\"media_id\", Long.valueOf(paramLong));\r\n paramContentValues.put(\"media_time\", Long.valueOf(efj.a(paramContentResolver, paramUri)));\r\n String str2 = efj.a(paramContentResolver, paramUri, \"_data\");\r\n if (str2 == null) {\r\n str2 = str1;\r\n }\r\n paramContentValues.put(\"media_hash\", Integer.valueOf(str2.hashCode()));\r\n paramContentValues.put(\"media_url\", str1);\r\n paramContentValues.put(\"upload_reason\", Integer.valueOf(0));\r\n paramContentValues.put(\"upload_state\", Integer.valueOf(500));\r\n MediaRecordEntry.a.a(paramSQLiteDatabase, MediaRecordEntry.a(paramContentValues));\r\n if ((!paramBoolean2) || (a(paramContentResolver, paramUri))) {\r\n return false;\r\n }\r\n int i = localhba.d();\r\n paramContentValues.putNull(\"event_id\");\r\n if (!hbj.a(paramContext))\r\n {\r\n paramContentValues.put(\"upload_account_id\", Integer.valueOf(i));\r\n paramContentValues.put(\"upload_reason\", Integer.valueOf(30));\r\n paramContentValues.put(\"upload_state\", Integer.valueOf(100));\r\n MediaRecordEntry.a.a(paramSQLiteDatabase, MediaRecordEntry.a(paramContentValues));\r\n if (Log.isLoggable(\"iu.UploadsManager\", 4)) 
{\r\n new StringBuilder(59).append(\"NEW; upload media id: \").append(paramLong).append(\"; iu: \").append(i);\r\n }\r\n }\r\n if (Log.isLoggable(\"iu.UploadsManager\", 4)) {\r\n new StringBuilder(39).append(\"NEW; add media id: \").append(paramLong);\r\n }\r\n return true;\r\n }\r\n \r\n /* Error */\r\n private static int b(SQLiteDatabase paramSQLiteDatabase, int paramInt, String paramString)\r\n {\r\n // Byte code:\r\n // 0: aload_0\r\n // 1: invokevirtual 370\tandroid/database/sqlite/SQLiteDatabase:beginTransaction\t()V\r\n // 4: getstatic 22\thbm:a\tLjava/lang/String;\r\n // 7: astore 4\r\n // 9: getstatic 17\tcom/google/android/libraries/social/autobackup/MediaRecordEntry:a\tLiao;\r\n // 12: getfield 124\tiao:b\t[Ljava/lang/String;\r\n // 15: astore 5\r\n // 17: getstatic 86\thbm:e\tLjava/lang/String;\r\n // 20: astore 6\r\n // 22: iconst_2\r\n // 23: anewarray 26\tjava/lang/String\r\n // 26: astore 7\r\n // 28: aload 7\r\n // 30: iconst_0\r\n // 31: aload_2\r\n // 32: aastore\r\n // 33: aload 7\r\n // 35: iconst_1\r\n // 36: iload_1\r\n // 37: invokestatic 134\tjava/lang/Integer:toString\t(I)Ljava/lang/String;\r\n // 40: aastore\r\n // 41: aload_0\r\n // 42: aload 4\r\n // 44: aload 5\r\n // 46: aload 6\r\n // 48: aload 7\r\n // 50: aconst_null\r\n // 51: aconst_null\r\n // 52: aconst_null\r\n // 53: sipush 500\r\n // 56: invokestatic 134\tjava/lang/Integer:toString\t(I)Ljava/lang/String;\r\n // 59: invokevirtual 373\tandroid/database/sqlite/SQLiteDatabase:query\t(Ljava/lang/String;[Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Landroid/database/Cursor;\r\n // 62: astore 8\r\n // 64: iconst_0\r\n // 65: istore 9\r\n // 67: aload 8\r\n // 69: invokeinterface 98 1 0\r\n // 74: ifeq +52 -> 126\r\n // 77: aload 8\r\n // 79: invokestatic 
101\tcom/google/android/libraries/social/autobackup/MediaRecordEntry:a\t(Landroid/database/Cursor;)Lcom/google/android/libraries/social/autobackup/MediaRecordEntry;\r\n // 82: astore 11\r\n // 84: aload 11\r\n // 86: lconst_0\r\n // 87: putfield 105\tcom/google/android/libraries/social/autobackup/MediaRecordEntry:id\tJ\r\n // 90: aload 11\r\n // 92: iload_1\r\n // 93: putfield 109\tcom/google/android/libraries/social/autobackup/MediaRecordEntry:mUploadAccountId\tI\r\n // 96: aload 11\r\n // 98: bipush 30\r\n // 100: putfield 112\tcom/google/android/libraries/social/autobackup/MediaRecordEntry:mUploadReason\tI\r\n // 103: aload 11\r\n // 105: bipush 100\r\n // 107: putfield 115\tcom/google/android/libraries/social/autobackup/MediaRecordEntry:mUploadState\tI\r\n // 110: getstatic 17\tcom/google/android/libraries/social/autobackup/MediaRecordEntry:a\tLiao;\r\n // 113: aload_0\r\n // 114: aload 11\r\n // 116: invokevirtual 118\tiao:a\t(Landroid/database/sqlite/SQLiteDatabase;Lial;)J\r\n // 119: pop2\r\n // 120: iinc 9 1\r\n // 123: goto -56 -> 67\r\n // 126: aload_0\r\n // 127: invokevirtual 376\tandroid/database/sqlite/SQLiteDatabase:setTransactionSuccessful\t()V\r\n // 130: aload 8\r\n // 132: invokeinterface 121 1 0\r\n // 137: aload_0\r\n // 138: invokevirtual 379\tandroid/database/sqlite/SQLiteDatabase:endTransaction\t()V\r\n // 141: iload 9\r\n // 143: ireturn\r\n // 144: astore 10\r\n // 146: aload 8\r\n // 148: invokeinterface 121 1 0\r\n // 153: aload 10\r\n // 155: athrow\r\n // 156: astore_3\r\n // 157: aload_0\r\n // 158: invokevirtual 379\tandroid/database/sqlite/SQLiteDatabase:endTransaction\t()V\r\n // 161: aload_3\r\n // 162: athrow\r\n // Local variable table:\r\n // start\tlength\tslot\tname\tsignature\r\n // 0\t163\t0\tparamSQLiteDatabase\tSQLiteDatabase\r\n // 0\t163\t1\tparamInt\tint\r\n // 0\t163\t2\tparamString\tString\r\n // 156\t6\t3\tlocalObject1\tObject\r\n // 7\t36\t4\tstr1\tString\r\n // 15\t30\t5\tarrayOfString1\tString[]\r\n // 
20\t27\t6\tstr2\tString\r\n // 26\t23\t7\tarrayOfString2\tString[]\r\n // 62\t85\t8\tlocalCursor\tCursor\r\n // 65\t77\t9\ti\tint\r\n // 144\t10\t10\tlocalObject2\tObject\r\n // 82\t33\t11\tlocalMediaRecordEntry\tMediaRecordEntry\r\n // Exception table:\r\n // from\tto\ttarget\ttype\r\n // 67\t120\t144\tfinally\r\n // 126\t130\t144\tfinally\r\n // 4\t64\t156\tfinally\r\n // 130\t137\t156\tfinally\r\n // 146\t156\t156\tfinally\r\n }\r\n \r\n static int b(hci paramhci, int paramInt1, int paramInt2)\r\n {\r\n if (paramInt1 == -1) {\r\n return 0;\r\n }\r\n SQLiteDatabase localSQLiteDatabase = paramhci.getReadableDatabase();\r\n String str = d;\r\n String[] arrayOfString = new String[2];\r\n arrayOfString[0] = Integer.toString(paramInt1);\r\n arrayOfString[1] = Integer.toString(paramInt2);\r\n return (int)DatabaseUtils.longForQuery(localSQLiteDatabase, str, arrayOfString);\r\n }\r\n \r\n public static MediaRecordEntry b(hci paramhci, int paramInt)\r\n {\r\n String str;\r\n String[] arrayOfString;\r\n if (paramInt == -1)\r\n {\r\n str = \"upload_account_id != -1 AND upload_state = 100\";\r\n arrayOfString = null;\r\n }\r\n for (;;)\r\n {\r\n Cursor localCursor = paramhci.getReadableDatabase().query(a, MediaRecordEntry.a.b, str, arrayOfString, null, null, \"upload_reason ASC, upload_state ASC, upload_status ASC, is_image DESC, retry_end_time ASC LIMIT 1\");\r\n try\r\n {\r\n if (localCursor.moveToNext())\r\n {\r\n MediaRecordEntry localMediaRecordEntry = MediaRecordEntry.a(localCursor);\r\n return localMediaRecordEntry;\r\n str = \"upload_account_id = ? 
AND upload_state = 100\";\r\n arrayOfString = new String[1];\r\n arrayOfString[0] = Integer.toString(paramInt);\r\n continue;\r\n }\r\n return null;\r\n }\r\n finally\r\n {\r\n localCursor.close();\r\n }\r\n }\r\n }\r\n \r\n public static int c(hci paramhci, int paramInt)\r\n {\r\n SQLiteDatabase localSQLiteDatabase = paramhci.getReadableDatabase();\r\n String str = c;\r\n String[] arrayOfString = new String[1];\r\n arrayOfString[0] = Integer.toString(paramInt);\r\n return (int)DatabaseUtils.longForQuery(localSQLiteDatabase, str, arrayOfString);\r\n }\r\n \r\n static void c(hci paramhci, int paramInt1, int paramInt2)\r\n {\r\n if (paramInt1 == -1) {\r\n throw new IllegalStateException(52 + \"can't enable upload for invalid account: \" + paramInt1);\r\n }\r\n SQLiteDatabase localSQLiteDatabase1 = paramhci.getWritableDatabase();\r\n int i = 0;\r\n for (;;)\r\n {\r\n localSQLiteDatabase1.beginTransaction();\r\n try\r\n {\r\n int j = a(localSQLiteDatabase1, paramInt1, 40);\r\n localSQLiteDatabase1.setTransactionSuccessful();\r\n localSQLiteDatabase1.endTransaction();\r\n i += j;\r\n if (j <= 0)\r\n {\r\n if (Log.isLoggable(\"iu.UploadsManager\", 4)) {\r\n new StringBuilder(35).append(\"START; scheduled \").append(i).append(\" photos\");\r\n }\r\n SQLiteDatabase localSQLiteDatabase2 = paramhci.getReadableDatabase();\r\n String str = a;\r\n String[] arrayOfString1 = MediaRecordEntry.a.b;\r\n String[] arrayOfString2 = new String[1];\r\n arrayOfString2[0] = Integer.toString(paramInt1);\r\n Cursor localCursor = localSQLiteDatabase2.query(true, str, arrayOfString1, \"upload_account_id = ? 
AND upload_state = 300\", arrayOfString2, null, null, null, null);\r\n MediaRecordEntry localMediaRecordEntry;\r\n localCursor.close();\r\n }\r\n }\r\n finally {}\r\n }\r\n }\r\n \r\n /* Error */\r\n static int d(hci paramhci, int paramInt)\r\n {\r\n // Byte code:\r\n // 0: aload_0\r\n // 1: invokevirtual 260\thci:getReadableDatabase\t()Landroid/database/sqlite/SQLiteDatabase;\r\n // 4: iconst_1\r\n // 5: getstatic 22\thbm:a\tLjava/lang/String;\r\n // 8: iconst_1\r\n // 9: anewarray 26\tjava/lang/String\r\n // 12: dup\r\n // 13: iconst_0\r\n // 14: ldc_w 404\r\n // 17: aastore\r\n // 18: ldc_w 406\r\n // 21: aconst_null\r\n // 22: aconst_null\r\n // 23: aconst_null\r\n // 24: aconst_null\r\n // 25: aconst_null\r\n // 26: invokevirtual 142\tandroid/database/sqlite/SQLiteDatabase:query\t(ZLjava/lang/String;[Ljava/lang/String;Ljava/lang/String;[Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)Landroid/database/Cursor;\r\n // 29: astore_2\r\n // 30: aload_2\r\n // 31: invokeinterface 98 1 0\r\n // 36: ifeq +25 -> 61\r\n // 39: aload_2\r\n // 40: iconst_0\r\n // 41: invokeinterface 409 2 0\r\n // 46: istore 5\r\n // 48: iload 5\r\n // 50: istore 4\r\n // 52: aload_2\r\n // 53: invokeinterface 121 1 0\r\n // 58: iload 4\r\n // 60: ireturn\r\n // 61: iconst_0\r\n // 62: istore 4\r\n // 64: goto -12 -> 52\r\n // 67: astore_3\r\n // 68: aload_2\r\n // 69: invokeinterface 121 1 0\r\n // 74: aload_3\r\n // 75: athrow\r\n // Local variable table:\r\n // start\tlength\tslot\tname\tsignature\r\n // 0\t76\t0\tparamhci\thci\r\n // 0\t76\t1\tparamInt\tint\r\n // 29\t40\t2\tlocalCursor\tCursor\r\n // 67\t8\t3\tlocalObject\tObject\r\n // 50\t13\t4\ti\tint\r\n // 46\t3\t5\tj\tint\r\n // Exception table:\r\n // from\tto\ttarget\ttype\r\n // 30\t48\t67\tfinally\r\n }\r\n}\r\n\r\n\r\r\n/* Location: F:\\apktool\\apktool\\com.google.android.apps.plus\\classes-dex2jar.jar\r\r\n * Qualified Name: hbm\r\r\n * JD-Core Version: 0.7.0.1\r\r\n */", 
"meta": {"content_hash": "7a0744910524651372847a2df967f466", "timestamp": "", "source": "github", "line_count": 547, "max_line_length": 376, "avg_line_length": 38.04753199268738, "alnum_prop": 0.6075341149336921, "repo_name": "ChiangC/FMTech", "id": "e2130c80069385b9bc76776af795a61f615ab637", "size": "20812", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "GooglePlus/app/src/main/java/hbm.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C++", "bytes": "935"}, {"name": "CMake", "bytes": "1767"}, {"name": "HTML", "bytes": "28061"}, {"name": "Java", "bytes": "47051212"}, {"name": "JavaScript", "bytes": "1535"}, {"name": "Makefile", "bytes": "5823"}]}} {"text": "import { \n Component \n} from '@angular/core';\n\n@Component({\n templateUrl: 'index.html',\n styleUrls: ['index.css'],\n})\nexport \nclass Home { }\n", "meta": {"content_hash": "db90c23e0b2c7340190c522b21f4c61e", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 28, "avg_line_length": 14.4, "alnum_prop": 0.6319444444444444, "repo_name": "MiningTheDisclosures/conflict-minerals-data", "id": "221fc21ccf27026a2ab8cd0402a118631cce1c7a", "size": "144", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "conflict_minerals_data/frontend/app/components/home/index.ts", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "2784"}, {"name": "HTML", "bytes": "4624"}, {"name": "JavaScript", "bytes": "4426886"}, {"name": "Jupyter Notebook", "bytes": "69090"}, {"name": "Python", "bytes": "37833"}, {"name": "TypeScript", "bytes": "15706"}]}} {"text": "var params = {\n tables: 14,\n currentTable: 0,\n currentQuestion: -1\n};\nvar questions = [\n { q: \"\u00bfCu\u00e1l es el color preferido de Silvina?\", o: [ \"Verde\", \"Amarillo\", \"Azul\" ], a: 1 },\n { q: \"\u00bfA qu\u00e9 edad sali\u00f3 Pablo del closet?\", o: [ \"15\", \"18\", \"Hoy\" ], a: 0 },\n { q: \"\u00bfQui\u00e9n se tom\u00f3 todo el vino?\", o: [ \"La mona 
Gimenez\", \"Silchu\", \"Pablo\" ], a: 2 },\n { q: \"\u00bfCual de estas prendas no usaria pablo?\", o: [ \"Pollera\", \"Zapatos con taco\", \"Medias de red\" ], a: 0 }\n];\n\n$().ready(function() {\n\n var nextTable = function() {\n params.currentTable++;\n params.currentQuestion = -1;\n next();\n };\n\n var showQuestion = function() {\n var question = questions[params.currentQuestion];\n $('#question').text(question.q);\n $('#answers').empty();\n for (var i in question.o) {\n var value = question.o[i];\n $('#answers').append($('\n\n\n{% endblock %}\n\n", "meta": {"content_hash": "b04f11482951afefef9f52440da6ea38", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 46, "avg_line_length": 22.125, "alnum_prop": 0.6045197740112994, "repo_name": "rubeon/django-xblog", "id": "a6d7d2442ec3977dfad1a57ea3fc36c5f8767cd0", "size": "177", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "xblog/templates/xblog/author_form.html", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "CSS", "bytes": "73797"}, {"name": "HTML", "bytes": "11830"}, {"name": "JavaScript", "bytes": "498"}, {"name": "Python", "bytes": "242211"}]}} {"text": "package com.ezweather.app.activity;\n\nimport android.app.Activity;\nimport android.app.ProgressDialog;\nimport android.content.Intent;\nimport android.content.SharedPreferences;\nimport android.os.Bundle;\nimport android.preference.PreferenceManager;\nimport android.text.TextUtils;\nimport android.util.Log;\nimport android.view.View;\nimport android.view.Window;\nimport android.widget.AdapterView;\nimport android.widget.ArrayAdapter;\nimport android.widget.ListView;\nimport android.widget.TextView;\nimport android.widget.Toast;\n\nimport com.ezweather.app.R;\nimport com.ezweather.app.model.City;\nimport com.ezweather.app.model.County;\nimport com.ezweather.app.db.EzWeatherDB;\nimport com.ezweather.app.model.Province;\nimport com.ezweather.app.util.HttpCallbackListener;\nimport 
com.ezweather.app.util.HttpUtil;\nimport com.ezweather.app.util.Utility;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n/**\n * Created by Oniros on 2016/4/8.\n */\npublic class ChooseAreaActivity extends Activity {\n\n public static final int LEVEL_PROVINCE = 0;\n public static final int LEVEL_CITY = 1;\n public static final int LEVEL_COUNTY = 2;\n\n private ProgressDialog progressDialog;\n private TextView titleText;\n private ListView listView;\n private ArrayAdapter adapter;\n private EzWeatherDB ezWeatherDB;\n private List dataList = new ArrayList();\n /**\n * \u7701\u5217\u8868\n */\n private List provinceList;\n /**\n * \u5e02\u5217\u8868\n */\n private List cityList;\n /**\n * \u53bf\u5217\u8868\n */\n private List countyList;\n /**\n * \u9009\u4e2d\u7684\u7701\u4efd\n */\n private Province selectedProvince;\n /**\n * \u9009\u4e2d\u7684\u57ce\u5e02\n */\n private City selectedCity;\n /**\n * \u5f53\u524d\u9009\u4e2d\u7684\u7ea7\u522b\n */\n private int currentLevel;\n /**\n * \u662f\u5426\u4eceWeatherActivity\u4e2d\u8df3\u8f6c\u8fc7\u6765\u3002\n */\n private boolean isFromWeatherActivity;\n\n @Override\n protected void onCreate(Bundle savedInstanceState) {\n super.onCreate(savedInstanceState);\n isFromWeatherActivity = getIntent().getBooleanExtra(\"from_weather_activity\", false);\n SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);\n if (prefs.getBoolean(\"city_selected\", false) && !isFromWeatherActivity) {\n Intent intent = new Intent(this, WeatherActivity.class);\n startActivity(intent);\n finish();\n return;\n }\n requestWindowFeature(Window.FEATURE_NO_TITLE);\n setContentView(R.layout.choose_area);\n listView = (ListView) findViewById(R.id.list_view);\n titleText = (TextView) findViewById(R.id.title_text);\n adapter = new ArrayAdapter(this, android.R.layout.simple_list_item_1, dataList);\n listView.setAdapter(adapter);\n ezWeatherDB = EzWeatherDB.getInstance(this);\n listView.setOnItemClickListener(new 
AdapterView.OnItemClickListener() {\n @Override\n public void onItemClick(AdapterView arg0, View view, int index,\n long arg3) {\n if (currentLevel == LEVEL_PROVINCE) {\n selectedProvince = provinceList.get(index);\n queryCities();\n } else if (currentLevel == LEVEL_CITY) {\n selectedCity = cityList.get(index);\n queryCounties();\n } else if (currentLevel == LEVEL_COUNTY) {\n String countyCode = countyList.get(index).getCountyCode();\n Intent intent = new Intent(ChooseAreaActivity.this, WeatherActivity.class);\n intent.putExtra(\"county_code\", countyCode);\n startActivity(intent);\n finish();\n }\n }\n });\n queryProvinces(); // \u52a0\u8f7d\u7701\u7ea7\u6570\u636e\n }\n\n /**\n * \u67e5\u8be2\u5168\u56fd\u6240\u6709\u7684\u7701\uff0c\u4f18\u5148\u4ece\u6570\u636e\u5e93\u67e5\u8be2\uff0c\u5982\u679c\u6ca1\u6709\u67e5\u8be2\u5230\u518d\u53bb\u670d\u52a1\u5668\u4e0a\u67e5\u8be2\u3002\n */\n private void queryProvinces() {\n provinceList =ezWeatherDB .loadProvince();\n if (provinceList.size() > 0) {\n dataList.clear();\n for (Province province : provinceList) {\n dataList.add(province.getProvinceName());\n }\n adapter.notifyDataSetChanged();\n listView.setSelection(0);\n titleText.setText(\"\u4e2d\u56fd\");\n currentLevel = LEVEL_PROVINCE;\n } else {\n queryFromServer(null, \"province\");\n }\n }\n\n /**\n * \u67e5\u8be2\u9009\u4e2d\u7701\u5185\u6240\u6709\u7684\u5e02\uff0c\u4f18\u5148\u4ece\u6570\u636e\u5e93\u67e5\u8be2\uff0c\u5982\u679c\u6ca1\u6709\u67e5\u8be2\u5230\u518d\u53bb\u670d\u52a1\u5668\u4e0a\u67e5\u8be2\u3002\n */\n private void queryCities() {\n cityList = ezWeatherDB.loadCities(selectedProvince.getId());\n if (cityList.size() > 0) {\n dataList.clear();\n for (City city : cityList) {\n dataList.add(city.getCityName());\n }\n adapter.notifyDataSetChanged();\n listView.setSelection(0);\n titleText.setText(selectedProvince.getProvinceName());\n currentLevel = LEVEL_CITY;\n } else {\n queryFromServer(selectedProvince.getProvinceCode(), \"city\");\n }\n }\n\n 
/**\n * \u67e5\u8be2\u9009\u4e2d\u5e02\u5185\u6240\u6709\u7684\u53bf\uff0c\u4f18\u5148\u4ece\u6570\u636e\u5e93\u67e5\u8be2\uff0c\u5982\u679c\u6ca1\u6709\u67e5\u8be2\u5230\u518d\u53bb\u670d\u52a1\u5668\u4e0a\u67e5\u8be2\u3002\n */\n private void queryCounties() {\n countyList = ezWeatherDB.loadCounties(selectedCity.getId());\n if (countyList.size() > 0) {\n dataList.clear();\n for (County county : countyList) {\n dataList.add(county.getCountyName());\n }\n adapter.notifyDataSetChanged();\n listView.setSelection(0);\n titleText.setText(selectedCity.getCityName());\n currentLevel = LEVEL_COUNTY;\n } else {\n queryFromServer(selectedCity.getCityCode(), \"county\");\n }\n }\n\n /**\n * \u6839\u636e\u4f20\u5165\u7684\u4ee3\u53f7\u548c\u7c7b\u578b\u4ece\u670d\u52a1\u5668\u4e0a\u67e5\u8be2\u7701\u5e02\u53bf\u6570\u636e\u3002\n */\n private void queryFromServer(final String code, final String type) {\n String address;\n if (!TextUtils.isEmpty(code)) {\n address = \"http://www.weather.com.cn/data/list3/city\" + code + \".xml\";\n } else {\n address = \"http://www.weather.com.cn/data/list3/city.xml\";\n }\n showProgressDialog();\n HttpUtil.sendHttpRequest(address, new HttpCallbackListener() {\n @Override\n public void onFinish(String response) {\n boolean result = false;\n if (\"province\".equals(type)) {\n result = Utility.handleProvinceResponse(ezWeatherDB,\n response);\n } else if (\"city\".equals(type)) {\n result = Utility.handleCitiesResponse(ezWeatherDB,\n response, selectedProvince.getId());\n } else if (\"county\".equals(type)) {\n result = Utility.handleCountiesResponse(ezWeatherDB,\n response, selectedCity.getId());\n }\n if (result) {\n // \u901a\u8fc7runOnUiThread()\u65b9\u6cd5\u56de\u5230\u4e3b\u7ebf\u7a0b\u5904\u7406\u903b\u8f91\n runOnUiThread(new Runnable() {\n @Override\n public void run() {\n closeProgressDialog();\n if (\"province\".equals(type)) {\n queryProvinces();\n } else if (\"city\".equals(type)) {\n queryCities();\n } else if 
(\"county\".equals(type)) {\n queryCounties();\n }\n }\n });\n }\n }\n\n @Override\n public void onError(Exception e) {\n // \u901a\u8fc7runOnUiThread()\u65b9\u6cd5\u56de\u5230\u4e3b\u7ebf\u7a0b\u5904\u7406\u903b\u8f91\n runOnUiThread(new Runnable() {\n @Override\n public void run() {\n closeProgressDialog();\n Toast.makeText(ChooseAreaActivity.this,\n \"\u52a0\u8f7d\u5931\u8d25\", Toast.LENGTH_SHORT).show();\n }\n });\n }\n });\n }\n\n /**\n * \u663e\u793a\u8fdb\u5ea6\u5bf9\u8bdd\u6846\n */\n private void showProgressDialog() {\n if (progressDialog == null) {\n progressDialog = new ProgressDialog(this);\n progressDialog.setMessage(\"\u6b63\u5728\u52a0\u8f7d...\");\n progressDialog.setCanceledOnTouchOutside(false);\n }\n progressDialog.show();\n }\n\n /**\n * \u5173\u95ed\u8fdb\u5ea6\u5bf9\u8bdd\u6846\n */\n private void closeProgressDialog() {\n if (progressDialog != null) {\n progressDialog.dismiss();\n }\n }\n\n /**\n * \u6355\u83b7Back\u6309\u952e\uff0c\u6839\u636e\u5f53\u524d\u7684\u7ea7\u522b\u6765\u5224\u65ad\uff0c\u6b64\u65f6\u5e94\u8be5\u8fd4\u56de\u5e02\u5217\u8868\u3001\u7701\u5217\u8868\u3001\u8fd8\u662f\u76f4\u63a5\u9000\u51fa\u3002\n */\n @Override\n public void onBackPressed() {\n if (currentLevel == LEVEL_COUNTY) {\n queryCities();\n } else if (currentLevel == LEVEL_CITY) {\n queryProvinces();\n } else {\n if (isFromWeatherActivity) {\n// Intent intent = new Intent(this, WeatherActivity.class);\n// startActivity(intent);\n }\n finish();\n }\n }\n\n}", "meta": {"content_hash": "56caf12ddd93e032926c8c13d38247b9", "timestamp": "", "source": "github", "line_count": 269, "max_line_length": 96, "avg_line_length": 33.152416356877325, "alnum_prop": 0.554047992823503, "repo_name": "Oniros6/EZweather", "id": "214a3f813e79026dda311f952e041527230eca2a", "size": "9432", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/ezweather/app/activity/ChooseAreaActivity.java", "mode": "33188", "license": "apache-2.0", "language": 
[{"name": "Java", "bytes": "33766"}]}} {"text": "namespace crashpad {\n\nclass ProcessReaderWin;\n\nnamespace internal {\n\nclass ExceptionSnapshotWin final : public ExceptionSnapshot {\n public:\n ExceptionSnapshotWin();\n ~ExceptionSnapshotWin() override;\n\n //! \\brief Initializes the object.\n //!\n //! \\param[in] process_reader A ProcessReader for the process that sustained\n //! the exception.\n //! \\param[in] thread_id The thread ID in which the exception occurred.\n //! \\param[in] exception_pointers_address The address of an\n //! `EXCEPTION_POINTERS` record in the target process, passed through from\n //! the exception handler.\n //!\n //! \\return `true` if the snapshot could be created, `false` otherwise with\n //! an appropriate message logged.\n bool Initialize(ProcessReaderWin* process_reader,\n DWORD thread_id,\n WinVMAddress exception_pointers);\n\n // ExceptionSnapshot:\n\n const CPUContext* Context() const override;\n uint64_t ThreadID() const override;\n uint32_t Exception() const override;\n uint32_t ExceptionInfo() const override;\n uint64_t ExceptionAddress() const override;\n const std::vector& Codes() const override;\n\n private:\n#if defined(ARCH_CPU_X86_FAMILY)\n union {\n CPUContextX86 x86;\n CPUContextX86_64 x86_64;\n } context_union_;\n#endif\n CPUContext context_;\n std::vector codes_;\n uint64_t thread_id_;\n uint64_t exception_address_;\n uint32_t exception_flags_;\n DWORD exception_code_;\n InitializationStateDcheck initialized_;\n\n DISALLOW_COPY_AND_ASSIGN(ExceptionSnapshotWin);\n};\n\n} // namespace internal\n} // namespace crashpad\n\n#endif // CRASHPAD_SNAPSHOT_WIN_EXCEPTION_SNAPSHOT_WIN_H_\n", "meta": {"content_hash": "773c56745f5f05e787f04e40773ce653", "timestamp": "", "source": "github", "line_count": 57, "max_line_length": 80, "avg_line_length": 29.12280701754386, "alnum_prop": 0.7096385542168675, "repo_name": "Teamxrtc/webrtc-streaming-node", "id": "588c4ac9f642452d54534bf6c1916a7022b5de8c", "size": "2659", "binary": 
false, "copies": "1", "ref": "refs/heads/master", "path": "third_party/webrtc/src/chromium/src/third_party/crashpad/crashpad/snapshot/win/exception_snapshot_win.h", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "44"}, {"name": "C++", "bytes": "221840"}, {"name": "HTML", "bytes": "2383"}, {"name": "JavaScript", "bytes": "37396"}, {"name": "Python", "bytes": "2860"}, {"name": "Shell", "bytes": "104"}]}} {"text": "\r\n\r\n/*\r\n * timeval.h 1.0 01/12/19\r\n *\r\n * Defines gettimeofday, timeval, etc. for Win32\r\n *\r\n * By Wu Yongwei\r\n *\r\n */\r\n\r\n#ifndef _TIMEVAL_H\r\n#define _TIMEVAL_H\r\n\r\n#ifdef _WIN32\r\n\r\n/* Modified to compile as ANSI C without include of windows.h\r\n If this gives problems with future Windows/MSC versions, then\r\n uncomment the USE_WINDOWS_H definition to switch back. */\r\n/* #define USE_WINDOWS_H */\r\n#ifdef USE_WINDOWS_H\r\n #define WIN32_LEAN_AND_MEAN\r\n #include \r\n#else\r\n#ifndef _INC_WINDOWS\r\n #define VOID void\r\n #define WINAPI __stdcall\r\n #define OUT\r\n #define WINBASEAPI\r\n\r\n typedef long LONG;\r\n typedef unsigned long DWORD;\r\n typedef __int64 LONGLONG;\r\n\r\n typedef struct _FILETIME {\r\n DWORD dwLowDateTime;\r\n DWORD dwHighDateTime;\r\n } FILETIME, *LPFILETIME;\r\n\r\n typedef union _LARGE_INTEGER {\r\n /* Removed unnamed struct,\r\n it is not ANSI C compatible*/\r\n /* struct {\r\n DWORD LowPart;\r\n LONG HighPart;\r\n }; */\r\n struct {\r\n DWORD LowPart;\r\n LONG HighPart;\r\n } u;\r\n LONGLONG QuadPart;\r\n } LARGE_INTEGER;\r\n\r\n WINBASEAPI VOID WINAPI\r\n GetSystemTimeAsFileTime(OUT LPFILETIME lpSystemTimeAsFileTime);\r\n#endif /* _INC_WINDOWS */\r\n#endif /* USE_WINDOWS_H */\r\n\r\n#include \r\n\r\n#ifndef __GNUC__\r\n#define EPOCHFILETIME (116444736000000000i64)\r\n#else\r\n#define EPOCHFILETIME (116444736000000000LL)\r\n#endif\r\n\r\nstruct timeval {\r\n long tv_sec; /* seconds */\r\n long tv_usec; /* microseconds */\r\n};\r\n\r\nstruct timezone {\r\n int 
tz_minuteswest; /* minutes W of Greenwich */\r\n int tz_dsttime; /* type of dst correction */\r\n};\r\n\r\n__inline int gettimeofday(struct timeval *tv, struct timezone *tz)\r\n{\r\n FILETIME ft;\r\n LARGE_INTEGER li;\r\n __int64 t;\r\n static int tzflag;\r\n\r\n if (tv)\r\n {\r\n GetSystemTimeAsFileTime(&ft);\r\n\r\n /* The following two lines have been modified to use the named\r\n union member. Unnamed members are not ANSI C compatible. */\r\n li.u.LowPart = ft.dwLowDateTime;\r\n li.u.HighPart = ft.dwHighDateTime;\r\n t = li.QuadPart; /* In 100-nanosecond intervals */\r\n t -= EPOCHFILETIME; /* Offset to the Epoch time */\r\n t /= 10; /* In microseconds */\r\n tv->tv_sec = (long)(t / 1000000);\r\n tv->tv_usec = (long)(t % 1000000);\r\n }\r\n\r\n if (tz)\r\n {\r\n if (!tzflag)\r\n {\r\n _tzset();\r\n tzflag++;\r\n }\r\n tz->tz_minuteswest = _timezone / 60;\r\n tz->tz_dsttime = _daylight;\r\n }\r\n\r\n return 0;\r\n}\r\n\r\n#else /* _WIN32 */\r\n\r\n#include \r\n\r\n#endif /* _WIN32 */\r\n\r\n#endif /* _TIMEVAL_H */\r\n", "meta": {"content_hash": "1a6559bbdae2ef5db88bf00c8b922f71", "timestamp": "", "source": "github", "line_count": 119, "max_line_length": 70, "avg_line_length": 23.352941176470587, "alnum_prop": 0.5588341129902843, "repo_name": "egorpushkin/neurolab", "id": "58b1f24f0b3ce0ab671619f2d25379db8e17208f", "size": "3639", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "source/Plugins/ScriptEnv/LuaNeuroApi/Extensions/Training/fann_1.2.1/Engine/compat_time.h", "mode": "33261", "license": "mit", "language": [{"name": "Batchfile", "bytes": "112"}, {"name": "C", "bytes": "1787692"}, {"name": "C++", "bytes": "9312245"}, {"name": "CSS", "bytes": "44465"}, {"name": "Clarion", "bytes": "5268"}, {"name": "HTML", "bytes": "408820"}, {"name": "JavaScript", "bytes": "5476"}, {"name": "Makefile", "bytes": "39256"}, {"name": "Objective-C", "bytes": "97952"}, {"name": "TeX", "bytes": "3128"}]}} {"text": "\n\n\n \n\n \n \n\n", "meta": 
{"content_hash": "80daabfb0b4835ea145f6d3ece59d905", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 60, "avg_line_length": 38.225, "alnum_prop": 0.7325049051667757, "repo_name": "alexzatsepin/omim", "id": "cf02975a35749ef615c2a85fc58f923ac78992db", "size": "1529", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "android/res/layout/place_page_description_layout.xml", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Awk", "bytes": "3962"}, {"name": "Batchfile", "bytes": "5586"}, {"name": "C", "bytes": "13984459"}, {"name": "C++", "bytes": "148411082"}, {"name": "CMake", "bytes": "249320"}, {"name": "CSS", "bytes": "26798"}, {"name": "Common Lisp", "bytes": "17587"}, {"name": "DIGITAL Command Language", "bytes": "36710"}, {"name": "GLSL", "bytes": "58384"}, {"name": "Gherkin", "bytes": "305230"}, {"name": "Go", "bytes": "12771"}, {"name": "HTML", "bytes": "9503594"}, {"name": "Inno Setup", "bytes": "4337"}, {"name": "Java", "bytes": "2486120"}, {"name": "JavaScript", "bytes": "29076"}, {"name": "Lua", "bytes": "57672"}, {"name": "M4", "bytes": "53992"}, {"name": "Makefile", "bytes": "429637"}, {"name": "Metal", "bytes": "77540"}, {"name": "Module Management System", "bytes": "2080"}, {"name": "Objective-C", "bytes": "2046640"}, {"name": "Objective-C++", "bytes": "1300948"}, {"name": "PHP", "bytes": "2841"}, {"name": "Perl", "bytes": "57807"}, {"name": "PowerShell", "bytes": "1885"}, {"name": "Python", "bytes": "584274"}, {"name": "Roff", "bytes": "13545"}, {"name": "Ruby", "bytes": "66800"}, {"name": "Shell", "bytes": "1317925"}, {"name": "Swift", "bytes": "511409"}, {"name": "sed", "bytes": "236"}]}} {"text": "\n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n#include \n\n#include \n#include \n#include \n\n#define LLC_LENGTH 4\n#define IP_OFFSET LLC_LENGTH\n#define IP_ADDR_LENGTH 4\n#define IP_HDR_LENGTH 20\n#define IP_LENGTH_OFFSET IP_OFFSET + 
2\n#define IP_CHKSUM_OFFSET IP_OFFSET + 10\n#define IP_SRC_ADDR_OFFSET IP_CHKSUM_OFFSET + 2\n#define IP_DEST_ADDR_OFFSET IP_CHKSUM_OFFSET + IP_ADDR_LENGTH\n\n#define UDP_OFFSET IP_OFFSET + IP_HDR_LENGTH\n#define UDP_HDR_LENGTH 8\n#define UDP_LENGTH_OFFSET UDP_OFFSET + 4\n#define UDP_CHKSUM_OFFSET UDP_OFFSET + 6\n#define DATA_OFFSET UDP_OFFSET + UDP_HDR_LENGTH\n#define MAX_PACKET 65536\n#define DEFAULT_SRC_PORT 55555\n#define DEFAULT_DEST_PORT CCN_DEFAULT_UNICAST_PORT_NUMBER\n\nstatic void\nusage(const char *progname)\n{\n fprintf(stderr,\n \"%s [ ...]\\n\"\n \" Reads ccnb blocks from one or more files, and writes them in pcap format\\n\"\n \" to stdout.\\n\"\n \" ccnb blocks can be generated by any of the other utility programs.\\n\",\n progname);\n exit(1);\n}\n\nstatic int\ndump_udp_packet(pcap_dumper_t *dump_file, \n unsigned char *ip_src_addr, /* ipv4, localhost if NULL */\n unsigned char *ip_dest_addr, /* localhost if NULL */\n unsigned short udp_src_port, /* 55555 if 0 */\n unsigned short udp_dest_port,\n const unsigned char *data, size_t data_len, /* data; could be whole ccnb, could\n just be contents */\n struct timeval *ts) { /* timing info */\n\n unsigned char pktbuf[MAX_PACKET];\n uint32_t llc_val = PF_INET; // in host byte order\n\n uint16_t nsrc_port = htons((0 == udp_src_port) ? DEFAULT_SRC_PORT : udp_src_port);\n uint16_t ndest_port = htons((0 == udp_dest_port) ? 
DEFAULT_DEST_PORT : udp_dest_port);\n uint16_t nudp_len = htons(data_len + UDP_HDR_LENGTH);\n uint16_t nip_len = htons(data_len + UDP_HDR_LENGTH + IP_HDR_LENGTH);\n\n size_t frame_len = data_len + UDP_HDR_LENGTH + IP_HDR_LENGTH + LLC_LENGTH;\n struct pcap_pkthdr pcap_header;\n \n const unsigned char ipHdr[] = {\n // IP header, localhost to localhost\n 0x45, // IPv4, 20 byte header\n 0x00, // diff serv field\n 0x00, 0x00, // length -- UDP length + 20\n 0x1a, 0x62, // identification\n 0x00, // flags\n 0x00, // fragment offset\n 0x40, // TTL (64)\n 0x11, // proto (UDP=11)\n 0x00, 0x00, // ip checksum (calculate, or leave 0 for validation disabled)\n 0x7f, 0x00, 0x00, 0x01, // source, localhost if not overwritten\n 0x7f, 0x00, 0x00, 0x01 // dest, localhost if not overwritten\n };\n\n unsigned char udpHdr[UDP_HDR_LENGTH];\n memset(udpHdr, 0, UDP_HDR_LENGTH);\n \n memcpy(&pktbuf[0], (unsigned char *)&llc_val, LLC_LENGTH);\n memcpy(&pktbuf[IP_OFFSET], ipHdr, IP_HDR_LENGTH);\n memcpy(&pktbuf[IP_LENGTH_OFFSET], &nip_len, 2);\n if (NULL != ip_src_addr) {\n memcpy(&pktbuf[IP_SRC_ADDR_OFFSET], ip_src_addr, IP_ADDR_LENGTH);\n }\n if (NULL != ip_dest_addr) {\n memcpy(&pktbuf[IP_DEST_ADDR_OFFSET], ip_dest_addr, IP_ADDR_LENGTH);\n }\n\n memcpy(&pktbuf[UDP_OFFSET], &nsrc_port, sizeof(unsigned short));\n memcpy(&pktbuf[UDP_OFFSET + sizeof(unsigned short)], &ndest_port, sizeof(unsigned short));\n memcpy(&pktbuf[UDP_LENGTH_OFFSET], &nudp_len, sizeof(unsigned short));\n\n memcpy(&pktbuf[DATA_OFFSET], data, data_len);\n\n pcap_header.len = pcap_header.caplen = frame_len;\n if (NULL != ts) {\n pcap_header.ts.tv_sec = ts->tv_sec;\n pcap_header.ts.tv_usec = ts->tv_usec;\n }\n\n pcap_dump((unsigned char *)dump_file, &pcap_header, &pktbuf[0]);\n\n if (0 != pcap_dump_flush(dump_file)) {\n fprintf(stderr, \"Error flushing pcap dump...\\n\");\n return -1;\n }\n return 0;\n}\n\nstatic int\nprocess_test(pcap_dumper_t *pcap_out, int content_only,\n unsigned char *ip_src_addr, /* ipv4, localhost if 
NULL */\n unsigned char *ip_dest_addr, /* localhost if NULL */\n unsigned short udp_src_port, /* 55555 if 0 */\n unsigned short udp_dest_port,\n unsigned char *data, size_t n)\n{\n struct ccn_skeleton_decoder skel_decoder = {0};\n struct ccn_skeleton_decoder *d = &skel_decoder;\n struct ccn_parsed_ContentObject content;\n struct ccn_indexbuf *comps = ccn_indexbuf_create();\n const unsigned char * content_value;\n size_t content_length;\n int res = 0;\n size_t s;\n\nretry:\n s = ccn_skeleton_decode(d, data, n);\n if (d->state < 0) {\n res = 1;\n fprintf(stderr, \"error state %d after %d of %d chars\\n\",\n (int)d->state, (int)s, (int)n);\n } else if (s == 0) {\n fprintf(stderr, \"nothing to do\\n\");\n } else {\n if (s < n) {\n if (!content_only) {\n if (dump_udp_packet(pcap_out, ip_src_addr, ip_dest_addr, \n udp_src_port, udp_dest_port, data, s, NULL) != 0) {\n res = 2;\n }\n } else {\n if (ccn_parse_ContentObject(data, s, &content, comps) != 0) {\n fprintf(stderr, \"unable to parse content object\\n\");\n res = 1;\n } else if (ccn_content_get_value(data, s, &content, &content_value, &content_length) != 0) {\n fprintf(stderr, \"unable to retrieve content value\\n\");\n res = 1;\n } else if (dump_udp_packet(pcap_out, ip_src_addr, ip_dest_addr, \n udp_src_port, udp_dest_port, \n content_value, content_length, NULL) != 0) {\n res = 2;\n }\n }\n /* fprintf(stderr, \"resuming at index %d\\n\", (int)d->index); */\n data += s;\n n -= s;\n if (res != 0) {\n fprintf(stderr, \"Error dumping content.\\n\");\n return res;\n }\n goto retry;\n }\n fprintf(stderr, \"\\n\");\n }\n if (!CCN_FINAL_DSTATE(d->state)) {\n res = 1;\n fprintf(stderr, \"incomplete state %d after %d of %d chars\\n\",\n (int)d->state, (int)s, (int)n);\n } else {\n if (!content_only) {\n if (dump_udp_packet(pcap_out, ip_src_addr, ip_dest_addr, \n udp_src_port, udp_dest_port, data, s, NULL) != 0) {\n res = 2;\n }\n } else {\n if (ccn_parse_ContentObject(data, s, &content, comps) != 0) {\n fprintf(stderr, 
\"unable to parse content object\\n\");\n res = 1;\n } else if (ccn_content_get_value(data, s, &content, &content_value, &content_length) != 0) {\n fprintf(stderr, \"unable to retrieve content value\\n\");\n res = 1;\n } else if (dump_udp_packet(pcap_out, ip_src_addr, ip_dest_addr, \n udp_src_port, udp_dest_port, content_value, content_length, NULL) != 0) {\n res = 2;\n }\n }\n\n res = 1;\n }\n return(res);\n}\n\nstatic int\nprocess_fd(pcap_dumper_t *pcap_out, int fd, int content_only,\n unsigned char *ip_src_addr, /* ipv4, localhost if NULL */\n unsigned char *ip_dest_addr, /* localhost if NULL */\n unsigned short udp_src_port, /* 55555 if 0 */\n unsigned short udp_dest_port\n )\n{\n unsigned char *buf;\n ssize_t len;\n struct stat s;\n int res = 0;\n\n res = fstat(fd, &s);\n len = s.st_size;\n buf = (unsigned char *)mmap((void *)NULL, len, PROT_READ, MAP_PRIVATE, fd, 0);\n if (buf == (void *)-1) return (1);\n fprintf(stderr, \" \\n\", (unsigned long)len);\n res |= process_test(pcap_out, content_only,\n ip_src_addr, ip_dest_addr, udp_src_port, udp_dest_port,\n buf, len);\n munmap((void *)buf, len);\n return(res);\n}\n\nint\nmain(int argc, char **argv)\n{\n pcap_t *pcap = NULL;\n pcap_dumper_t *pcap_out = NULL;\n int fd;\n int i;\n int res = 0;\n \n if (argc < 2) {\n usage(argv[0]);\n }\n\n pcap = pcap_open_dead(DLT_NULL, MAX_PACKET);\n if (NULL == pcap) {\n fprintf(stderr, \"Cannot open pcap descriptor!\\n\");\n exit(-1);\n }\n\n pcap_out = pcap_dump_open(pcap, \"-\");\n if (NULL == pcap_out) {\n fprintf(stderr, \"Cannot open output stdout!\\n\");\n usage(argv[0]);\n }\n\n for (i = 1; argv[i] != 0; i++) {\n fprintf(stderr, \"\\n\", argv[i]);\n\n fd = open(argv[i], O_RDONLY);\n if (-1 == fd) {\n perror(argv[i]);\n return(1);\n }\n\n /* DKS -- eventually take IP addresses and ports from command line,\n as well as whether to dump only the ccn content. 
*/\n res |= process_fd(pcap_out, fd, 0, NULL, NULL, 0, 0);\n }\n\n pcap_dump_close(pcap_out);\n pcap_close(pcap);\n return res;\n}\n", "meta": {"content_hash": "cc28a4f4531eaeeff9aee329ef682888", "timestamp": "", "source": "github", "line_count": 269, "max_line_length": 112, "avg_line_length": 34.3457249070632, "alnum_prop": 0.5437817945665115, "repo_name": "MobileCloudNetworking/icnaas", "id": "4298759f2cfb6a8773cf192f3c60466857107815", "size": "10164", "binary": false, "copies": "10", "ref": "refs/heads/master", "path": "mcn-ccn-router/ccnx-0.8.2/android/CCNx-Android-Services/jni/csrc/cmd/ccndumppcap.c", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "5189410"}, {"name": "C++", "bytes": "6050"}, {"name": "Groff", "bytes": "152559"}, {"name": "HTML", "bytes": "1686578"}, {"name": "Java", "bytes": "4146221"}, {"name": "Makefile", "bytes": "224881"}, {"name": "Perl", "bytes": "1513"}, {"name": "Python", "bytes": "193499"}, {"name": "Shell", "bytes": "180150"}, {"name": "Smarty", "bytes": "2619"}]}} {"text": "annotations = $annotations;\n }\n /**\n * @return Annotation[]\n */\n public function getAnnotations()\n {\n return $this->annotations;\n }\n /**\n * @param Attachment[]\n */\n public function setAttachments($attachments)\n {\n $this->attachments = $attachments;\n }\n /**\n * @return Attachment[]\n */\n public function getAttachments()\n {\n return $this->attachments;\n }\n /**\n * @param GoogleChatV1ContextualAddOnMarkup[]\n */\n public function setContextualAddOnMarkup($contextualAddOnMarkup)\n {\n $this->contextualAddOnMarkup = $contextualAddOnMarkup;\n }\n /**\n * @return GoogleChatV1ContextualAddOnMarkup[]\n */\n public function getContextualAddOnMarkup()\n {\n return $this->contextualAddOnMarkup;\n }\n /**\n * @param GsuiteIntegrationMetadata[]\n */\n public function setGsuiteIntegrationMetadata($gsuiteIntegrationMetadata)\n {\n $this->gsuiteIntegrationMetadata = $gsuiteIntegrationMetadata;\n }\n /**\n * @return 
GsuiteIntegrationMetadata[]\n */\n public function getGsuiteIntegrationMetadata()\n {\n return $this->gsuiteIntegrationMetadata;\n }\n /**\n * @param string\n */\n public function setText($text)\n {\n $this->text = $text;\n }\n /**\n * @return string\n */\n public function getText()\n {\n return $this->text;\n }\n /**\n * @param UserId\n */\n public function setUserId(UserId $userId)\n {\n $this->userId = $userId;\n }\n /**\n * @return UserId\n */\n public function getUserId()\n {\n return $this->userId;\n }\n}\n\n// Adding a class alias for backwards compatibility with the previous class name.\nclass_alias(PrivateMessageInfo::class, 'Google_Service_CloudSearch_PrivateMessageInfo');\n", "meta": {"content_hash": "b19234ca59175992c3a0db5ef398403d", "timestamp": "", "source": "github", "line_count": 111, "max_line_length": 88, "avg_line_length": 22.576576576576578, "alnum_prop": 0.6879489225857941, "repo_name": "googleapis/google-api-php-client-services", "id": "ca297a1e082d1b15c1a37812b81e1e997b732ff2", "size": "3096", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "src/CloudSearch/PrivateMessageInfo.php", "mode": "33188", "license": "apache-2.0", "language": [{"name": "PHP", "bytes": "55414116"}, {"name": "Python", "bytes": "427325"}, {"name": "Shell", "bytes": "787"}]}} {"text": "\npackage java.lang.constant;\n\nimport java.lang.invoke.CallSite;\nimport java.lang.invoke.MethodHandle;\nimport java.lang.invoke.MethodHandles;\nimport java.util.Arrays;\nimport java.util.Objects;\nimport java.util.stream.Stream;\n\nimport static java.lang.constant.ConstantDescs.CD_String;\nimport static java.lang.constant.ConstantUtils.EMPTY_CONSTANTDESC;\nimport static java.lang.constant.ConstantUtils.validateMemberName;\nimport static java.util.Objects.requireNonNull;\nimport static java.util.stream.Collectors.joining;\n\n/**\n * A nominal descriptor for an\n * {@code invokedynamic} call site.\n *\n *

Concrete subtypes of {@linkplain DynamicCallSiteDesc} should be immutable\n * and their behavior should not rely on object identity.\n *\n * @since 12\n */\npublic class DynamicCallSiteDesc {\n\n private final DirectMethodHandleDesc bootstrapMethod;\n private final ConstantDesc[] bootstrapArgs;\n private final String invocationName;\n private final MethodTypeDesc invocationType;\n\n /**\n * Creates a nominal descriptor for an {@code invokedynamic} call site.\n *\n * @param bootstrapMethod a {@link DirectMethodHandleDesc} describing the\n * bootstrap method for the {@code invokedynamic}\n * @param invocationName The unqualified name that would appear in the {@code NameAndType}\n * operand of the {@code invokedynamic}\n * @param invocationType a {@link MethodTypeDesc} describing the invocation\n * type that would appear in the {@code NameAndType}\n * operand of the {@code invokedynamic}\n * @param bootstrapArgs {@link ConstantDesc}s describing the static arguments\n * to the bootstrap, that would appear in the\n * {@code BootstrapMethods} attribute\n * @throws NullPointerException if any parameter or its contents are {@code null}\n * @throws IllegalArgumentException if the invocation name has the incorrect\n * format\n * @jvms 4.2.2 Unqualified Names\n */\n private DynamicCallSiteDesc(DirectMethodHandleDesc bootstrapMethod,\n String invocationName,\n MethodTypeDesc invocationType,\n ConstantDesc[] bootstrapArgs) {\n this.invocationName = validateMemberName(requireNonNull(invocationName), true);\n this.invocationType = requireNonNull(invocationType);\n this.bootstrapMethod = requireNonNull(bootstrapMethod);\n this.bootstrapArgs = requireNonNull(bootstrapArgs.clone());\n for (int i = 0; i < this.bootstrapArgs.length; i++) {\n requireNonNull(this.bootstrapArgs[i]);\n }\n if (invocationName.length() == 0)\n throw new IllegalArgumentException(\"Illegal invocation name: \" + invocationName);\n }\n\n /**\n * Creates a nominal descriptor for an {@code invokedynamic} call 
site.\n *\n * @param bootstrapMethod a {@link DirectMethodHandleDesc} describing the\n * bootstrap method for the {@code invokedynamic}\n * @param invocationName The unqualified name that would appear in the {@code NameAndType}\n * operand of the {@code invokedynamic}\n * @param invocationType a {@link MethodTypeDesc} describing the invocation\n * type that would appear in the {@code NameAndType}\n * operand of the {@code invokedynamic}\n * @param bootstrapArgs {@link ConstantDesc}s describing the static arguments\n * to the bootstrap, that would appear in the\n * {@code BootstrapMethods} attribute\n * @return the nominal descriptor\n * @throws NullPointerException if any parameter or its contents are {@code null}\n * @throws IllegalArgumentException if the invocation name has the incorrect\n * format\n * @jvms 4.2.2 Unqualified Names\n */\n public static DynamicCallSiteDesc of(DirectMethodHandleDesc bootstrapMethod,\n String invocationName,\n MethodTypeDesc invocationType,\n ConstantDesc... 
bootstrapArgs) {\n return new DynamicCallSiteDesc(bootstrapMethod, invocationName, invocationType, bootstrapArgs);\n }\n\n /**\n * Creates a nominal descriptor for an {@code invokedynamic} call site whose\n * bootstrap method has no static arguments.\n *\n * @param bootstrapMethod The bootstrap method for the {@code invokedynamic}\n * @param invocationName The invocationName that would appear in the\n * {@code NameAndType} operand of the {@code invokedynamic}\n * @param invocationType The invocation invocationType that would appear\n * in the {@code NameAndType} operand of the {@code invokedynamic}\n * @return the nominal descriptor\n * @throws NullPointerException if any parameter is null\n * @throws IllegalArgumentException if the invocation name has the incorrect\n * format\n */\n public static DynamicCallSiteDesc of(DirectMethodHandleDesc bootstrapMethod,\n String invocationName,\n MethodTypeDesc invocationType) {\n return new DynamicCallSiteDesc(bootstrapMethod, invocationName, invocationType, EMPTY_CONSTANTDESC);\n }\n\n /**\n * Creates a nominal descriptor for an {@code invokedynamic} call site whose\n * bootstrap method has no static arguments and for which the name parameter\n * is {@link ConstantDescs#DEFAULT_NAME}.\n *\n * @param bootstrapMethod a {@link DirectMethodHandleDesc} describing the\n * bootstrap method for the {@code invokedynamic}\n * @param invocationType a {@link MethodTypeDesc} describing the invocation\n * type that would appear in the {@code NameAndType}\n * operand of the {@code invokedynamic}\n * @return the nominal descriptor\n * @throws NullPointerException if any parameter is null\n */\n public static DynamicCallSiteDesc of(DirectMethodHandleDesc bootstrapMethod,\n MethodTypeDesc invocationType) {\n return of(bootstrapMethod, ConstantDescs.DEFAULT_NAME, invocationType);\n }\n\n /**\n * Returns a nominal descriptor for an {@code invokedynamic} call site whose\n * bootstrap method, name, and invocation type are the same as this one, 
but\n * with the specified bootstrap arguments.\n *\n * @param bootstrapArgs {@link ConstantDesc}s describing the static arguments\n * to the bootstrap, that would appear in the\n * {@code BootstrapMethods} attribute\n * @return the nominal descriptor\n * @throws NullPointerException if the argument or its contents are {@code null}\n */\n public DynamicCallSiteDesc withArgs(ConstantDesc... bootstrapArgs) {\n return new DynamicCallSiteDesc(bootstrapMethod, invocationName, invocationType, bootstrapArgs);\n }\n\n /**\n * Returns a nominal descriptor for an {@code invokedynamic} call site whose\n * bootstrap and bootstrap arguments are the same as this one, but with the\n * specified invocationName and invocation invocationType\n *\n * @param invocationName The unqualified name that would appear in the {@code NameAndType}\n * operand of the {@code invokedynamic}\n * @param invocationType a {@link MethodTypeDesc} describing the invocation\n * type that would appear in the {@code NameAndType}\n * operand of the {@code invokedynamic}\n * @return the nominal descriptor\n * @throws NullPointerException if any parameter is null\n * @throws IllegalArgumentException if the invocation name has the incorrect\n * format\n * @jvms 4.2.2 Unqualified Names\n */\n public DynamicCallSiteDesc withNameAndType(String invocationName,\n MethodTypeDesc invocationType) {\n return new DynamicCallSiteDesc(bootstrapMethod, invocationName, invocationType, bootstrapArgs);\n }\n\n /**\n * Returns the invocation name that would appear in the {@code NameAndType}\n * operand of the {@code invokedynamic}.\n *\n * @return the invocation name\n */\n public String invocationName() {\n return invocationName;\n }\n\n /**\n * Returns a {@link MethodTypeDesc} describing the invocation type that\n * would appear in the {@code NameAndType} operand of the {@code invokedynamic}.\n *\n * @return the invocation type\n */\n public MethodTypeDesc invocationType() {\n return invocationType;\n }\n\n /**\n * Returns a 
{@link MethodHandleDesc} describing the bootstrap method for\n * the {@code invokedynamic}.\n *\n * @return the bootstrap method for the {@code invokedynamic}\n */\n public MethodHandleDesc bootstrapMethod() { return bootstrapMethod; }\n\n /**\n * Returns {@link ConstantDesc}s describing the bootstrap arguments for the\n * {@code invokedynamic}. The returned array is always non-null. A zero\n * length array is returned if this {@linkplain DynamicCallSiteDesc} has no\n * bootstrap arguments.\n *\n * @return the bootstrap arguments for the {@code invokedynamic}\n */\n public ConstantDesc[] bootstrapArgs() { return bootstrapArgs.clone(); }\n\n /**\n * Reflectively invokes the bootstrap method with the specified arguments,\n * and return the resulting {@link CallSite}\n *\n * @param lookup The {@link MethodHandles.Lookup} used to resolve class names\n * @return the {@link CallSite}\n * @throws Throwable if any exception is thrown by the bootstrap method\n */\n public CallSite resolveCallSiteDesc(MethodHandles.Lookup lookup) throws Throwable {\n assert bootstrapMethod.invocationType().parameterType(1).equals(CD_String);\n MethodHandle bsm = (MethodHandle) bootstrapMethod.resolveConstantDesc(lookup);\n Object[] args = new Object[bootstrapArgs.length + 3];\n args[0] = lookup;\n args[1] = invocationName;\n args[2] = invocationType.resolveConstantDesc(lookup);\n System.arraycopy(bootstrapArgs, 0, args, 3, bootstrapArgs.length);\n return (CallSite) bsm.invokeWithArguments(args);\n }\n\n /**\n * Compares the specified object with this descriptor for equality. 
Returns\n * {@code true} if and only if the specified object is also a\n * {@linkplain DynamicCallSiteDesc}, and both descriptors have equal\n * bootstrap methods, bootstrap argument lists, invocation name, and\n * invocation type.\n *\n * @param o the {@code DynamicCallSiteDesc} to compare to this\n * {@code DynamicCallSiteDesc}\n * @return {@code true} if the specified {@code DynamicCallSiteDesc}\n * is equal to this {@code DynamicCallSiteDesc}.\n */\n @Override\n public final boolean equals(Object o) {\n if (this == o) return true;\n if (o == null || getClass() != o.getClass()) return false;\n DynamicCallSiteDesc specifier = (DynamicCallSiteDesc) o;\n return Objects.equals(bootstrapMethod, specifier.bootstrapMethod) &&\n Arrays.equals(bootstrapArgs, specifier.bootstrapArgs) &&\n Objects.equals(invocationName, specifier.invocationName) &&\n Objects.equals(invocationType, specifier.invocationType);\n }\n\n @Override\n public final int hashCode() {\n int result = Objects.hash(bootstrapMethod, invocationName, invocationType);\n result = 31 * result + Arrays.hashCode(bootstrapArgs);\n return result;\n }\n\n /**\n * Returns a compact textual description of this call site description,\n * including the bootstrap method, the invocation name and type, and\n * the static bootstrap arguments.\n *\n * @return A compact textual description of this call site descriptor\n */\n @Override\n public String toString() {\n return String.format(\"DynamicCallSiteDesc[%s::%s(%s%s):%s]\",\n bootstrapMethod.owner().displayName(),\n bootstrapMethod.methodName(),\n invocationName.equals(ConstantDescs.DEFAULT_NAME) ? 
\"\" : invocationName + \"/\",\n Stream.of(bootstrapArgs).map(Object::toString).collect(joining(\",\")),\n invocationType.displayDescriptor());\n }\n}\n", "meta": {"content_hash": "c07078b65059e63f64bc4f4f1fd9a656", "timestamp": "", "source": "github", "line_count": 269, "max_line_length": 108, "avg_line_length": 46.95539033457249, "alnum_prop": 0.6559258966035943, "repo_name": "mirkosertic/Bytecoder", "id": "cf6d09c14b396056c3ced98c22382e926ebc807d", "size": "13843", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "classlib/java.base/src/main/resources/META-INF/modules/java.base/classes/java/lang/constant/DynamicCallSiteDesc.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "153"}, {"name": "C++", "bytes": "1301"}, {"name": "CSS", "bytes": "5154"}, {"name": "Clojure", "bytes": "87"}, {"name": "HTML", "bytes": "599386"}, {"name": "Java", "bytes": "106011215"}, {"name": "Kotlin", "bytes": "15858"}, {"name": "LLVM", "bytes": "2839"}, {"name": "Shell", "bytes": "164"}]}} {"text": "module(\"wrapper\", package.seeall)\nlocal json = require(\"cjson.safe\")\n\n-- by default, only call json lib for encoding and decoding\n\nfunction encode(object)\n\treturn json.encode(object)\nend\n\nfunction decode(object)\n\treturn json.decode(object)\nend\n", "meta": {"content_hash": "6d1add8d6dda455f84ea07da89b59215", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 59, "avg_line_length": 20.333333333333332, "alnum_prop": 0.7663934426229508, "repo_name": "paintsnow/luainsight", "id": "f86b6d3bf4614a4ab51e44ded69b59c88789d27c", "size": "244", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "run/lua/wrapper.lua", "mode": "33188", "license": "mit", "language": [{"name": "Common Lisp", "bytes": "973"}, {"name": "Lua", "bytes": "4774"}]}} {"text": "package pl.darknessNight.AutoUpdateLauncher;\n\nimport org.apache.commons.net.ftp.FTP;\nimport org.apache.commons.net.ftp.FTPReply;\n\nimport 
java.io.IOException;\nimport java.io.OutputStream;\n\nclass LoginException extends RuntimeException{}\n\npublic class FTPClient {\n org.apache.commons.net.ftp.FTPClient ftpLowLevelClient = null;\n FTPClient(){\n ftpLowLevelClient =new org.apache.commons.net.ftp.FTPClient();\n }\n FTPClient(org.apache.commons.net.ftp.FTPClient client) {\n ftpLowLevelClient =client;\n }\n\n public void RetrieveDataToStreamFromUrl(String url, OutputStream stream) throws IOException{\n ConnectToServerFromUrl(url);\n String filename=getFilenameFromUrl(url);\n RetrieveFile(filename,stream);\n Disconnect();\n }\n\n private static String getFilenameFromUrl(String url){\n return new String();\n }\n\n public void ConnectToServerFromUrl(String url) throws IOException {\n String host=getHostFromUrl(url);\n String user=getUserFromUrl(url);\n String pass=getPassFromUrl(url);\n if(user.isEmpty())\n ConnectToServerAnonymousAndPrepareConnection(host);\n else\n ConnectToServerAndPrepareConnection(host,user,pass);\n }\n\n private static String getHostFromUrl(String url){\n return new String();\n }\n\n private static String getUserFromUrl(String url){\n return new String();\n }\n\n private static String getPassFromUrl(String url){\n return new String();\n }\n\n public void ConnectToServerAndPrepareConnection(String host, String user, String pwd) throws IOException {\n ConnectToServer(host);\n LoginToServer(user, pwd);\n PrepareConnection();\n }\n\n private void LoginToServer(String user, String pwd) throws IOException {\n if(!ftpLowLevelClient.login(user, pwd))\n throw new LoginException();\n }\n\n public void ConnectToServer(String host) throws IOException {\n int reply;\n ftpLowLevelClient.connect(host);\n reply = ftpLowLevelClient.getReplyCode();\n if (!FTPReply.isPositiveCompletion(reply)) {\n ftpLowLevelClient.disconnect();\n throw new IOException(\"Exception in connecting to FTP Server\");\n }\n }\n\n public void ConnectToServerAnonymousAndPrepareConnection(String host) throws IOException {\n 
ConnectToServer(host);\n PrepareConnection();\n }\n\n private void PrepareConnection() throws IOException {\n ftpLowLevelClient.setFileType(FTP.BINARY_FILE_TYPE);\n ftpLowLevelClient.enterLocalPassiveMode();\n }\n\n public void RetrieveFile(String remoteFilePath, OutputStream stream) throws IOException {\n this.ftpLowLevelClient.retrieveFile(remoteFilePath, stream);\n }\n\n public void Disconnect() {\n if (ftpLowLevelClient.isConnected()) {\n try {\n ftpLowLevelClient.logout();\n } catch (IOException f) {\n //pass\n }\n try {\n ftpLowLevelClient.disconnect();\n } catch (IOException f) {\n //pass\n }\n }\n }\n}\n", "meta": {"content_hash": "525da33418b41da4939c7048b448f470", "timestamp": "", "source": "github", "line_count": 102, "max_line_length": 110, "avg_line_length": 30.833333333333332, "alnum_prop": 0.6597774244833068, "repo_name": "darknessNight/AutoUpdateLauncher", "id": "443966e8f5e556196345c5583e92f2f4a55dec01", "size": "3145", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "AutoUpdateLauncher/src/main/java/pl/darknessNight/AutoUpdateLauncher/FTPClient.java", "mode": "33188", "license": "mit", "language": [{"name": "Java", "bytes": "33976"}]}} {"text": "\ufeffusing System;\r\nusing System.Linq;\r\nusing System.Threading.Tasks;\r\nusing ECommon.Components;\r\nusing ECommon.Dapper;\r\nusing ECommon.IO;\r\nusing ECommon.Logging;\r\nusing ECommon.Utilities;\r\nusing ENode.Eventing;\r\nusing ENode.Infrastructure;\r\nusing MySql.Data.MySqlClient;\r\n\r\nnamespace ENode.MySQL\r\n{\r\n public class MySqlPublishedVersionStore : IPublishedVersionStore\r\n {\r\n private const string EventSingleTableNameFormat = \"`{0}`\";\r\n private const string EventTableNameFormat = \"`{0}_{1}`\";\r\n\r\n #region Private Variables\r\n\r\n private string _connectionString;\r\n private string _tableName;\r\n private int _tableCount;\r\n private string _uniqueIndexName;\r\n private ILogger _logger;\r\n private ITimeProvider _timeProvider;\r\n private IOHelper 
_ioHelper;\r\n\r\n #endregion\r\n\r\n public MySqlPublishedVersionStore Initialize(string connectionString, string tableName = \"PublishedVersion\", int tableCount = 1, string uniqueIndexName = \"IX_PublishedVersion_AggId_Version\")\r\n {\r\n _connectionString = connectionString;\r\n _tableName = tableName;\r\n _tableCount = tableCount;\r\n _uniqueIndexName = uniqueIndexName;\r\n\r\n Ensure.NotNull(_connectionString, \"_connectionString\");\r\n Ensure.NotNull(_tableName, \"_tableName\");\r\n Ensure.Positive(_tableCount, \"_tableCount\");\r\n Ensure.NotNull(_uniqueIndexName, \"_uniqueIndexName\");\r\n\r\n _logger = ObjectContainer.Resolve().Create(GetType().FullName);\r\n _timeProvider = ObjectContainer.Resolve();\r\n _ioHelper = ObjectContainer.Resolve();\r\n\r\n return this;\r\n }\r\n public async Task UpdatePublishedVersionAsync(string processorName, string aggregateRootTypeName, string aggregateRootId, int publishedVersion)\r\n {\r\n await _ioHelper.TryIOActionAsync(async () =>\r\n {\r\n if (publishedVersion == 1)\r\n {\r\n try\r\n {\r\n using (var connection = GetConnection())\r\n {\r\n var currentTime = _timeProvider.GetCurrentTime();\r\n await connection.OpenAsync().ConfigureAwait(false);\r\n await connection.InsertAsync(new\r\n {\r\n ProcessorName = processorName,\r\n AggregateRootTypeName = aggregateRootTypeName,\r\n AggregateRootId = aggregateRootId,\r\n Version = 1,\r\n CreatedOn = currentTime,\r\n UpdatedOn = currentTime\r\n }, GetTableName(aggregateRootId)).ConfigureAwait(false);\r\n }\r\n }\r\n catch (MySqlException ex)\r\n {\r\n if (ex.Number == 1062 && ex.Message.Contains(_uniqueIndexName))\r\n {\r\n return;\r\n }\r\n var errorMessage = string.Format(\"Insert aggregate published version has sql exception, aggregateRootType: {0}, aggregateRootId: {1}\", aggregateRootTypeName, aggregateRootId);\r\n _logger.Error(errorMessage, ex);\r\n throw;\r\n }\r\n catch (Exception ex)\r\n {\r\n var errorMessage = string.Format(\"Insert aggregate published version 
has unknown exception, aggregateRootType: {0}, aggregateRootId: {1}\", aggregateRootTypeName, aggregateRootId);\r\n _logger.Error(errorMessage, ex);\r\n throw;\r\n }\r\n }\r\n else\r\n {\r\n try\r\n {\r\n using (var connection = GetConnection())\r\n {\r\n await connection.OpenAsync().ConfigureAwait(false);\r\n await connection.UpdateAsync(\r\n new\r\n {\r\n Version = publishedVersion,\r\n UpdatedOn = _timeProvider.GetCurrentTime()\r\n },\r\n new\r\n {\r\n ProcessorName = processorName,\r\n AggregateRootId = aggregateRootId,\r\n Version = publishedVersion - 1\r\n }, GetTableName(aggregateRootId)).ConfigureAwait(false);\r\n }\r\n }\r\n catch (MySqlException ex)\r\n {\r\n var errorMessage = string.Format(\"Update aggregate published version has sql exception, aggregateRootType: {0}, aggregateRootId: {1}\", aggregateRootTypeName, aggregateRootId);\r\n _logger.Error(errorMessage, ex);\r\n throw;\r\n }\r\n catch (Exception ex)\r\n {\r\n var errorMessage = string.Format(\"Update aggregate published version has unknown exception, aggregateRootType: {0}, aggregateRootId: {1}\", aggregateRootTypeName, aggregateRootId);\r\n _logger.Error(errorMessage, ex);\r\n throw;\r\n }\r\n }\r\n }, \"UpdatePublishedVersionAsync\");\r\n }\r\n public async Task GetPublishedVersionAsync(string processorName, string aggregateRootTypeName, string aggregateRootId)\r\n {\r\n return await _ioHelper.TryIOFuncAsync(async () =>\r\n {\r\n try\r\n {\r\n using (var connection = GetConnection())\r\n {\r\n await connection.OpenAsync().ConfigureAwait(false);\r\n var result = await connection.QueryListAsync(new\r\n {\r\n ProcessorName = processorName,\r\n AggregateRootId = aggregateRootId\r\n }, GetTableName(aggregateRootId), \"Version\").ConfigureAwait(false);\r\n return result.SingleOrDefault();\r\n }\r\n }\r\n catch (MySqlException ex)\r\n {\r\n var errorMessage = string.Format(\"Get aggregate published version has sql exception, aggregateRootType: {0}, aggregateRootId: {1}\", aggregateRootTypeName, 
aggregateRootId);\r\n _logger.Error(errorMessage, ex);\r\n throw;\r\n }\r\n catch (Exception ex)\r\n {\r\n var errorMessage = string.Format(\"Get aggregate published version has unknown exception, aggregateRootType: {0}, aggregateRootId: {1}\", aggregateRootTypeName, aggregateRootId);\r\n _logger.Error(errorMessage, ex);\r\n throw;\r\n }\r\n }, \"GetPublishedVersionAsync\");\r\n }\r\n\r\n private int GetTableIndex(string aggregateRootId)\r\n {\r\n int hash = 23;\r\n foreach (char c in aggregateRootId)\r\n {\r\n hash = (hash << 5) - hash + c;\r\n }\r\n if (hash < 0)\r\n {\r\n hash = Math.Abs(hash);\r\n }\r\n return hash % _tableCount;\r\n }\r\n private string GetTableName(string aggregateRootId)\r\n {\r\n if (_tableCount <= 1)\r\n {\r\n return string.Format(EventSingleTableNameFormat, _tableName);\r\n }\r\n\r\n var tableIndex = GetTableIndex(aggregateRootId);\r\n\r\n return string.Format(EventTableNameFormat, _tableName, tableIndex);\r\n }\r\n private MySqlConnection GetConnection()\r\n {\r\n return new MySqlConnection(_connectionString);\r\n }\r\n }\r\n}\r\n", "meta": {"content_hash": "c233f89332dc3bfb0ef1a1df06d23501", "timestamp": "", "source": "github", "line_count": 187, "max_line_length": 203, "avg_line_length": 43.94652406417112, "alnum_prop": 0.4930640058408372, "repo_name": "tangxuehua/enode", "id": "53854d4f526deae3e922635a002e225481ad539c", "size": "8220", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/ENode.MySQL/MySqlPublishedVersionStore.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "570099"}, {"name": "TSQL", "bytes": "1681"}]}} {"text": "package com.wuyin.supermarket.httpresult;\n\nimport com.wuyin.supermarket.httpresult.base.BaseHttpRequest;\nimport com.wuyin.supermarket.model.AppInfo;\n\nimport org.json.JSONArray;\nimport org.json.JSONException;\nimport org.json.JSONObject;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n/**\n * Created by yinlong on 2016/5/11.\n */\npublic class 
DetailHttpRequest extends BaseHttpRequest {\n\n String packageName;\n\n public DetailHttpRequest(String packageName) {\n this.packageName = packageName;\n }\n\n @Override\n public AppInfo parseJson(String results) {\n\n try {\n JSONObject result = new JSONObject(results);\n String id = result.getString(\"id\");\n String name = result.getString(\"name\");\n String packageName = result.getString(\"packageName\");\n String iconUrl = result.getString(\"iconUrl\");\n float stars = result.getLong(\"stars\");\n long size = result.getLong(\"size\");\n String downLoadUrl = result.getString(\"downloadUrl\");\n String des = result.getString(\"des\");\n\n String downloadNum = result.getString(\"downloadNum\");\n String version = result.getString(\"version\");\n String date = result.getString(\"date\");\n String author = result.getString(\"author\");\n\n List screen = new ArrayList<>();\n JSONArray screenArray = result.getJSONArray(\"screen\");\n for (int i = 0; i < screenArray.length(); i++) {\n screen.add(screenArray.getString(i));\n }\n\n List safeUrl = new ArrayList<>();\n List safeDesUrl = new ArrayList<>();\n List safeDes = new ArrayList<>();\n List safeDesColor = new ArrayList<>();\n JSONArray safeUrlArray = result.getJSONArray(\"safe\");\n for (int i = 0; i < safeUrlArray.length(); i++) {\n JSONObject jsonObject = safeUrlArray.getJSONObject(i);\n safeUrl.add(jsonObject.getString(\"safeUrl\"));\n safeDesUrl.add(jsonObject.getString(\"safeDesUrl\"));\n safeDes.add(jsonObject.getString(\"safeDes\"));\n safeDesColor.add(jsonObject.getInt(\"safeDesColor\"));\n }\n\n AppInfo appInfo = new AppInfo(id, name, packageName, iconUrl, stars, size, downLoadUrl, des,\n downloadNum, version, date, author, screen,\n safeUrl, safeDesUrl, safeDes, safeDesColor);\n\n return appInfo;\n } catch (JSONException e) {\n e.printStackTrace();\n return null;\n }\n }\n\n @Override\n public String getKey() {\n return \"detail\";\n }\n\n /**\n * \u989d\u5916\u5e26\u7684\u53c2\u6570\n *\n * @return\n 
*/\n @Override\n public String getParams() {\n return \"&packageName=\" + packageName;\n }\n}\n", "meta": {"content_hash": "354dff053eb2155a5581ef3414b7f243", "timestamp": "", "source": "github", "line_count": 87, "max_line_length": 104, "avg_line_length": 33.10344827586207, "alnum_prop": 0.5954861111111112, "repo_name": "wuyinlei/SuperMarket", "id": "88cfe134012b38ecdb5e26f4757c018e8dc8b213", "size": "2892", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/wuyin/supermarket/httpresult/DetailHttpRequest.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Groff", "bytes": "1625"}, {"name": "Java", "bytes": "173264"}]}} {"text": "\ufeffusing System;\n\nnamespace Tasks\n{\n\tpublic interface IConsole\n\t{\n\t\tstring ReadLine();\n\n\t\tvoid Write(string format, params object[] args);\n\n\t\tvoid WriteLine(string format, params object[] args);\n\n\t\tvoid WriteLine();\n\t}\n}\n", "meta": {"content_hash": "2b2314003fa642ad3a768aa5c9cd5761", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 54, "avg_line_length": 14.6, "alnum_prop": 0.6986301369863014, "repo_name": "codurance/task-list", "id": "05fc18cbe670b34a4360ad8eb5adba8da46d53a4", "size": "221", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "csharp/Tasks/IConsole.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "11356"}, {"name": "Go", "bytes": "8897"}, {"name": "Java", "bytes": "12921"}, {"name": "Kotlin", "bytes": "3918"}, {"name": "Python", "bytes": "7146"}, {"name": "Ruby", "bytes": "5730"}, {"name": "Scala", "bytes": "5911"}, {"name": "Shell", "bytes": "531"}, {"name": "TypeScript", "bytes": "8608"}]}} {"text": "\npackage business;\n\nimport business.category.CategoryDao;\nimport business.category.CategoryDaoGuava;\nimport business.category.CategoryDaoJdbc;\nimport business.category.CategoryService;\nimport business.category.DefaultCategoryService;\nimport 
business.customer.CustomerDao;\nimport business.customer.CustomerDaoJdbc;\nimport business.customer.CustomerService;\nimport business.customer.DefaultCustomerService;\nimport business.order.CustomerOrderDao;\nimport business.order.CustomerOrderDaoJdbc;\nimport business.order.CustomerOrderLineItemDao;\nimport business.order.CustomerOrderLineItemDaoJdbc;\nimport business.order.CustomerOrderService;\nimport business.order.DefaultCustomerOrderService;\nimport business.product.DefaultProductService;\nimport business.product.ProductDao;\nimport business.product.ProductDaoGuava;\nimport business.product.ProductDaoJdbc;\nimport business.product.ProductService;\nimport java.util.concurrent.ScheduledExecutorService;\nimport java.util.concurrent.ScheduledThreadPoolExecutor;\nimport java.util.concurrent.TimeUnit;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n */\npublic final class ApplicationContext {\n\n private final Logger logger = LoggerFactory.getLogger(getClass());\n\n private ProductService productService;\n\n private CategoryService categoryService;\n\n private CustomerService customerService;\n\n private CustomerOrderService customerOrderService;\n\n private ScheduledExecutorService executorService;\n\n public static ApplicationContext INSTANCE = new ApplicationContext();\n\n private ApplicationContext() {\n\n executorService = new ScheduledThreadPoolExecutor(Runtime.getRuntime().availableProcessors());\n\n // wire up the business.dao layer \"by hand\"\n ProductDao productDao = new ProductDaoJdbc();\n\n ProductDaoGuava cachedProductDao = new ProductDaoGuava(productDao);\n\n productService = new DefaultProductService();\n ((DefaultProductService) productService).setProductDao(cachedProductDao);\n\n CategoryDao categoryDao = new CategoryDaoJdbc();\n\n CategoryDaoGuava cachedCategoryDao = new CategoryDaoGuava(categoryDao);\n\n categoryService = new DefaultCategoryService();\n ((DefaultCategoryService) 
categoryService).setCategoryDao(cachedCategoryDao);\n\n CustomerDao customerDao = new CustomerDaoJdbc();\n customerService = new DefaultCustomerService();\n ((DefaultCustomerService) customerService).setCustomerDao(customerDao);\n\n CustomerOrderLineItemDao customerOrderLineItemDao = new CustomerOrderLineItemDaoJdbc();\n CustomerOrderDao customerOrderDao = new CustomerOrderDaoJdbc();\n\n customerOrderService = new DefaultCustomerOrderService();\n DefaultCustomerOrderService service = (DefaultCustomerOrderService) customerOrderService;\n service.setProductDao(cachedProductDao);\n service.setCustomerService(customerService);\n service.setCustomerOrderDao(customerOrderDao);\n service.setCustomerOrderLineItemDao(customerOrderLineItemDao);\n\n executorService.scheduleWithFixedDelay(() -> {\n try {\n logger.info(\"Refreshing category and product caches....commencing\");\n cachedCategoryDao.bulkload();\n cachedProductDao.clear();\n logger.info(\"Refreshing category and product caches....complete!\");\n } catch (Throwable t) {\n logger.error(\"Encountered trouble refreshing category and product caches.\", t);\n }\n }, 10, 60, TimeUnit.MINUTES);\n }\n\n\n\n public ProductService getProductService() {\n return productService;\n }\n\n public CategoryService getCategoryService() {\n return categoryService;\n }\n\n public CustomerService getCustomerService() {\n return customerService;\n }\n\n public CustomerOrderService getCustomerOrderService() {\n return customerOrderService;\n }\n\n public void shutdown() {\n executorService.shutdown();\n }\n}\n", "meta": {"content_hash": "2e5e5491b83058a8f3e193d14d4514b9", "timestamp": "", "source": "github", "line_count": 114, "max_line_length": 102, "avg_line_length": 35.41228070175438, "alnum_prop": 0.7485756750061927, "repo_name": "nowucca/SimpleAffableBean", "id": "94fc291274aed1406f2df203c0ba6f1c017344ae", "size": "5660", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": 
"src/main/java/business/ApplicationContext.java", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "CSS", "bytes": "9504"}, {"name": "Java", "bytes": "343995"}, {"name": "JavaScript", "bytes": "4976"}]}} {"text": "package org.zstack.sdk.iam2.api;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport org.zstack.sdk.*;\n\npublic class AddAttributesToIAM2VirtualIDGroupAction extends AbstractAction {\n\n private static final HashMap parameterMap = new HashMap<>();\n\n private static final HashMap nonAPIParameterMap = new HashMap<>();\n\n public static class Result {\n public ErrorCode error;\n public org.zstack.sdk.iam2.api.AddAttributesToIAM2VirtualIDGroupResult value;\n\n public Result throwExceptionIfError() {\n if (error != null) {\n throw new ApiException(\n String.format(\"error[code: %s, description: %s, details: %s]\", error.code, error.description, error.details)\n );\n }\n \n return this;\n }\n }\n\n @Param(required = true, nonempty = false, nullElements = false, emptyString = true, noTrim = false)\n public java.lang.String uuid;\n\n @Param(required = true, nonempty = true, nullElements = false, emptyString = true, noTrim = false)\n public java.util.List attributes;\n\n @Param(required = false)\n public java.util.List systemTags;\n\n @Param(required = false)\n public java.util.List userTags;\n\n @Param(required = false)\n public String sessionId;\n\n @Param(required = false)\n public String accessKeyId;\n\n @Param(required = false)\n public String accessKeySecret;\n\n @Param(required = false)\n public String requestIp;\n\n @NonAPIParam\n public long timeout = -1;\n\n @NonAPIParam\n public long pollingInterval = -1;\n\n\n private Result makeResult(ApiResult res) {\n Result ret = new Result();\n if (res.error != null) {\n ret.error = res.error;\n return ret;\n }\n \n org.zstack.sdk.iam2.api.AddAttributesToIAM2VirtualIDGroupResult value = res.getResult(org.zstack.sdk.iam2.api.AddAttributesToIAM2VirtualIDGroupResult.class);\n ret.value = 
value == null ? new org.zstack.sdk.iam2.api.AddAttributesToIAM2VirtualIDGroupResult() : value; \n\n return ret;\n }\n\n public Result call() {\n ApiResult res = ZSClient.call(this);\n return makeResult(res);\n }\n\n public void call(final Completion completion) {\n ZSClient.call(this, new InternalCompletion() {\n @Override\n public void complete(ApiResult res) {\n completion.complete(makeResult(res));\n }\n });\n }\n\n protected Map getParameterMap() {\n return parameterMap;\n }\n\n protected Map getNonAPIParameterMap() {\n return nonAPIParameterMap;\n }\n\n protected RestInfo getRestInfo() {\n RestInfo info = new RestInfo();\n info.httpMethod = \"POST\";\n info.path = \"/iam2/projects/groups/{uuid}/attributes\";\n info.needSession = true;\n info.needPoll = true;\n info.parameterName = \"params\";\n return info;\n }\n\n}\n", "meta": {"content_hash": "09c352ce3b0910ad195b63ff810302eb", "timestamp": "", "source": "github", "line_count": 104, "max_line_length": 165, "avg_line_length": 28.951923076923077, "alnum_prop": 0.6343407505812022, "repo_name": "zstackorg/zstack", "id": "a5bb512701d861945cdf0827326af451608d0112", "size": "3011", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "sdk/src/main/java/org/zstack/sdk/iam2/api/AddAttributesToIAM2VirtualIDGroupAction.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "AspectJ", "bytes": "54952"}, {"name": "Batchfile", "bytes": "1132"}, {"name": "Groovy", "bytes": "832169"}, {"name": "Java", "bytes": "15798995"}, {"name": "Shell", "bytes": "152829"}]}} {"text": "FROM balenalib/surface-go-fedora:32-build\n\n# http://bugs.python.org/issue19846\n# > At the moment, setting \"LANG=C\" on a Linux system *fundamentally breaks Python 3*, and that's not OK.\nENV LANG C.UTF-8\n\nRUN dnf install -y \\\n\t\tpython-pip \\\n\t\tpython-dbus \\\n\t&& dnf clean all\n\n# install \"virtualenv\", since the vast majority of users of this image will want it\nRUN pip install -U --no-cache-dir 
--ignore-installed pip setuptools \\\n\t&& pip install --no-cache-dir virtualenv\n\nRUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'As of January 1st, 2020, Python 2 was end-of-life, we will change the latest tag for Balenalib Python base image to Python 3.x and drop support for Python 2 soon. So after 1st July, 2020, all the balenalib Python latest tag will point to the latest Python 3 version and no changes, or fixes will be made to balenalib Python 2 base image. If you are using Python 2 for your application, please upgrade to Python 3 before 1st July.' > /.balena/messages/python-deprecation-warning\n\nCMD [\"echo\",\"'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs\"]\n\n RUN curl -SLO \"https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@python.sh\" \\\n && echo \"Running test-stack@python\" \\\n && chmod +x test-stack@python.sh \\\n && bash test-stack@python.sh \\\n && rm -rf test-stack@python.sh \n\nRUN [ ! 
-d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \\nArchitecture: Intel 64-bit (x86-64) \\nOS: Fedora 32 \\nVariant: build variant \\nDefault variable(s): UDEV=off \\nThe following software stack is preinstalled: \\nPython v2.7.18, Pip v21.0.1, Setuptools v56.0.0 \\nExtra features: \\n- Easy way to install packages with `install_packages ` command \\n- Run anywhere with cross-build feature (for ARM only) \\n- Keep the container idling with `balena-idle` command \\n- Show base image details with `balena-info` command' > /.balena/messages/image-info\n\nRUN echo $'#!/bin/sh.real\\nbalena-info\\nrm -f /bin/sh\\ncp /bin/sh.real /bin/sh\\n/bin/sh \"$@\"' > /bin/sh-shim \\\n\t&& chmod +x /bin/sh-shim \\\n\t&& cp /bin/sh /bin/sh.real \\\n\t&& mv /bin/sh-shim /bin/sh", "meta": {"content_hash": "ddfcb8d79b3743fee522f3d7d4815a8f", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 723, "avg_line_length": 78.3225806451613, "alnum_prop": 0.7327018121911038, "repo_name": "nghiant2710/base-images", "id": "b2d05371eeaac3c8ba49fecf515ff9924ba7265c", "size": "2449", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "balena-base-images/python/surface-go/fedora/32/2.7.18/build/Dockerfile", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Dockerfile", "bytes": "144558581"}, {"name": "JavaScript", "bytes": "16316"}, {"name": "Shell", "bytes": "368690"}]}} {"text": "/* IBM_PROLOG_BEGIN_TAG */\n/* This is an automatically generated prolog. */\n/* */\n/* $Source: src/import/generic/memory/lib/utils/omi/gen_omi_traits.H $ */\n/* */\n/* OpenPOWER HostBoot Project */\n/* */\n/* Contributors Listed Below - COPYRIGHT 2019,2022 */\n/* [+] International Business Machines Corp. 
*/\n/* */\n/* */\n/* Licensed under the Apache License, Version 2.0 (the \"License\"); */\n/* you may not use this file except in compliance with the License. */\n/* You may obtain a copy of the License at */\n/* */\n/* http://www.apache.org/licenses/LICENSE-2.0 */\n/* */\n/* Unless required by applicable law or agreed to in writing, software */\n/* distributed under the License is distributed on an \"AS IS\" BASIS, */\n/* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or */\n/* implied. See the License for the specific language governing */\n/* permissions and limitations under the License. */\n/* */\n/* IBM_PROLOG_END_TAG */\n\n///\n/// @file gen_omi_traits.H\n/// @brief Generic Traits file for OMI code\n///\n// *HWP HWP Owner: Geetha Pisapati \n// *HWP HWP Backup: Louis Stermole \n// *HWP Team: Memory\n// *HWP Level: 3\n// *HWP Consumed by: HB:FSP\n// EKB-Mirror-To: hostboot\n\n#ifndef _GEN_OMI_TRAITS_H_\n#define _GEN_OMI_TRAITS_H_\n\n#include \n\nnamespace mss\n{\nnamespace omi\n{\n///\n/// @class omiTraits\n/// @tparam MC the mc type\n/// @brief A MC to MC_TARGET_TYPE mapping\n///\ntemplate< mss::mc_type MC >\nclass omiTraits;\n\n\n} // end omi ns\n\n} // end mss ns\n#endif\n", "meta": {"content_hash": "e35ec671ba774cc82c7fc6c525b0dd9b", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 76, "avg_line_length": 41.93103448275862, "alnum_prop": 0.41776315789473684, "repo_name": "open-power/hostboot", "id": "1b773aebb7d2f240ab1dce4ae6d8ebe4ae9ea040", "size": "2432", "binary": false, "copies": "1", "ref": "refs/heads/master-p10", "path": "src/import/generic/memory/lib/utils/omi/gen_omi_traits.H", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Assembly", "bytes": "84276"}, {"name": "C", "bytes": "29945981"}, {"name": "C++", "bytes": "126613976"}, {"name": "CMake", "bytes": "1852"}, {"name": "Lex", "bytes": "8996"}, {"name": "M4", "bytes": "5738"}, {"name": "Makefile", "bytes": "772285"}, {"name": "Meson", 
"bytes": "23911"}, {"name": "Perl", "bytes": "2605582"}, {"name": "Python", "bytes": "2602753"}, {"name": "Shell", "bytes": "290164"}, {"name": "Tcl", "bytes": "76031"}, {"name": "XSLT", "bytes": "9553"}, {"name": "Yacc", "bytes": "29440"}]}} {"text": "value;\r\n }\r\n}\r\n", "meta": {"content_hash": "d0f1135f8efd4553b6283dbcafed1f77", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 81, "avg_line_length": 20.25, "alnum_prop": 0.6313932980599647, "repo_name": "OtsList/OtsList.eu-AAC-for-OpenTibia", "id": "42ec55d4e50e1931c623413dca441e207231e652", "size": "900", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "vendor/ZF2/library/Zend/Form/Annotation/Hydrator.php", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "PHP", "bytes": "464533"}]}} {"text": "[![standard-readme compliant](https://img.shields.io/badge/readme%20style-standard-brightgreen.svg)](https://github.com/RichardLitt/standard-readme)\n[![Gitter](https://img.shields.io/gitter/room/nwjs/nw.js.svg)][Gitter]\n[![Releases](https://img.shields.io/github/downloads/atom/atom/total.svg)][Releases]\n\n> Ethereum miner with OpenCL, CUDA and stratum support\n\nThe ethminer is an Ethereum GPU mining worker. This is the actively maintained version of ethminer. It originates from [cpp-ethereum] project (where GPU mining has been discontinued) and builds on the improvements made in [Genoil's fork]. 
See [FAQ](#faq) for more details.\n\n### Features\n\n- OpenCL mining\n- Nvidia CUDA mining\n- realistic benchmarking against arbitrary epoch/DAG/blocknumber\n- on-GPU DAG generation (no more DAG files on disk)\n- stratum mining without proxy\n- OpenCL devices picking\n- farm failover (getwork + stratum)\n\n\n## Table of Contents\n\n- [Install](#install)\n- [Usage](#usage)\n- [Build](#build)\n - [Continuous Integration and development builds](#continuous-integration-and-development-builds)\n - [Building from source](#building-from-source)\n - [CMake configuration options](#cmake-configuration-options)\n- [Maintainer](#maintainer) \n- [Contribute](#contribute)\n- [F.A.Q.](#faq)\n\n\n## Install\n\n[![Releases](https://img.shields.io/github/downloads/atom/atom/total.svg)][Releases]\n\nStandalone **executables** for _Linux_, _macOS_ and _Windows_ are provided in\nthe [Releases] section.\nDownload an archive for your operating system and unpack the content to a place\naccessible from command line. The ethminer is ready to go.\n\n\n## Usage\n\nThe **ethminer** is a command line program. This means you launch it either\nfrom a Windows command prompt or Linux console, or create shortcuts to\npredefined command lines using a Linux Bash script or Windows batch/cmd file.\nFor a full list of available command, please run\n\n```sh\nethminer --help\n```\n\n\n## Build\n\n### Continuous Integration and development builds\n\n| CI | OS | Status | Development builds |\n| ------------- | ------------- | ----- | ----------------- |\n| [Travis CI] | Linux, macOS | [![Travis CI](https://img.shields.io/travis/ethereum-mining/ethminer.svg)][Travis CI] | \u2717 No build artifacts, [Amazon S3 is needed] for this |\n| [AppVeyor] | Windows | [![AppVeyor](https://img.shields.io/appveyor/ci/ethereum-mining/ethminer.svg)][AppVeyor] | \u2713 Build artifacts available for all PRs and branches |\n\nThe AppVeyor system automatically builds a Windows .exe for every commit. 
The latest version is always available [on the landing page](https://ci.appveyor.com/project/ethereum-mining/ethminer) or you can [browse the history](https://ci.appveyor.com/project/ethereum-mining/ethminer/history) to access previous builds.\n\nTo download the .exe on a build under 'JOB NAME' select 'Configuration: Release', choose 'ARTIFACTS' then download the zip file.\n\n\n### Building from source\n\nThis project uses [CMake] and [Hunter] package manager.\n\n1. Create a build directory.\n\n ```sh\n mkdir build; cd build\n ```\n\n2. Configure the project with CMake. Check out additional\n [configuration options](#cmake-configuration-options).\n\n ```sh\n cmake ..\n ```\n\n3. Build the project using [CMake Build Tool Mode]. This is a portable variant\n of `make`.\n\n ```sh\n cmake --build .\n ```\n\n4. _(Optional, Linux only)_ Install the built executable.\n\n ```sh\n sudo make install\n ```\n\n#### OpenCL support on Linux\n\nIf you're planning to use [OpenCL on Linux](https://github.com/ruslo/hunter/wiki/pkg.opencl#pitfalls)\nyou have to install OpenGL libraries. E.g. on Ubuntu run:\n\n```sh\nsudo apt-get install mesa-common-dev\n```\n\n#### Disable Hunter\n\nIf you want to install dependencies yourself or use system package manager\nyou can disable Hunter by adding\n[-DHUNTER_ENABLED=OFF](https://docs.hunter.sh/en/latest/reference/user-variables.html#hunter-enabled)\nto configuration options.\n\n### CMake configuration options\n\nPass these options to CMake configuration command, e.g.\n\n```sh\ncmake .. 
-DETHASHCUDA=ON -DETHASHCL=OFF\n```\n\n- `-DETHASHCL=ON` - enable OpenCL mining, `ON` by default,\n- `-DETHASHCUDA=ON` - enable CUDA mining, `OFF` by default,\n- `-DETHSTRATUM=ON` - build with Stratum protocol support, `ON` by default.\n\n\n## Maintainer\n\n[![Gitter](https://img.shields.io/gitter/room/ethereum-mining/ethminer.svg)][Gitter]\n\n- Pawe\u0142 Bylica [@chfast](https://github.com/chfast)\n\n\n## Contribute\n\n[![Gitter](https://img.shields.io/gitter/room/ethereum-mining/ethminer.svg)][Gitter]\n\nTo meet the community, ask general questions and chat about ethminer join [the ethminer channel on Gitter][Gitter].\n\nAll bug reports, pull requests and code reviews are very much welcome.\n\n\n## F.A.Q\n\n1. Why is my hashrate with Nvidia cards on Windows 10 so low?\n\n The new WDDM 2.x driver on Windows 10 uses a different way of addressing the GPU. This is good for a lot of things, but not for ETH mining.\n For Kepler GPUs: I actually don't know. Please let me know what works best for good old Kepler.\n For Maxwell 1 GPUs: Unfortunately the issue is a bit more serious on the GTX750Ti, already causing suboptimal performance on Win7 and Linux. Apparently about 4MH/s can still be reached on Linux, which, depending on ETH price, could still be profitable, considering the relatively low power draw.\n For Maxwell 2 GPUs: There is a way of mining ETH at Win7/8/Linux speeds on Win10, by downgrading the GPU driver to a Win7 one (350.12 recommended) and using a build that was created using CUDA 6.5.\n For Pascal GPUs: You have to use the latest WDDM 2.1 compatible drivers in combination with Windows 10 Anniversary edition in order to get the full potential of your Pascal GPU.\n\n2. Why is a GTX 1080 slower than a GTX 1070?\n\n Because of the GDDR5X memory, which can't be fully utilized for ETH mining (yet).\n\n3. Are AMD cards also affected by slowdowns with increasing DAG size?\n\n Only GCN 1.0 GPUs (78x0, 79x0, 270, 280), but in a different way. 
You'll see that on each new epoch (30K blocks), the hashrate will go down a little bit.\n\n4. Can I still mine ETH with my 2GB GPU?\n\n No.\n\n5. What are the optimal launch parameters?\n\n The default parameters are fine in most scenario's (CUDA). For OpenCL it varies a bit more. Just play around with the numbers and use powers of 2. GPU's like powers of 2.\n \n6. What does the ```--cuda-parallel-hash``` flag do?\n\n @davilizh made improvements to the CUDA kernel hashing process and added this flag to allow changing the number of tasks it runs in parallel. These improvements were optimised for GTX 1060 GPUs which saw a large increase in hashrate, GTX 1070 and GTX 1080/Ti GPUs saw some, but less, improvement. The default value is 4 (which does not need to be set with the flag) and in most cases this will provide the best performance.\n\n7. What is ethminer's relationship with [Genoil's fork]?\n\n [Genoil's fork] was the original source of this version, but as Genoil is no longer consistently maintaining that fork it became almost impossible for developers to get new code merged there. 
In the interests of progressing development without waiting for reviews this fork should be considered the active one and Genoil's as legacy code.\n\n\n\n\n\n[Amazon S3 is needed]: https://docs.travis-ci.com/user/uploading-artifacts/\n[AppVeyor]: https://ci.appveyor.com/project/ethereum-mining/ethminer\n[CMake]: https://cmake.org\n[CMake Build Tool Mode]: https://cmake.org/cmake/help/latest/manual/cmake.1.html#build-tool-mode\n[cpp-ethereum]: https://github.com/ethereum/cpp-ethereum\n[Genoil's fork]: https://github.com/Genoil/cpp-ethereum\n[Gitter]: https://gitter.im/ethereum-mining/ethminer\n[Hunter]: https://docs.hunter.sh\n[Releases]: https://github.com/ethereum-mining/ethminer/releases\n[Travis CI]: https://travis-ci.org/ethereum-mining/ethminer\n", "meta": {"content_hash": "7bc9a67494e3a0bcbb22bad87d85e9fd", "timestamp": "", "source": "github", "line_count": 191, "max_line_length": 426, "avg_line_length": 41.068062827225134, "alnum_prop": 0.7389087200407956, "repo_name": "quantumpayment/bopminer", "id": "8829082aa5823a7d4c1bc9b86b11dd211f98e202", "size": "7861", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "87464"}, {"name": "C++", "bytes": "568233"}, {"name": "CMake", "bytes": "34069"}, {"name": "Cuda", "bytes": "50365"}, {"name": "Shell", "bytes": "2560"}]}} {"text": "from accessData import getHDFData\nfrom voronoi import voronoi\nimport inSituCluster as clusterLib\nimport matplotlib.pyplot as plt\nfrom IPython import embed\nimport numpy as np\n#import cv2\nimport os\nimport scipy.spatial as kd\n\nkdt = None\narr = None\ngradx = None\ngrady = None\ngrad = None\nimgCntr = 0\nyPts = None\nxPts = None\n\n\ndef createHist(inputData, lowerBound=None, upperBound=None, ignore=None):\n \"\"\" Create Histogram with matplotlib\n Parameters:\n data - The input data to be processed\n lowerBound - The smallest value to consider for the histogram\n upperBound - the 
largest value to consider for the histogram\n ignore - A list of values to ignore from the histogram\n \"\"\"\n\n data = inputData.copy()\n if ignore is not None:\n for val in ignore: # Consecutively remove the values to be ignored\n data = data[np.where(data != val)]\n if lowerBound is None:\n lowerBound = data.min()\n if upperBound is None:\n upperBound = data.max()\n sampleData = data[np.where(data < upperBound)]\n sampleData = sampleData[np.where(sampleData >= lowerBound)]\n sampleSize = sampleData.size\n# Scott's (1979) bin width. Based on the standard deviation\n# and the data size\n n = np.power(sampleSize, 1.0 / 3)\n std = np.std(sampleData)\n bin_width = (3.49 * std) / n\n histSize = int((upperBound - lowerBound) / bin_width)\n ranges = (lowerBound, upperBound)\n hist = plt.hist(sampleData, histSize, ranges)\n plt.axes().set_aspect('equal')\n plt.show()\n\n return hist\n\n\ndef quantize(data, cutoff, numRegions=8, tolerance=0.0001):\n\n newData = data[::4, ::4] # Downsample data to speed up by a factor of 16\n # Next remove clouds poorly represented data\n cleanData = newData[np.logical_and(newData > 0, newData < cutoff)]\n\n tess = voronoi(cleanData, numRegions, tolerance, data)\n\n thresh = []\n for val in tess.values():\n thresh.append(np.min(val))\n thresh = np.sort(thresh)\n\n (rows, cols) = data.shape\n qData = np.ndarray((rows, cols), dtype='uint8')\n qData.fill(255)\n\n for val, t in enumerate(thresh):\n qData[data > t] = val + 1\n\n qData[data == 0] = 0 # Put back the land values\n qData[data == -1] = 255 # Put back the cloud values\n\n return qData\n\n\ndef colorMap(data):\n\n colors = [(123, 0, 255), (0, 0, 255), (0, 255, 255), (0, 255, 0),\n (123, 255, 0), (255, 255, 0), (255, 123, 0), (255, 0, 0)]\n\n (rows, cols) = data.shape\n color = np.ndarray((rows, cols, 3), dtype='uint8')\n color.fill(255)\n\n # Fill land with black\n color[data == 0] = (0, 0, 0)\n\n for i, c in enumerate(colors):\n color[data == i + 1] = c\n #plt.imshow(color)\n 
#plt.show()\n\n return color\n\n\ndef writeClouds(data):\n\n # Only clouds should be left as -1\n locs = np.where(data == -1)\n numPts = locs[0].size\n with open(\"data/clouds.dat\", 'w') as fd:\n for i in xrange(numPts):\n if i % 100000 == 0:\n print i\n x = locs[0][i]\n y = locs[1][i]\n fd.write(\"%d %d\\n\" % (x, y))\n\n\ndef findSurroundingVal(qData, cluster, clusterVal=255):\n \"\"\" Summary:\n Check and see if the cluster is surrounded by a solid value, if it\n is, return the value which surrounds it. This one checks for border\n conditions\n Parameters:\n qData: The quantized satellite data.\n cluster: A 2D array of shape (2,n) where n is the number of points\n composing the cluster. The two rows are parallel arrays of x\n then y coordinates respectively.\n Returns:\n theValue: The surrounding value found.\n \"\"\"\n\n theValue = None\n\n lk = set([(0, 1), (1, 1), (1, 0), (-1, 0), (-1, 1)]) # Left Kernel\n rk = set([(1, -1), (1, 0), (-1, 0), (-1, -1), (0, -1)]) # Right Kernel\n tk = set([(1, -1), (0, 1), (1, 1), (0, -1), (1, 0)]) # Top Kernel\n bk = set([(-1, -1), (0, 1), (-1, 1), (0, -1), (-1, 0)]) # Bottom Kernel\n mk = np.array(tuple(lk.union(rk))) # Middle Kernel\n tlk = np.array(tuple(tk.intersection(lk))) # Top Left Kernel\n trk = np.array(tuple(tk.intersection(rk))) # Top Right Kernel\n blk = np.array(tuple(bk.intersection(lk))) # Bottom Left Kernel\n brk = np.array(tuple(bk.intersection(rk))) # Bottom Right Kernel\n lk = np.array(tuple(lk))\n rk = np.array(tuple(rk))\n tk = np.array(tuple(tk))\n bk = np.array(tuple(bk))\n\n rows, cols = qData.shape\n neighbors = {}\n\n for pt in cluster.T:\n row, col = pt\n k = None\n kused = None\n if row > 0:\n if col > 0:\n if row < rows - 1:\n if col < cols - 1:\n k = (mk + pt).T\n kused = \"middle\"\n else:\n k = (rk + pt).T\n kused = \"right\"\n elif col < cols - 1:\n k = (bk + pt).T\n kused = \"bottom\"\n else:\n k = (brk + pt).T\n kused = \"bottom right\"\n elif row < rows - 1:\n k = (lk + pt).T\n kused = 
\"left\"\n else:\n k = (blk + pt).T\n kused = \"bottom left\"\n elif col > 0:\n if col < cols - 1:\n k = (tk + pt).T\n kused = \"top\"\n else:\n k = (trk + pt).T\n kused = \"top right\"\n else:\n k = (tlk + pt).T\n kused = \"top left\"\n\n try:\n values = qData[tuple(k)]\n except:\n print \"row %d out of %d\" % (row, rows)\n print \"col %d out of %d\" % (col, cols)\n print \"pt = \", pt\n print \"k used = \", kused\n print \"resulting k = \", k\n qData.fill(0)\n qData[tuple(pt)] = 255\n plt.imshow(qData)\n plt.axes().set_aspect('equal')\n plt.show()\n\n if clusterVal == 255:\n for value in values:\n if value == clusterVal or value == 0 or value == 255:\n continue\n\n if theValue is None:\n theValue = value\n elif value != theValue:\n return None\n else:\n for value in values:\n if value == clusterVal or value == 0:\n continue\n\n if value in neighbors.keys():\n neighbors[value] += 1\n else:\n neighbors[value] = 1\n\n maximum = 0\n for k, v in neighbors.iteritems():\n if v > maximum:\n theValue = k\n\n if theValue is None:\n return clusterVal\n else:\n return theValue\n\n\ndef filter(img):\n \"\"\"\n Summary: A filter to remove all points with less than 3 neighbors.\n Parameters:\n img - The image to be filtered\n Returns:\n output - The filtered dataset. 
Same dimensions as img\n \"\"\"\n\n # Add the border mask\n cpy = np.ndarray((img.shape[0] + 2, img.shape[1] + 2))\n cpy[:, 0] = 0\n cpy[0, :] = 0\n cpy[:, -1] = 0\n cpy[-1, :] = 0\n cpy[1:-1, 1:-1] = img.copy()\n h, w = cpy.shape\n for y in xrange(1, h - 2):\n for x in xrange(1, w - 2):\n val = img[y, x]\n # Extract a 3x3 sub matrix\n submat = cpy[y - 1:y + 2, x - 1:x + 2]\n #print \"submat \\n\", submat\n #print \"kernel \\n\", kernel\n numOccurence = submat[submat == val].size\n\n if numOccurence < 3:\n cpy[y, x] = 0.0 # So remove it!\n\n output = np.where(cpy == 1)\n\n return output\n\n\ndef findMaximama(array):\n \"\"\" Find the maxima of a 1D array \"\"\"\n\n binSizes = array[0]\n binVals = array[1][:-1]\n c = (np.diff(np.sign(np.diff(binSizes))) < 0).nonzero()[0] + 1 # local max\n\n # Set to True to plot the maxima\n if False:\n plt.plot(binVals, binSizes)\n plt.plot(binVals[c], binSizes[c], \"o\", label=\"max\")\n plt.legend()\n plt.axes().set_aspect('equal')\n plt.show()\n\n return binVals[c]\n\n\ndef extractStepEdges(img):\n \"\"\"\n Summary: A filter to find all step edges in the dataset:\n img - The image to be filtered\n Returns:\n output - The filtered dataset. 
Same dimensions as img\n \"\"\"\n\n subImg = img[1:-1, 1:-1]\n pts = np.array(np.where(subImg == 1))\n\n yVals = []\n xVals = []\n for y, x in pts.T:\n y += 1 # Pts were found with borders extracted\n x += 1 # Pts were found with borders extracted\n val = img[y,x]\n # Extract a 3x3 sub matrix\n submat = img[y - 1:y + 2, x - 1:x + 2]\n targetPts = np.logical_and(submat != val, submat != 255)\n numOccurence = submat[targetPts].size\n\n if numOccurence > 0:\n yVals.append(y)\n xVals.append(x)\n\n edgePts = np.array((yVals, xVals))\n\n #img[(yVals, xVals)] = 8\n #plt.imshow(colorMap(img))\n #plt.show()\n\n return edgePts\n\n\ndef buildKDTree(data):\n global kdt, arr\n\n arr = np.asarray(zip(data[0], data[1]))\n kdt = kd.cKDTree(arr)\n\n\ndef getGrads(img, kernelSize):\n global gradx, grady, grad\n\n print \"Computing xderiv\"\n gradx = xDeriv(img, kernelSize)\n print \"Computing yderiv\"\n grady = yDeriv(img, kernelSize)\n\n\ndef getSobelKernel(size, direction):\n if size % 2 == 0:\n print \"\\nKernel size must be odd!\\n\"\n exit()\n\n magic = size / 2\n kernel = list()\n for i in xrange(-magic, magic + 1):\n row = list()\n if i < 0:\n pivot = i - magic\n for j in xrange(-magic, magic + 1):\n e = pivot + np.abs(j)\n row.append(e)\n kernel.append(row)\n elif i == 0:\n row = [0] * size\n kernel.append(row)\n else:\n pivot = i + magic\n for j in xrange(-magic, magic + 1):\n e = pivot - np.abs(j)\n row.append(e)\n kernel.append(row)\n\n if direction == 'y':\n return np.asarray(kernel)\n elif direction == 'x':\n return np.asarray(kernel).T\n\n\ndef xDeriv(img, kernelSize):\n\n s = kernelSize\n if s % 2 == 0:\n sys.exit(\"Kernel size must be odd!\\n\")\n kx = getSobelKernel(s, 'x')\n\n h, w = img.shape\n xgrad = np.zeros(img.shape)\n sigma = s / 2\n #blurImg = cv2.GaussianBlur(img, (s, s), sigma)\n for j in xrange(s / 2, h - s / 2):\n for i in xrange(s / 2, w - s / 2):\n submat = blurImg[j - s / 2: j + s / 2 + 1, i - s / 2: i + s / 2 + 1]\n val = (kx * submat).sum()\n 
xgrad[j, i] = val\n\n return xgrad\n\n\ndef yDeriv(img, kernelSize):\n\n s = kernelSize\n if s % 2 == 0:\n sys.exit(\"Kernel size must be odd!\\n\")\n ky = getSobelKernel(s, 'y')\n\n h, w = img.shape\n ygrad = np.zeros(img.shape)\n sigma = s / 2\n #blurImg = cv2.GaussianBlur(img, (s, s), sigma)\n for j in xrange(s / 2, h - s / 2):\n for i in xrange(s / 2, w - s / 2):\n submat = blurImg[j - s / 2: j + s / 2 + 1, i - s / 2: i + s / 2 + 1]\n val = (ky * submat).sum()\n ygrad[j, i] = val\n\n return ygrad\n\n\ndef preprocess(data, cutoff):\n\n global kdt\n\n numRegions = 8\n clustDist = 1\n\n print \"Quantizing the Data...\\n\"\n qData = quantize(data, cutoff, numRegions)\n\n #plt.figure(0)\n #plt.title(\"After Quantization\")\n #plt.imshow(colorMap(qData))\n\n # Cluster the Clouds using efficient KDT\n print \"Clustering Clouds...\\n\"\n cloudsB4 = np.array(np.where(qData == 255))\n C = clusterLib.Cluster()\n cloudClusters = C.clusterGrid(cloudsB4, clustDist)\n\n # Remove the clouds once and prepare for filtering\n print \"Removing clouds...\\n\"\n cloudsB4 = np.array(np.where(qData == 255))\n for cluster in cloudClusters:\n c = type(cluster)\n if c is int:\n c = [cluster]\n else:\n c = list(cluster)\n\n cloud = cloudsB4[:, c]\n val = findSurroundingVal(qData, cloud)\n if val is not None:\n qData[tuple(cloud)] = val\n\n #plt.figure(1)\n #plt.title(\"After Removing Clouds\")\n #plt.imshow(colorMap(qData))\n\n # Filter the data\n print \"Filtering out Small Clusters...\\n\"\n for region in xrange(numRegions):\n clusterVal = numRegions - region\n if not clusterVal in qData:\n continue\n dataPts = np.array(np.where(qData == clusterVal))\n print \"\\tFiltering %d Data Points in Region %d...\" % \\\n (dataPts[0].size, clusterVal)\n C = clusterLib.Cluster()\n regionClusters = C.clusterGrid(dataPts, clustDist)\n for cluster in regionClusters:\n if type(cluster) is int or len(cluster) > 250:\n continue\n r = dataPts[:, tuple(cluster)]\n val = findSurroundingVal(qData, r, 
clusterVal)\n qData[tuple(r)] = val\n\n #plt.figure(2)\n #plt.title(\"After Filtering Data\")\n #plt.imshow(colorMap(qData))\n\n # Remove the clouds again\n print \"Removing Clouds Again...\"\n cloudsB4 = np.array(np.where(qData == 255))\n C = clusterLib.Cluster()\n cloudClusters = C.clusterGrid(cloudsB4, clustDist)\n\n for cluster in cloudClusters:\n c = type(cluster)\n if c is int:\n c = [cluster]\n else:\n c = list(cluster)\n try:\n cloud = cloudsB4[:, c]\n except:\n embed()\n val = findSurroundingVal(qData, cloud)\n if val is not None:\n qData[tuple(cloud)] = val\n\n #plt.figure(3)\n #plt.title(\"After Removing Clouds After Filtering Data\")\n #plt.imshow(colorMap(qData))\n #plt.axes().set_aspect('equal')\n #plt.show()\n\n #print \"Getting Gradient Values...\\n\"\n #if gradx is None or grady is None:\n #getGrads(qData, ptsBtwnKnts)\n\n print \"Extracting Edges...\\n\"\n edgePts = extractStepEdges(qData)\n\n #colorData = colorMap(qData)\n #plt.imshow(colorData)\n #plt.axes().set_aspect('equal')\n #plt.show()\n\n return edgePts\n", "meta": {"content_hash": "bca4c9d2224948497feaa31b3040fee1", "timestamp": "", "source": "github", "line_count": 501, "max_line_length": 80, "avg_line_length": 28.167664670658684, "alnum_prop": 0.5290532879818595, "repo_name": "mathnathan/smartQuantization", "id": "22b52df90cff7a73ff37c8378d4906a4518c9525", "size": "14112", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "preprocessing.py", "mode": "33261", "license": "mit", "language": [{"name": "Python", "bytes": "45169"}]}} {"text": "\"\"\"Expectation value for a diagonal observable using a sampler primitive.\"\"\"\n\nfrom __future__ import annotations\n\nfrom collections.abc import Callable, Sequence, Mapping\nfrom typing import Any\n\nfrom dataclasses import dataclass\n\nimport numpy as np\nfrom qiskit.algorithms.algorithm_job import AlgorithmJob\nfrom qiskit.circuit import QuantumCircuit\nfrom qiskit.primitives import BaseSampler, BaseEstimator, 
EstimatorResult\nfrom qiskit.primitives.utils import init_observable, _circuit_key\nfrom qiskit.opflow import PauliSumOp\nfrom qiskit.quantum_info import SparsePauliOp\nfrom qiskit.quantum_info.operators.base_operator import BaseOperator\n\n\n@dataclass(frozen=True)\nclass _DiagonalEstimatorResult(EstimatorResult):\n \"\"\"A result from an expectation of a diagonal observable.\"\"\"\n\n # TODO make each measurement a dataclass rather than a dict\n best_measurements: Sequence[Mapping[str, Any]] | None = None\n\n\nclass _DiagonalEstimator(BaseEstimator):\n \"\"\"An estimator for diagonal observables.\"\"\"\n\n def __init__(\n self,\n sampler: BaseSampler,\n aggregation: float | Callable[[Sequence[tuple[float, float]]], float] | None = None,\n callback: Callable[[Sequence[Mapping[str, Any]]], None] | None = None,\n **options,\n ) -> None:\n r\"\"\"Evaluate the expectation of quantum state with respect to a diagonal operator.\n\n Args:\n sampler: The sampler used to evaluate the circuits.\n aggregation: The aggregation function to aggregate the measurement outcomes. 
If a float\n this specified the CVaR :math:`\\alpha` parameter.\n callback: A callback which is given the best measurements of all circuits in each\n evaluation.\n run_options: Options for the sampler.\n\n \"\"\"\n super().__init__(options=options)\n self.sampler = sampler\n if not callable(aggregation):\n aggregation = _get_cvar_aggregation(aggregation)\n\n self.aggregation = aggregation\n self.callback = callback\n\n def _run(\n self,\n circuits: Sequence[QuantumCircuit],\n observables: Sequence[BaseOperator | PauliSumOp],\n parameter_values: Sequence[Sequence[float]],\n **run_options,\n ) -> AlgorithmJob:\n circuit_indices = []\n for circuit in circuits:\n key = _circuit_key(circuit)\n index = self._circuit_ids.get(key)\n if index is not None:\n circuit_indices.append(index)\n else:\n circuit_indices.append(len(self._circuits))\n self._circuit_ids[key] = len(self._circuits)\n self._circuits.append(circuit)\n self._parameters.append(circuit.parameters)\n observable_indices = []\n for observable in observables:\n index = self._observable_ids.get(id(observable))\n if index is not None:\n observable_indices.append(index)\n else:\n observable_indices.append(len(self._observables))\n self._observable_ids[id(observable)] = len(self._observables)\n converted_observable = init_observable(observable)\n _check_observable_is_diagonal(converted_observable) # check it's diagonal\n self._observables.append(converted_observable)\n job = AlgorithmJob(\n self._call, circuit_indices, observable_indices, parameter_values, **run_options\n )\n job.submit()\n return job\n\n def _call(\n self,\n circuits: Sequence[int],\n observables: Sequence[int],\n parameter_values: Sequence[Sequence[float]],\n **run_options,\n ) -> _DiagonalEstimatorResult:\n job = self.sampler.run(\n [self._circuits[i] for i in circuits],\n parameter_values,\n **run_options,\n )\n sampler_result = job.result()\n samples = sampler_result.quasi_dists\n\n # a list of dictionaries containing: {state: (measurement 
probability, value)}\n evaluations = [\n {\n state: (probability, _evaluate_sparsepauli(state, self._observables[i]))\n for state, probability in sampled.items()\n }\n for i, sampled in zip(observables, samples)\n ]\n\n results = np.array([self.aggregation(evaluated.values()) for evaluated in evaluations])\n\n # get the best measurements\n best_measurements = []\n num_qubits = self._circuits[0].num_qubits\n for evaluated in evaluations:\n best_result = min(evaluated.items(), key=lambda x: x[1][1])\n best_measurements.append(\n {\n \"state\": best_result[0],\n \"bitstring\": bin(best_result[0])[2:].zfill(num_qubits),\n \"value\": best_result[1][1],\n \"probability\": best_result[1][0],\n }\n )\n\n if self.callback is not None:\n self.callback(best_measurements)\n\n return _DiagonalEstimatorResult(\n values=results, metadata=sampler_result.metadata, best_measurements=best_measurements\n )\n\n\ndef _get_cvar_aggregation(alpha):\n \"\"\"Get the aggregation function for CVaR with confidence level ``alpha``.\"\"\"\n if alpha is None:\n alpha = 1\n elif not 0 <= alpha <= 1:\n raise ValueError(f\"alpha must be in [0, 1] but was {alpha}\")\n\n # if alpha is close to 1 we can avoid the sorting\n if np.isclose(alpha, 1):\n\n def aggregate(measurements):\n return sum(probability * value for probability, value in measurements)\n\n else:\n\n def aggregate(measurements):\n # sort by values\n sorted_measurements = sorted(measurements, key=lambda x: x[1])\n\n accumulated_percent = 0 # once alpha is reached, stop\n cvar = 0\n for probability, value in sorted_measurements:\n cvar += value * max(probability, alpha - accumulated_percent)\n accumulated_percent += probability\n if accumulated_percent >= alpha:\n break\n\n return cvar / alpha\n\n return aggregate\n\n\ndef _evaluate_sparsepauli(state: int, observable: SparsePauliOp) -> complex:\n return sum(\n coeff * _evaluate_bitstring(state, paulistring)\n for paulistring, coeff in observable.label_iter()\n )\n\n\ndef 
_evaluate_bitstring(state: int, paulistring: str) -> float:\n \"\"\"Evaluate a bitstring on a Pauli label.\"\"\"\n n = len(paulistring) - 1\n return np.prod(\n [-1 if state & (1 << (n - i)) else 1 for i, pauli in enumerate(paulistring) if pauli == \"Z\"]\n )\n\n\ndef _check_observable_is_diagonal(observable: SparsePauliOp) -> bool:\n is_diagonal = not np.any(observable.paulis.x)\n if not is_diagonal:\n raise ValueError(\"The observable must be diagonal.\")\n", "meta": {"content_hash": "65b388c6e4cb2fc3e560d64bc8354443", "timestamp": "", "source": "github", "line_count": 190, "max_line_length": 100, "avg_line_length": 36.26315789473684, "alnum_prop": 0.6169811320754717, "repo_name": "QISKit/qiskit-sdk-py", "id": "67835c9e55a04dca65879ec5c4a4440f6699cc5c", "size": "7368", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "qiskit/algorithms/minimum_eigensolvers/diagonal_estimator.py", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "2582"}, {"name": "C++", "bytes": "327518"}, {"name": "CMake", "bytes": "19294"}, {"name": "Makefile", "bytes": "5608"}, {"name": "Pascal", "bytes": "2444"}, {"name": "Python", "bytes": "1312801"}, {"name": "Shell", "bytes": "8385"}]}} {"text": "/*jslint es5:true, indent:2, maxlen:80, node:true*/\n/*global suite:true, test:true, suiteSetup:true, suiteTeardown:true, setup:true,\n teardown:true*/ // Mocha\n'use strict';\n\n// this module\n\n// http://stackoverflow.com/questions/1606797\nfunction construct(constructor, args) {\n function F() {\n return constructor.apply(this, args);\n }\n F.prototype = constructor.prototype;\n return new F();\n}\n\n// exports\n\nmodule.exports = function (SubArray, suite, test, assert) {\n [\n [],\n ['a', 'b', 'c']\n ].forEach(function (args) {\n suite('constructed with: ' + JSON.stringify(args), function () {\n var subArray;\n\n test('constructed without incident', function () {\n subArray = construct(SubArray, args);\n assert(true, 'made it without 
throwing errors');\n });\n\n test('detectable as an Array', function () {\n assert.equal(Object.prototype.toString.call(subArray),\n '[object Array]');\n });\n\n test('inherits from global Array', function () {\n assert(subArray instanceof Array, 'instanceof Array');\n });\n\n test('initial length is correct', function () {\n assert.equal(subArray.length, args.length);\n });\n\n test('\"last\" method returns last of initial args', function () {\n var last = args.length ? args[args.length - 1] : undefined;\n assert(subArray.last, '\"last\" method exists');\n assert.equal(subArray.last(), last);\n });\n\n test('\"push\" adds a new last element', function () {\n subArray.push('last');\n assert(subArray.last, '\"last\" method exists');\n assert.equal(subArray.last(), 'last');\n });\n\n test('length is incremented post-\"push\"', function () {\n assert.equal(subArray.length, args.length + 1);\n });\n\n test('\"pop\" returns last element', function () {\n assert.equal(subArray.pop(), 'last');\n });\n\n test('\"last\" method returns last of initial args', function () {\n var last = args.length ? 
args[args.length - 1] : undefined;\n assert(subArray.last, '\"last\" method exists');\n assert.equal(subArray.last(), last);\n });\n\n test('length is decremented post-\"pop\"', function () {\n assert.equal(subArray.length, args.length);\n });\n\n test('setting length higher changes highest index', function () {\n subArray.length = 10;\n assert.equal(subArray.length, 10);\n subArray.push('last');\n assert.equal(subArray.length, 11);\n assert.equal(subArray[10], 'last');\n });\n\n test('setting length lower changes highest index', function () {\n subArray.length = 0;\n assert.equal(subArray.length, 0);\n subArray.push('last');\n assert.equal(subArray.length, 1);\n assert.equal(subArray[0], 'last');\n });\n\n test('add an element by direct index', function () {\n subArray[0] = 'abc';\n assert.equal(subArray.length, 1, 'length is updated');\n assert.equal(subArray[0], 'abc', 'element was stored/retrieved');\n assert.equal(subArray.last(), 'abc', '\"last\" finds element');\n });\n\n test('add an element by higher direct index', function () {\n subArray[2] = 'abc';\n assert.equal(subArray.length, 3, 'length is updated');\n assert(!subArray[1], 'skipped index is falsey');\n assert.equal(subArray[2], 'abc', 'element was stored/retrieved');\n assert.equal(subArray.last(), 'abc', '\"last\" finds element');\n });\n });\n });\n }\n", "meta": {"content_hash": "4502a50c40fc200d1512c0d3ebed7d6c", "timestamp": "", "source": "github", "line_count": 107, "max_line_length": 80, "avg_line_length": 36.728971962616825, "alnum_prop": 0.5302798982188295, "repo_name": "jokeyrhyme/js-sub-array-tests", "id": "329976486606c177cb08a3f9cac3436c4852483d", "size": "3930", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/tests.js", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "9598"}]}} {"text": "\n\n/**\n * Keyboard class handles the keyboard state for the InputHandler\n */\nclass Keyboard {\n /**\n * Constructor for the Keyboard 
class\n */\n constructor() {\n // Store state\n this.keys = {};\n\n // Setup listeners for key events\n document.addEventListener('keydown', (e) => this._onKeyDown(e), false);\n document.addEventListener('keyup', (e) => this._onKeyUp(e), false);\n }\n\n /**\n * Callback listener for keydown event\n */\n _onKeyDown(e) {\n this.keys[e.key.toLowerCase()] = true;\n e.preventDefault();\n e.stopPropagation();\n }\n\n /**\n * Callback listener for keyup event\n */\n _onKeyUp(e) {\n this.keys[e.key.toLowerCase()] = false;\n }\n\n /**\n * Check if a key is down,\n * @param {string} key The key to check\n */\n keyIsPressed(key) {\n if (this.keys[key.toLowerCase()]) {\n return true;\n } else {\n return false;\n }\n }\n}\n\nexport { Keyboard as default };\n", "meta": {"content_hash": "63ea91615a683d532076681bf5b12a47", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 75, "avg_line_length": 19.166666666666668, "alnum_prop": 0.592391304347826, "repo_name": "nicholasnelson/uncondemned", "id": "858d33ae2fcb2ec0f40ac4f109b9b20cff96f1e7", "size": "2052", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/scripts/GameObjects/Player/InputHandler/Keyboard.js", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "2095"}, {"name": "JavaScript", "bytes": "1503530"}]}} {"text": "--TEST--\nphpunit-skelgen --class -- BankAccountTest _fixture/BankAccountTest.php BankAccount php://stdout\n--FILE--\nmain();\n?>\n--EXPECTF--\nPHPUnit Skeleton Generator %s by Sebastian Bergmann.\n\n\n

\n
\n
\n Load Devices\n
\n\n
\n
\n
\n \n\n
\n
\n
\n
\n Load Ports\n
\n
\n
\n \n \n
\n
\n
\n Patch!\n
\n
\n
\n

\n {{data.message}} \n

\n
\n\n\n        
\n
\n
\n
\n ConnectPoints in use\n
\n
\n
\n

\n {{data.cpoints}} \n

\n
\n
", "meta": {"content_hash": "a1c33d0a615ef548ab9826888cf05960", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 91, "avg_line_length": 26.77777777777778, "alnum_prop": 0.4972337482710927, "repo_name": "wuwenbin2/onos_bgp_evpn", "id": "34204b4de3f2e852583e1fe73660d07897c2cf37", "size": "1446", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "apps/patchpanel/src/main/resources/app/view/sampleCustom/sampleCustom.html", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "211779"}, {"name": "Groff", "bytes": "1090"}, {"name": "HTML", "bytes": "177259"}, {"name": "Java", "bytes": "25432628"}, {"name": "JavaScript", "bytes": "2970699"}, {"name": "Protocol Buffer", "bytes": "8451"}, {"name": "Python", "bytes": "126671"}, {"name": "Shell", "bytes": "913"}, {"name": "Thrift", "bytes": "16641"}]}} {"text": "export const REQUESTS_BEGIN = 'REQUESTS_BEGIN';\nexport const REQUESTS_END = 'REQUESTS_END';\nexport const REQUESTS_ERROR = 'REQUESTS_ERROR';\n\n// UI\nexport const UPDATE_EQUIPMENT_LIST_UI = 'UPDATE_EQUIPMENT_LIST_UI';\nexport const UPDATE_PHYSICAL_ATTACHMENTS_UI = 'UPDATE_PHYSICAL_ATTACHMENTS_UI';\nexport const UPDATE_OWNERS_UI = 'UPDATE_OWNERS_UI';\nexport const UPDATE_OWNER_CONTACTS_UI = 'UPDATE_OWNER_CONTACTS_UI';\nexport const UPDATE_OWNER_EQUIPMENT_UI = 'UPDATE_OWNER_EQUIPMENT_UI';\nexport const UPDATE_USERS_UI = 'UPDATE_USERS_UI';\nexport const UPDATE_PROJECTS_UI = 'UPDATE_PROJECTS_UI';\nexport const UPDATE_PROJECT_CONTACTS_UI = 'UPDATE_PROJECT_CONTACTS_UI';\nexport const UPDATE_RENTAL_REQUESTS_UI = 'UPDATE_RENTAL_REQUESTS_UI';\nexport const UPDATE_USER_ROLES_UI = 'UPDATE_USER_ROLES_UI';\nexport const UPDATE_GROUPS_LOOKUP = 'UPDATE_GROUPS_LOOKUP';\nexport const UPDATE_PERMISSIONS_LOOKUP = 'UPDATE_PERMISSIONS_LOOKUP';\nexport const UPDATE_ROLES_UI = 'UPDATE_ROLES_UI';\nexport const UPDATE_HISTORY_UI = 'UPDATE_HISTORY_UI';\nexport const UPDATE_DOCUMENTS_UI = 'UPDATE_DOCUMENTS_UI';\n\n// 
Search\nexport const UPDATE_EQUIPMENT_LIST_SEARCH = 'UPDATE_EQUIPMENT_LIST_SEARCH';\nexport const UPDATE_OWNERS_SEARCH = 'UPDATE_OWNERS_SEARCH';\nexport const UPDATE_PROJECTS_SEARCH = 'UPDATE_PROJECTS_SEARCH';\nexport const UPDATE_RENTAL_REQUESTS_SEARCH = 'UPDATE_RENTAL_REQUESTS_SEARCH';\nexport const UPDATE_USERS_SEARCH = 'UPDATE_USERS_SEARCH';\nexport const UPDATE_ROLES_SEARCH = 'UPDATE_ROLES_SEARCH';\n\n// Lookups\nexport const UPDATE_CITIES_LOOKUP = 'UPDATE_CITIES';\nexport const UPDATE_DISTRICTS_LOOKUP = 'UPDATE_DISTRICTS';\nexport const UPDATE_REGIONS_LOOKUP = 'UPDATE_REGIONS';\nexport const UPDATE_SERVICE_AREAS_LOOKUP = 'UPDATE_SERVICE_AREAS';\nexport const UPDATE_LOCAL_AREAS_LOOKUP = 'UPDATE_LOCAL_AREAS';\nexport const UPDATE_OWNERS_LOOKUP = 'UPDATE_OWNERS_LOOKUP';\nexport const UPDATE_DISTRICT_EQUIPMENT_TYPES_LOOKUP = 'UPDATE_DISTRICT_EQUIPMENT_TYPES_LOOKUP';\nexport const UPDATE_EQUIPMENT_TYPES_LOOKUP = 'UPDATE_EQUIPMENT_TYPES_LOOKUP';\nexport const UPDATE_ROLES_LOOKUP = 'UPDATE_ROLES_LOOKUP';\nexport const UPDATE_PROJECTS_LOOKUP = 'UPDATE_PROJECTS_LOOKUP';\nexport const UPDATE_USERS_LOOKUP = 'UPDATE_USERS_LOOKUP';\n\n// Current User\nexport const UPDATE_CURRENT_USER = 'UPDATE_CURRENT_USER';\n\n// Users\nexport const UPDATE_USERS = 'UPDATE_USERS';\nexport const UPDATE_USER = 'UPDATE_USER';\nexport const ADD_USER = 'ADD_USER';\nexport const DELETE_USER = 'DELETE_USER';\n\n// Favourites\nexport const UPDATE_FAVOURITES = 'UPDATE_FAVOURITES';\nexport const ADD_FAVOURITE = 'ADD_FAVOURITE';\nexport const UPDATE_FAVOURITE = 'UPDATE_FAVOURITE';\nexport const DELETE_FAVOURITE = 'DELETE_FAVOURITE';\n\n// Contacts\nexport const UPDATE_CONTACTS = 'UPDATE_CONTACTS';\nexport const ADD_CONTACT = 'ADD_CONTACT';\nexport const UPDATE_CONTACT = 'UPDATE_CONTACT';\nexport const DELETE_CONTACT = 'DELETE_CONTACT';\n\n// Documents\nexport const UPDATE_DOCUMENTS = 'UPDATE_DOCUMENTS';\nexport const ADD_DOCUMENT = 'ADD_DOCUMENT';\nexport const UPDATE_DOCUMENT = 
'UPDATE_DOCUMENT';\nexport const DELETE_DOCUMENT = 'DELETE_DOCUMENT';\n\n// Roles, Permissions\nexport const UPDATE_ROLES = 'UPDATE_ROLES';\nexport const UPDATE_ROLE = 'UPDATE_ROLE';\nexport const ADD_ROLE = 'ADD_ROLE';\nexport const DELETE_ROLE = 'DELETE_ROLE';\nexport const UPDATE_ROLE_PERMISSIONS = 'UPDATE_ROLE_PERMISSIONS';\n\n// Equipment\nexport const UPDATE_EQUIPMENT_LIST = 'UPDATE_EQUIPMENT_LIST';\nexport const ADD_EQUIPMENT = 'ADD_EQUIPMENT';\nexport const UPDATE_EQUIPMENT = 'UPDATE_EQUIPMENT';\n\n// Owners\nexport const UPDATE_OWNERS = 'UPDATE_OWNERS';\nexport const UPDATE_OWNER = 'UPDATE_OWNER';\nexport const ADD_OWNER = 'ADD_OWNER';\nexport const DELETE_OWNER = 'DELETE_OWNER';\n\n// Projects\nexport const UPDATE_PROJECTS = 'UPDATE_PROJECTS';\nexport const UPDATE_PROJECT = 'UPDATE_PROJECT';\nexport const ADD_PROJECT = 'ADD_PROJECT';\n\n// Rental Requests\nexport const UPDATE_RENTAL_REQUESTS = 'UPDATE_RENTAL_REQUESTS';\nexport const UPDATE_RENTAL_REQUEST = 'UPDATE_RENTAL_REQUEST';\nexport const ADD_RENTAL_REQUEST = 'ADD_RENTAL_REQUEST';\n\n// Rotation List\nexport const UPDATE_RENTAL_REQUEST_ROTATION_LIST = 'UPDATE_RENTAL_REQUEST_ROTATION_LIST';\n\n// Rental Agreements\nexport const UPDATE_RENTAL_AGREEMENT = 'UPDATE_RENTAL_AGREEMENT';\nexport const ADD_RENTAL_AGREEMENT = 'ADD_RENTAL_AGREEMENT';\n\n// Rental Rates, Conditions\nexport const ADD_RENTAL_RATE = 'ADD_RENTAL_RATE';\nexport const UPDATE_RENTAL_RATE = 'UPDATE_RENTAL_RATE';\nexport const DELETE_RENTAL_RATE = 'DELETE_RENTAL_RATE';\nexport const ADD_RENTAL_CONDITION = 'ADD_RENTAL_CONDITION';\nexport const UPDATE_RENTAL_CONDITION = 'UPDATE_RENTAL_CONDITION';\nexport const DELETE_RENTAL_CONDITION = 'DELETE_RENTAL_CONDITION';\n\n// Version\nexport const UPDATE_VERSION = 'UPDATE_VERSION';\n\n// History\nexport const UPDATE_HISTORY = 'UPDATE_HISTORY';\n", "meta": {"content_hash": "176b50923ec8745d4f6e2a8f58e9967f", "timestamp": "", "source": "github", "line_count": 117, "max_line_length": 95, 
"avg_line_length": 41.162393162393165, "alnum_prop": 0.7693106312292359, "repo_name": "plavoieBC/hets", "id": "b05fd770c24c5837e698b0e4cf863c91f5ebb686", "size": "4832", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Client/src/js/actionTypes.js", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "9077"}, {"name": "C#", "bytes": "4126242"}, {"name": "CSS", "bytes": "60442"}, {"name": "HTML", "bytes": "343352"}, {"name": "JavaScript", "bytes": "475168"}, {"name": "PowerShell", "bytes": "805"}, {"name": "Python", "bytes": "4784"}, {"name": "Shell", "bytes": "5609"}]}} {"text": "'use strict';\n\ndescribe('Controller: MainCtrl', function () {\n\n // load the controller's module\n beforeEach(module('autocompleteApp'));\n\n var MainCtrl,\n scope;\n\n // Initialize the controller and a mock scope\n beforeEach(inject(function ($controller, $rootScope) {\n scope = $rootScope.$new();\n MainCtrl = $controller('MainCtrl', {\n $scope: scope\n });\n }));\n\n it('should attach a list of awesomeThings to the scope', function () {\n expect(scope.awesomeThings.length).toBe(3);\n });\n});\n", "meta": {"content_hash": "3ff1f6cd980772cf590a61f6e9038f5e", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 72, "avg_line_length": 23.227272727272727, "alnum_prop": 0.6516634050880626, "repo_name": "theoinglis/controls", "id": "0c1d617c2f9130e104fdd7ff679bc11ec23f78b1", "size": "511", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/spec/controllers/main.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "3936"}, {"name": "JavaScript", "bytes": "81966"}]}} {"text": "import {LocalStore} from \"fluxx\"\nimport * as actions from \"./playFormActions\"\nimport {\n defaultFieldValidation, FieldValidation, updateFieldValidation,\n updateUnchangedFieldValidation, Valid\n} from \"../../../framework/utils/Validation\"\nimport {\n validateFuture, validateNonBlank, 
validateNonEmptyArray,\n validateNotNull, validatePositiveNumber\n} from \"../../../common/commonValidations\"\nimport {copy} from \"../../../framework/utils/object\";\nimport {Theater} from \"../../theater/theaterModel\";\nimport messages from \"../../../framework/messages\";\nimport * as arrays from \"../../../framework/utils/arrays\";\nimport {Company} from \"../../company/companyModel\";\nimport {Show} from \"../../show/showModel\";\n\nexport interface PlayFormState {\n step: \"loading\" | \"form\" | \"success\"\n company: Company\n show: Show\n availableTheaters: Array\n id?: string\n theater: FieldValidation\n dates: FieldValidation>\n prices: FieldValidation>\n errors?: Array\n}\n\nexport interface PriceValidation {\n name: FieldValidation\n value: FieldValidation\n condition: FieldValidation\n}\n\nexport interface DateValidation {\n date: FieldValidation\n reservationEndDate: FieldValidation\n}\n\nconst defaultDateValidation: DateValidation = {\n date: defaultFieldValidation(null),\n reservationEndDate: defaultFieldValidation(null)\n}\n\nconst defaultPriceValidation: PriceValidation = {\n name: defaultFieldValidation(\"\"),\n value: defaultFieldValidation(0),\n condition: defaultFieldValidation(\"\")\n}\n\nconst initialState: PlayFormState = {\n step: \"loading\",\n company: null,\n show: null,\n availableTheaters: [],\n theater: defaultFieldValidation(\"\"),\n dates: defaultFieldValidation([defaultDateValidation]),\n prices: defaultFieldValidation([defaultPriceValidation])\n}\n\nexport const playFormStore = () => LocalStore(initialState, on => {\n on(actions.initialize, (state, {company, show, play, theaters}) => {\n if (play) {\n return copy(state, {\n step: \"form\",\n company,\n show,\n availableTheaters: theaters,\n id: play.id,\n theater: defaultFieldValidation(play.theater.id),\n dates: defaultFieldValidation([{\n date: defaultFieldValidation(play.date),\n reservationEndDate: defaultFieldValidation(play.reservationEndDate)\n }]),\n prices: 
defaultFieldValidation(\n play.prices.map(price => ({\n name: defaultFieldValidation(price.name),\n value: defaultFieldValidation(price.value),\n condition: defaultFieldValidation(price.condition)\n }))\n )\n })\n } else {\n return copy(state, {step: \"form\", company, show, availableTheaters: theaters})\n }\n })\n\n on(actions.updateTheater, (state, value) => {\n return copy(state, {\n theater: updateFieldValidation(state.theater, value, validateNonBlank(value))\n })\n })\n\n on(actions.addDate, (state) => {\n return updateDates(state, arrays.append(state.dates.value, defaultDateValidation))\n })\n\n on(actions.removeDate, (state, index) => {\n return updateDates(state, arrays.remove(state.dates.value, index))\n })\n\n on(actions.updateDate, (state, {index, value}) => {\n const currentDate = state.dates.value[index]\n return updateDates(state, arrays.replace(state.dates.value, index, copy(currentDate, {\n date: updateFieldValidation(currentDate.date, value, validateNotNull(value).flatMap(validateFuture)),\n reservationEndDate: updateUnchangedFieldValidation(currentDate.reservationEndDate, currentDate.reservationEndDate.value, validateReservationEndDate(currentDate.reservationEndDate.value, value))\n })))\n })\n\n on(actions.updateReservationEndDate, (state, {index, value}) => {\n const currentDate = state.dates.value[index]\n return updateDates(state, arrays.replace(state.dates.value, index, copy(currentDate, {\n reservationEndDate: updateFieldValidation(currentDate.reservationEndDate, value, validateReservationEndDate(value, currentDate.date.value))\n })))\n })\n\n on(actions.addPrice, (state) => {\n return updatePrices(state, arrays.append(state.prices.value, defaultPriceValidation))\n })\n\n on(actions.removePrice, (state, index) => {\n return updatePrices(state, arrays.remove(state.prices.value, index))\n })\n\n on(actions.updatePriceName, (state, {index, value}) => {\n const currentPrice = state.prices.value[index]\n return updatePrices(state, 
arrays.replace(state.prices.value, index, copy(currentPrice, {\n name: updateFieldValidation(currentPrice.name, value, validateNonBlank(value))\n })))\n })\n\n on(actions.updatePriceValue, (state, {index, value}) => {\n const currentPrice = state.prices.value[index]\n return updatePrices(state, arrays.replace(state.prices.value, index, copy(currentPrice, {\n value: updateFieldValidation(currentPrice.value, value, validatePositiveNumber(value))\n })))\n })\n\n on(actions.updatePriceCondition, (state, {index, value}) => {\n const currentPrice = state.prices.value[index]\n return updatePrices(state, arrays.replace(state.prices.value, index, copy(currentPrice, {\n condition: updateFieldValidation(currentPrice.condition, value, Valid(value))\n })))\n })\n\n on(actions.closeErrors, (state) => {\n return copy(state, {errors: null})\n })\n\n on(actions.success, (state) => {\n return copy(state, {step: \"success\"})\n })\n})\n\nfunction validateReservationEndDate(reservationEndDate: Date, date: Date) {\n return validateNotNull(reservationEndDate)\n .flatMap(validateFuture)\n .filter(reservationEndDate => reservationEndDate < date, messages.production.play.form.reservationEndDateAfterDate)\n}\n\nfunction updateDates(state: PlayFormState, dates: Array) {\n return copy(state, {\n dates: updateFieldValidation(state.dates, dates, validateDates(dates))\n })\n}\n\nfunction validateDates(dates: Array) {\n return validateNonEmptyArray(dates)\n}\n\nfunction updatePrices(state: PlayFormState, prices: Array) {\n return copy(state, {\n prices: updateFieldValidation(state.prices, prices, validatePrices(prices))\n })\n}\n\nfunction validatePrices(prices: Array) {\n return validateNonEmptyArray(prices)\n}", "meta": {"content_hash": "289114cbb8eef6641f0314e1ad35e0e0", "timestamp": "", "source": "github", "line_count": 180, "max_line_length": 199, "avg_line_length": 34.833333333333336, "alnum_prop": 0.7207336523125997, "repo_name": "kneelnrise/vep", "id": 
"5858c3ce17fdfd9bbedaacdd09a2d323d9471be1", "size": "6270", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/client/app/production/play/playForm/playFormStore.ts", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "1507"}, {"name": "HTML", "bytes": "86391"}, {"name": "JavaScript", "bytes": "69698"}, {"name": "Scala", "bytes": "430360"}, {"name": "Shell", "bytes": "1778"}]}} {"text": "[![standard-readme compliant](https://img.shields.io/badge/readme%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme)\n\n> HTTP frontend to the Cisco Webex JS SDK\n\nWhy would we put an http server in front of our SDK? Encryption is hard and this lets all of our client test suites (potentially written in languages for which we do not have sdks) do encrypted things without a major time expenditure\n\n- [Install](#install)\n- [Usage](#usage)\n- [Contribute](#contribute)\n- [Maintainers](#maintainers)\n- [License](#license)\n\n## Install\n\n```bash\n npm install -g @webex/webex-server\n```\n\n## Usage\n\nStart the daemon\n\n```bash\nwebex-server\n```\n\nCreate a session (make sure to copy your set cookie header)\n\n```bash\ncurl -X POST \\\n -H \"Content-Type: application/json\" \\\n -d '{\"clientId\":\"\",\"clientSecret\":\"\",\"redirectUri\":\"\",\"scope\":\"\"}' \\\n http://localhost:3000/api/v1/session\n```\n\n(optional) Create a conversation\n\n> SDK: webex.internal.conversation.create({comment: 'first comment', displayName: 'title', participants: ['', '', '']})\n\n```\ncurl -X POST \\\n -H \"Content-Type: application/json\" \\\n -H \"Cookie: \" \\\n -d '[{\"comment\":\"first message\",\"displayName\":\"title\",\"participants\":[\"userId1\",\"userId2\",\"userId3\"]}]'\n -v \\\n http://localhost:3000/api/v1/session/invoke/internal/conversation/create\n```\n\n(optional) Post a message\n\n> SDK: `webex.inernal.conversation.post({url: '', {displayName: 'second comment'}})`\n\n```\ncurl -X 
POST \\\n -H \"Content-Type: application/json\" \\\n -H \"Cookie: \" \\\n -d [{\"url\":\"\"},{\"displayName\":\"second comment\"}]\n -v \\\n http://localhost:3000/api/v1/session/invoke/internal/conversation/post\n```\n\n(optional) Fetch a conversation\n\n> SDK: `webex.internal.conversation.get({url: ''})`\n> SDK: `webex.internal.conversation.get({url: ''})`\n\n```\ncurl -X POST \\\n -H \"Content-Type: application/json\" \\\n -H \"Cookie: \" \\\n -d [{\"url\":\"\"}]\n -v \\\n http://localhost:3000/api/v1/session/invoke/internal/conversation/get\n```\n\nClean up your session (If you don't do this, you'll have a bunch of long-running web socket connections)\n\n```\ncurl -X DELETE \\\n -H \"Content-Type: application/json\" \\\n -H \"Cookie: \" \\\n -v \\\n http://localhost:3000/api/v1/session\n```\n\n## Maintainers\n\nThis package is maintained by [Cisco Webex for Developers](https://developer.webex.com/).\n\n## Contribute\n\nPull requests welcome. Please see [CONTRIBUTING.md](https://github.com/webex/webex-js-sdk/blob/master/CONTRIBUTING.md) for more details.\n\n## License\n\n\u00a9 2016-2019 Cisco and/or its affiliates. 
All Rights Reserved.\n", "meta": {"content_hash": "3973dcd97084c8ce044d305421e07c44", "timestamp": "", "source": "github", "line_count": 96, "max_line_length": 233, "avg_line_length": 30.020833333333332, "alnum_prop": 0.7002081887578071, "repo_name": "bbender/spark-js-sdk", "id": "d409b7632c7179312aeeb871649f17e4e36a7ac0", "size": "2906", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packages/node_modules/@webex/webex-server/README.md", "mode": "33188", "license": "mit", "language": [{"name": "Dockerfile", "bytes": "877"}, {"name": "JavaScript", "bytes": "112240"}, {"name": "Python", "bytes": "4743"}, {"name": "Shell", "bytes": "21329"}]}} {"text": "cmake_minimum_required( VERSION 2.6.3 )\n\nif( DEFINED CMAKE_CROSSCOMPILING )\n # subsequent toolchain loading is not really needed\n return()\nendif()\n\nif( CMAKE_TOOLCHAIN_FILE )\n # touch toolchain variable only to suppress \"unused variable\" warning\nendif()\n\nget_property( _CMAKE_IN_TRY_COMPILE GLOBAL PROPERTY IN_TRY_COMPILE )\nif( _CMAKE_IN_TRY_COMPILE )\n include( \"${CMAKE_CURRENT_SOURCE_DIR}/../android.toolchain.config.cmake\" OPTIONAL )\nendif()\n\n# this one is important\nset( CMAKE_SYSTEM_NAME Linux )\n# this one not so much\nset( CMAKE_SYSTEM_VERSION 1 )\n\n# rpath makes low sence for Android\nset( CMAKE_SKIP_RPATH TRUE CACHE BOOL \"If set, runtime paths are not added when using shared libraries.\" )\n\nset( ANDROID_SUPPORTED_NDK_VERSIONS ${ANDROID_EXTRA_NDK_VERSIONS} -r9d -r9c -r9b -r9 -r8e -r8d -r8c -r8b -r8 -r7c -r7b -r7 -r6b -r6 -r5c -r5b -r5 \"\" )\nif(NOT DEFINED ANDROID_NDK_SEARCH_PATHS)\n if( CMAKE_HOST_WIN32 )\n file( TO_CMAKE_PATH \"$ENV{PROGRAMFILES}\" ANDROID_NDK_SEARCH_PATHS )\n set( ANDROID_NDK_SEARCH_PATHS \"${ANDROID_NDK_SEARCH_PATHS}/android-ndk\" \"$ENV{SystemDrive}/NVPACK/android-ndk\" )\n else()\n file( TO_CMAKE_PATH \"$ENV{HOME}\" ANDROID_NDK_SEARCH_PATHS )\n set( ANDROID_NDK_SEARCH_PATHS /opt/android-ndk \"${ANDROID_NDK_SEARCH_PATHS}/NVPACK/android-ndk\" )\n 
endif()\nendif()\nif(NOT DEFINED ANDROID_STANDALONE_TOOLCHAIN_SEARCH_PATH)\n set( ANDROID_STANDALONE_TOOLCHAIN_SEARCH_PATH /opt/android-toolchain )\nendif()\n\nset( ANDROID_SUPPORTED_ABIS_arm \"armeabi-v7a;armeabi;armeabi-v7a with NEON;armeabi-v7a with VFPV3;armeabi-v6 with VFP\" )\nset( ANDROID_SUPPORTED_ABIS_x86 \"x86\" )\nset( ANDROID_SUPPORTED_ABIS_mipsel \"mips\" )\n\nset( ANDROID_DEFAULT_NDK_API_LEVEL 15 )\nset( ANDROID_DEFAULT_NDK_API_LEVEL_x86 15 )\nset( ANDROID_DEFAULT_NDK_API_LEVEL_mips 15 )\n\n\nmacro( __LIST_FILTER listvar regex )\n if( ${listvar} )\n foreach( __val ${${listvar}} )\n if( __val MATCHES \"${regex}\" )\n list( REMOVE_ITEM ${listvar} \"${__val}\" )\n endif()\n endforeach()\n endif()\nendmacro()\n\nmacro( __INIT_VARIABLE var_name )\n set( __test_path 0 )\n foreach( __var ${ARGN} )\n if( __var STREQUAL \"PATH\" )\n set( __test_path 1 )\n break()\n endif()\n endforeach()\n if( __test_path AND NOT EXISTS \"${${var_name}}\" )\n unset( ${var_name} CACHE )\n endif()\n if( \"${${var_name}}\" STREQUAL \"\" )\n set( __values 0 )\n foreach( __var ${ARGN} )\n if( __var STREQUAL \"VALUES\" )\n set( __values 1 )\n elseif( NOT __var STREQUAL \"PATH\" )\n set( __obsolete 0 )\n if( __var MATCHES \"^OBSOLETE_.*$\" )\n string( REPLACE \"OBSOLETE_\" \"\" __var \"${__var}\" )\n set( __obsolete 1 )\n endif()\n if( __var MATCHES \"^ENV_.*$\" )\n string( REPLACE \"ENV_\" \"\" __var \"${__var}\" )\n set( __value \"$ENV{${__var}}\" )\n elseif( DEFINED ${__var} )\n set( __value \"${${__var}}\" )\n else()\n if( __values )\n set( __value \"${__var}\" )\n else()\n set( __value \"\" )\n endif()\n endif()\n if( NOT \"${__value}\" STREQUAL \"\" )\n if( __test_path )\n if( EXISTS \"${__value}\" )\n file( TO_CMAKE_PATH \"${__value}\" ${var_name} )\n if( __obsolete AND NOT _CMAKE_IN_TRY_COMPILE )\n message( WARNING \"Using value of obsolete variable ${__var} as initial value for ${var_name}. 
Please note, that ${__var} can be completely removed in future versions of the toolchain.\" )\n endif()\n break()\n endif()\n else()\n set( ${var_name} \"${__value}\" )\n if( __obsolete AND NOT _CMAKE_IN_TRY_COMPILE )\n message( WARNING \"Using value of obsolete variable ${__var} as initial value for ${var_name}. Please note, that ${__var} can be completely removed in future versions of the toolchain.\" )\n endif()\n break()\n endif()\n endif()\n endif()\n endforeach()\n unset( __value )\n unset( __values )\n unset( __obsolete )\n elseif( __test_path )\n file( TO_CMAKE_PATH \"${${var_name}}\" ${var_name} )\n endif()\n unset( __test_path )\nendmacro()\n\nmacro( __DETECT_NATIVE_API_LEVEL _var _path )\n SET( __ndkApiLevelRegex \"^[\\t ]*#define[\\t ]+__ANDROID_API__[\\t ]+([0-9]+)[\\t ]*$\" )\n FILE( STRINGS ${_path} __apiFileContent REGEX \"${__ndkApiLevelRegex}\" )\n if( NOT __apiFileContent )\n message( SEND_ERROR \"Could not get Android native API level. Probably you have specified invalid level value, or your copy of NDK/toolchain is broken.\" )\n endif()\n string( REGEX REPLACE \"${__ndkApiLevelRegex}\" \"\\\\1\" ${_var} \"${__apiFileContent}\" )\n unset( __apiFileContent )\n unset( __ndkApiLevelRegex )\nendmacro()\n\nmacro( __DETECT_TOOLCHAIN_MACHINE_NAME _var _root )\n if( EXISTS \"${_root}\" )\n file( GLOB __gccExePath RELATIVE \"${_root}/bin/\" \"${_root}/bin/*-gcc${TOOL_OS_SUFFIX}\" )\n __LIST_FILTER( __gccExePath \"^[.].*\" )\n list( LENGTH __gccExePath __gccExePathsCount )\n if( NOT __gccExePathsCount EQUAL 1 AND NOT _CMAKE_IN_TRY_COMPILE )\n message( WARNING \"Could not determine machine name for compiler from ${_root}\" )\n set( ${_var} \"\" )\n else()\n get_filename_component( __gccExeName \"${__gccExePath}\" NAME_WE )\n string( REPLACE \"-gcc\" \"\" ${_var} \"${__gccExeName}\" )\n endif()\n unset( __gccExePath )\n unset( __gccExePathsCount )\n unset( __gccExeName )\n else()\n set( ${_var} \"\" )\n endif()\nendmacro()\n\n\n# fight against cygwin\nset( 
ANDROID_FORBID_SYGWIN TRUE CACHE BOOL \"Prevent cmake from working under cygwin and using cygwin tools\")\nmark_as_advanced( ANDROID_FORBID_SYGWIN )\nif( ANDROID_FORBID_SYGWIN )\n if( CYGWIN )\n message( FATAL_ERROR \"Android NDK and android-cmake toolchain are not welcome Cygwin. It is unlikely that this cmake toolchain will work under cygwin. But if you want to try then you can set cmake variable ANDROID_FORBID_SYGWIN to FALSE and rerun cmake.\" )\n endif()\n\n if( CMAKE_HOST_WIN32 )\n # remove cygwin from PATH\n set( __new_path \"$ENV{PATH}\")\n __LIST_FILTER( __new_path \"cygwin\" )\n set(ENV{PATH} \"${__new_path}\")\n unset(__new_path)\n endif()\nendif()\n\n\n# detect current host platform\nif( NOT DEFINED ANDROID_NDK_HOST_X64 AND (CMAKE_HOST_SYSTEM_PROCESSOR MATCHES \"amd64|x86_64|AMD64\" OR CMAKE_HOST_APPLE) )\n set( ANDROID_NDK_HOST_X64 1 CACHE BOOL \"Try to use 64-bit compiler toolchain\" )\n mark_as_advanced( ANDROID_NDK_HOST_X64 )\nendif()\n\nset( TOOL_OS_SUFFIX \"\" )\nif( CMAKE_HOST_APPLE )\n set( ANDROID_NDK_HOST_SYSTEM_NAME \"darwin-x86_64\" )\n set( ANDROID_NDK_HOST_SYSTEM_NAME2 \"darwin-x86\" )\nelseif( CMAKE_HOST_WIN32 )\n set( ANDROID_NDK_HOST_SYSTEM_NAME \"windows-x86_64\" )\n set( ANDROID_NDK_HOST_SYSTEM_NAME2 \"windows\" )\n set( TOOL_OS_SUFFIX \".exe\" )\nelseif( CMAKE_HOST_UNIX )\n set( ANDROID_NDK_HOST_SYSTEM_NAME \"linux-x86_64\" )\n set( ANDROID_NDK_HOST_SYSTEM_NAME2 \"linux-x86\" )\nelse()\n message( FATAL_ERROR \"Cross-compilation on your platform is not supported by this cmake toolchain\" )\nendif()\n\nif( NOT ANDROID_NDK_HOST_X64 )\n set( ANDROID_NDK_HOST_SYSTEM_NAME ${ANDROID_NDK_HOST_SYSTEM_NAME2} )\nendif()\n\n# see if we have path to Android NDK\n__INIT_VARIABLE( ANDROID_NDK PATH ENV_ANDROID_NDK )\nif( NOT ANDROID_NDK )\n # see if we have path to Android standalone toolchain\n __INIT_VARIABLE( ANDROID_STANDALONE_TOOLCHAIN PATH ENV_ANDROID_STANDALONE_TOOLCHAIN OBSOLETE_ANDROID_NDK_TOOLCHAIN_ROOT 
OBSOLETE_ENV_ANDROID_NDK_TOOLCHAIN_ROOT )\n\n if( NOT ANDROID_STANDALONE_TOOLCHAIN )\n #try to find Android NDK in one of the the default locations\n set( __ndkSearchPaths )\n foreach( __ndkSearchPath ${ANDROID_NDK_SEARCH_PATHS} )\n foreach( suffix ${ANDROID_SUPPORTED_NDK_VERSIONS} )\n list( APPEND __ndkSearchPaths \"${__ndkSearchPath}${suffix}\" )\n endforeach()\n endforeach()\n __INIT_VARIABLE( ANDROID_NDK PATH VALUES ${__ndkSearchPaths} )\n unset( __ndkSearchPaths )\n\n if( ANDROID_NDK )\n message( STATUS \"Using default path for Android NDK: ${ANDROID_NDK}\" )\n message( STATUS \" If you prefer to use a different location, please define a cmake or environment variable: ANDROID_NDK\" )\n else()\n #try to find Android standalone toolchain in one of the the default locations\n __INIT_VARIABLE( ANDROID_STANDALONE_TOOLCHAIN PATH ANDROID_STANDALONE_TOOLCHAIN_SEARCH_PATH )\n\n if( ANDROID_STANDALONE_TOOLCHAIN )\n message( STATUS \"Using default path for standalone toolchain ${ANDROID_STANDALONE_TOOLCHAIN}\" )\n message( STATUS \" If you prefer to use a different location, please define the variable: ANDROID_STANDALONE_TOOLCHAIN\" )\n endif( ANDROID_STANDALONE_TOOLCHAIN )\n endif( ANDROID_NDK )\n endif( NOT ANDROID_STANDALONE_TOOLCHAIN )\nendif( NOT ANDROID_NDK )\n\n# remember found paths\nif( ANDROID_NDK )\n get_filename_component( ANDROID_NDK \"${ANDROID_NDK}\" ABSOLUTE )\n set( ANDROID_NDK \"${ANDROID_NDK}\" CACHE INTERNAL \"Path of the Android NDK\" FORCE )\n set( BUILD_WITH_ANDROID_NDK True )\n if( EXISTS \"${ANDROID_NDK}/RELEASE.TXT\" )\n file( STRINGS \"${ANDROID_NDK}/RELEASE.TXT\" ANDROID_NDK_RELEASE_FULL LIMIT_COUNT 1 REGEX r[0-9]+[a-z]? )\n string( REGEX MATCH r[0-9]+[a-z]? 
ANDROID_NDK_RELEASE \"${ANDROID_NDK_RELEASE_FULL}\" )\n else()\n set( ANDROID_NDK_RELEASE \"r1x\" )\n set( ANDROID_NDK_RELEASE_FULL \"unreleased\" )\n endif()\nelseif( ANDROID_STANDALONE_TOOLCHAIN )\n get_filename_component( ANDROID_STANDALONE_TOOLCHAIN \"${ANDROID_STANDALONE_TOOLCHAIN}\" ABSOLUTE )\n # try to detect change\n if( CMAKE_AR )\n string( LENGTH \"${ANDROID_STANDALONE_TOOLCHAIN}\" __length )\n string( SUBSTRING \"${CMAKE_AR}\" 0 ${__length} __androidStandaloneToolchainPreviousPath )\n if( NOT __androidStandaloneToolchainPreviousPath STREQUAL ANDROID_STANDALONE_TOOLCHAIN )\n message( FATAL_ERROR \"It is not possible to change path to the Android standalone toolchain on subsequent run.\" )\n endif()\n unset( __androidStandaloneToolchainPreviousPath )\n unset( __length )\n endif()\n set( ANDROID_STANDALONE_TOOLCHAIN \"${ANDROID_STANDALONE_TOOLCHAIN}\" CACHE INTERNAL \"Path of the Android standalone toolchain\" FORCE )\n set( BUILD_WITH_STANDALONE_TOOLCHAIN True )\nelse()\n list(GET ANDROID_NDK_SEARCH_PATHS 0 ANDROID_NDK_SEARCH_PATH)\n message( FATAL_ERROR \"Could not find neither Android NDK nor Android standalone toolchain.\n You should either set an environment variable:\n export ANDROID_NDK=~/my-android-ndk\n or\n export ANDROID_STANDALONE_TOOLCHAIN=~/my-android-toolchain\n or put the toolchain or NDK in the default path:\n sudo ln -s ~/my-android-ndk ${ANDROID_NDK_SEARCH_PATH}\n sudo ln -s ~/my-android-toolchain ${ANDROID_STANDALONE_TOOLCHAIN_SEARCH_PATH}\" )\nendif()\n\n# android NDK layout\nif( BUILD_WITH_ANDROID_NDK )\n if( NOT DEFINED ANDROID_NDK_LAYOUT )\n # try to automatically detect the layout\n if( EXISTS \"${ANDROID_NDK}/RELEASE.TXT\")\n set( ANDROID_NDK_LAYOUT \"RELEASE\" )\n elseif( EXISTS \"${ANDROID_NDK}/../../linux-x86/toolchain/\" )\n set( ANDROID_NDK_LAYOUT \"LINARO\" )\n elseif( EXISTS \"${ANDROID_NDK}/../../gcc/\" )\n set( ANDROID_NDK_LAYOUT \"ANDROID\" )\n endif()\n endif()\n set( ANDROID_NDK_LAYOUT \"${ANDROID_NDK_LAYOUT}\" CACHE 
STRING \"The inner layout of NDK\" )\n mark_as_advanced( ANDROID_NDK_LAYOUT )\n if( ANDROID_NDK_LAYOUT STREQUAL \"LINARO\" )\n set( ANDROID_NDK_HOST_SYSTEM_NAME ${ANDROID_NDK_HOST_SYSTEM_NAME2} ) # only 32-bit at the moment\n set( ANDROID_NDK_TOOLCHAINS_PATH \"${ANDROID_NDK}/../../${ANDROID_NDK_HOST_SYSTEM_NAME}/toolchain\" )\n set( ANDROID_NDK_TOOLCHAINS_SUBPATH \"\" )\n set( ANDROID_NDK_TOOLCHAINS_SUBPATH2 \"\" )\n elseif( ANDROID_NDK_LAYOUT STREQUAL \"ANDROID\" )\n set( ANDROID_NDK_HOST_SYSTEM_NAME ${ANDROID_NDK_HOST_SYSTEM_NAME2} ) # only 32-bit at the moment\n set( ANDROID_NDK_TOOLCHAINS_PATH \"${ANDROID_NDK}/../../gcc/${ANDROID_NDK_HOST_SYSTEM_NAME}/arm\" )\n set( ANDROID_NDK_TOOLCHAINS_SUBPATH \"\" )\n set( ANDROID_NDK_TOOLCHAINS_SUBPATH2 \"\" )\n else() # ANDROID_NDK_LAYOUT STREQUAL \"RELEASE\"\n set( ANDROID_NDK_TOOLCHAINS_PATH \"${ANDROID_NDK}/toolchains\" )\n set( ANDROID_NDK_TOOLCHAINS_SUBPATH \"/prebuilt/${ANDROID_NDK_HOST_SYSTEM_NAME}\" )\n set( ANDROID_NDK_TOOLCHAINS_SUBPATH2 \"/prebuilt/${ANDROID_NDK_HOST_SYSTEM_NAME2}\" )\n endif()\n get_filename_component( ANDROID_NDK_TOOLCHAINS_PATH \"${ANDROID_NDK_TOOLCHAINS_PATH}\" ABSOLUTE )\n\n # try to detect change of NDK\n if( CMAKE_AR )\n string( LENGTH \"${ANDROID_NDK_TOOLCHAINS_PATH}\" __length )\n string( SUBSTRING \"${CMAKE_AR}\" 0 ${__length} __androidNdkPreviousPath )\n if( NOT __androidNdkPreviousPath STREQUAL ANDROID_NDK_TOOLCHAINS_PATH )\n message( FATAL_ERROR \"It is not possible to change the path to the NDK on subsequent CMake run. 
You must remove all generated files from your build folder first.\n \" )\n endif()\n unset( __androidNdkPreviousPath )\n unset( __length )\n endif()\nendif()\n\n\n# get all the details about standalone toolchain\nif( BUILD_WITH_STANDALONE_TOOLCHAIN )\n __DETECT_NATIVE_API_LEVEL( ANDROID_SUPPORTED_NATIVE_API_LEVELS \"${ANDROID_STANDALONE_TOOLCHAIN}/sysroot/usr/include/android/api-level.h\" )\n set( ANDROID_STANDALONE_TOOLCHAIN_API_LEVEL ${ANDROID_SUPPORTED_NATIVE_API_LEVELS} )\n set( __availableToolchains \"standalone\" )\n __DETECT_TOOLCHAIN_MACHINE_NAME( __availableToolchainMachines \"${ANDROID_STANDALONE_TOOLCHAIN}\" )\n if( NOT __availableToolchainMachines )\n message( FATAL_ERROR \"Could not determine machine name of your toolchain. Probably your Android standalone toolchain is broken.\" )\n endif()\n if( __availableToolchainMachines MATCHES i686 )\n set( __availableToolchainArchs \"x86\" )\n elseif( __availableToolchainMachines MATCHES arm )\n set( __availableToolchainArchs \"arm\" )\n elseif( __availableToolchainMachines MATCHES mipsel )\n set( __availableToolchainArchs \"mipsel\" )\n endif()\n execute_process( COMMAND \"${ANDROID_STANDALONE_TOOLCHAIN}/bin/${__availableToolchainMachines}-gcc${TOOL_OS_SUFFIX}\" -dumpversion\n OUTPUT_VARIABLE __availableToolchainCompilerVersions OUTPUT_STRIP_TRAILING_WHITESPACE )\n string( REGEX MATCH \"[0-9]+[.][0-9]+([.][0-9]+)?\" __availableToolchainCompilerVersions \"${__availableToolchainCompilerVersions}\" )\n if( EXISTS \"${ANDROID_STANDALONE_TOOLCHAIN}/bin/clang${TOOL_OS_SUFFIX}\" )\n list( APPEND __availableToolchains \"standalone-clang\" )\n list( APPEND __availableToolchainMachines ${__availableToolchainMachines} )\n list( APPEND __availableToolchainArchs ${__availableToolchainArchs} )\n list( APPEND __availableToolchainCompilerVersions ${__availableToolchainCompilerVersions} )\n endif()\nendif()\n\nmacro( __GLOB_NDK_TOOLCHAINS __availableToolchainsVar __availableToolchainsLst __toolchain_subpath )\n foreach( 
__toolchain ${${__availableToolchainsLst}} )\n if( \"${__toolchain}\" MATCHES \"-clang3[.][0-9]$\" AND NOT EXISTS \"${ANDROID_NDK_TOOLCHAINS_PATH}/${__toolchain}${__toolchain_subpath}\" )\n string( REGEX REPLACE \"-clang3[.][0-9]$\" \"-4.6\" __gcc_toolchain \"${__toolchain}\" )\n else()\n set( __gcc_toolchain \"${__toolchain}\" )\n endif()\n __DETECT_TOOLCHAIN_MACHINE_NAME( __machine \"${ANDROID_NDK_TOOLCHAINS_PATH}/${__gcc_toolchain}${__toolchain_subpath}\" )\n if( __machine )\n string( REGEX MATCH \"[0-9]+[.][0-9]+([.][0-9x]+)?$\" __version \"${__gcc_toolchain}\" )\n if( __machine MATCHES i686 )\n set( __arch \"x86\" )\n elseif( __machine MATCHES arm )\n set( __arch \"arm\" )\n elseif( __machine MATCHES mipsel )\n set( __arch \"mipsel\" )\n endif()\n list( APPEND __availableToolchainMachines \"${__machine}\" )\n list( APPEND __availableToolchainArchs \"${__arch}\" )\n list( APPEND __availableToolchainCompilerVersions \"${__version}\" )\n list( APPEND ${__availableToolchainsVar} \"${__toolchain}\" )\n endif()\n unset( __gcc_toolchain )\n endforeach()\nendmacro()\n\n# get all the details about NDK\nif( BUILD_WITH_ANDROID_NDK )\n file( GLOB ANDROID_SUPPORTED_NATIVE_API_LEVELS RELATIVE \"${ANDROID_NDK}/platforms\" \"${ANDROID_NDK}/platforms/android-*\" )\n string( REPLACE \"android-\" \"\" ANDROID_SUPPORTED_NATIVE_API_LEVELS \"${ANDROID_SUPPORTED_NATIVE_API_LEVELS}\" )\n set( __availableToolchains \"\" )\n set( __availableToolchainMachines \"\" )\n set( __availableToolchainArchs \"\" )\n set( __availableToolchainCompilerVersions \"\" )\n if( ANDROID_TOOLCHAIN_NAME AND EXISTS \"${ANDROID_NDK_TOOLCHAINS_PATH}/${ANDROID_TOOLCHAIN_NAME}/\" )\n # do not go through all toolchains if we know the name\n set( __availableToolchainsLst \"${ANDROID_TOOLCHAIN_NAME}\" )\n __GLOB_NDK_TOOLCHAINS( __availableToolchains __availableToolchainsLst \"${ANDROID_NDK_TOOLCHAINS_SUBPATH}\" )\n if( NOT __availableToolchains AND NOT ANDROID_NDK_TOOLCHAINS_SUBPATH STREQUAL 
ANDROID_NDK_TOOLCHAINS_SUBPATH2 )\n __GLOB_NDK_TOOLCHAINS( __availableToolchains __availableToolchainsLst \"${ANDROID_NDK_TOOLCHAINS_SUBPATH2}\" )\n if( __availableToolchains )\n set( ANDROID_NDK_TOOLCHAINS_SUBPATH ${ANDROID_NDK_TOOLCHAINS_SUBPATH2} )\n endif()\n endif()\n endif()\n if( NOT __availableToolchains )\n file( GLOB __availableToolchainsLst RELATIVE \"${ANDROID_NDK_TOOLCHAINS_PATH}\" \"${ANDROID_NDK_TOOLCHAINS_PATH}/*\" )\n if( __availableToolchains )\n list(SORT __availableToolchainsLst) # we need clang to go after gcc\n endif()\n __LIST_FILTER( __availableToolchainsLst \"^[.]\" )\n __LIST_FILTER( __availableToolchainsLst \"llvm\" )\n __LIST_FILTER( __availableToolchainsLst \"renderscript\" )\n __GLOB_NDK_TOOLCHAINS( __availableToolchains __availableToolchainsLst \"${ANDROID_NDK_TOOLCHAINS_SUBPATH}\" )\n if( NOT __availableToolchains AND NOT ANDROID_NDK_TOOLCHAINS_SUBPATH STREQUAL ANDROID_NDK_TOOLCHAINS_SUBPATH2 )\n __GLOB_NDK_TOOLCHAINS( __availableToolchains __availableToolchainsLst \"${ANDROID_NDK_TOOLCHAINS_SUBPATH2}\" )\n if( __availableToolchains )\n set( ANDROID_NDK_TOOLCHAINS_SUBPATH ${ANDROID_NDK_TOOLCHAINS_SUBPATH2} )\n endif()\n endif()\n endif()\n if( NOT __availableToolchains )\n message( FATAL_ERROR \"Could not find any working toolchain in the NDK. 
Probably your Android NDK is broken.\" )\n endif()\nendif()\n\n# build list of available ABIs\nset( ANDROID_SUPPORTED_ABIS \"\" )\nset( __uniqToolchainArchNames ${__availableToolchainArchs} )\nlist( REMOVE_DUPLICATES __uniqToolchainArchNames )\nlist( SORT __uniqToolchainArchNames )\nforeach( __arch ${__uniqToolchainArchNames} )\n list( APPEND ANDROID_SUPPORTED_ABIS ${ANDROID_SUPPORTED_ABIS_${__arch}} )\nendforeach()\nunset( __uniqToolchainArchNames )\nif( NOT ANDROID_SUPPORTED_ABIS )\n message( FATAL_ERROR \"No one of known Android ABIs is supported by this cmake toolchain.\" )\nendif()\n\n# choose target ABI\n__INIT_VARIABLE( ANDROID_ABI OBSOLETE_ARM_TARGET OBSOLETE_ARM_TARGETS VALUES ${ANDROID_SUPPORTED_ABIS} )\n# verify that target ABI is supported\nlist( FIND ANDROID_SUPPORTED_ABIS \"${ANDROID_ABI}\" __androidAbiIdx )\nif( __androidAbiIdx EQUAL -1 )\n string( REPLACE \";\" \"\\\", \\\"\" PRINTABLE_ANDROID_SUPPORTED_ABIS \"${ANDROID_SUPPORTED_ABIS}\" )\n message( FATAL_ERROR \"Specified ANDROID_ABI = \\\"${ANDROID_ABI}\\\" is not supported by this cmake toolchain or your NDK/toolchain.\n Supported values are: \\\"${PRINTABLE_ANDROID_SUPPORTED_ABIS}\\\"\n \" )\nendif()\nunset( __androidAbiIdx )\n\n# set target ABI options\nif( ANDROID_ABI STREQUAL \"x86\" )\n set( X86 true )\n set( ANDROID_NDK_ABI_NAME \"x86\" )\n set( ANDROID_ARCH_NAME \"x86\" )\n set( ANDROID_ARCH_FULLNAME \"x86\" )\n set( ANDROID_LLVM_TRIPLE \"i686-none-linux-android\" )\n set( CMAKE_SYSTEM_PROCESSOR \"i686\" )\nelseif( ANDROID_ABI STREQUAL \"mips\" )\n set( MIPS true )\n set( ANDROID_NDK_ABI_NAME \"mips\" )\n set( ANDROID_ARCH_NAME \"mips\" )\n set( ANDROID_ARCH_FULLNAME \"mipsel\" )\n set( ANDROID_LLVM_TRIPLE \"mipsel-none-linux-android\" )\n set( CMAKE_SYSTEM_PROCESSOR \"mips\" )\nelseif( ANDROID_ABI STREQUAL \"armeabi\" )\n set( ARMEABI true )\n set( ANDROID_NDK_ABI_NAME \"armeabi\" )\n set( ANDROID_ARCH_NAME \"arm\" )\n set( ANDROID_ARCH_FULLNAME \"arm\" )\n set( ANDROID_LLVM_TRIPLE 
\"armv5te-none-linux-androideabi\" )\n set( CMAKE_SYSTEM_PROCESSOR \"armv5te\" )\nelseif( ANDROID_ABI STREQUAL \"armeabi-v6 with VFP\" )\n set( ARMEABI_V6 true )\n set( ANDROID_NDK_ABI_NAME \"armeabi\" )\n set( ANDROID_ARCH_NAME \"arm\" )\n set( ANDROID_ARCH_FULLNAME \"arm\" )\n set( ANDROID_LLVM_TRIPLE \"armv5te-none-linux-androideabi\" )\n set( CMAKE_SYSTEM_PROCESSOR \"armv6\" )\n # need always fallback to older platform\n set( ARMEABI true )\nelseif( ANDROID_ABI STREQUAL \"armeabi-v7a\")\n set( ARMEABI_V7A true )\n set( ANDROID_NDK_ABI_NAME \"armeabi-v7a\" )\n set( ANDROID_ARCH_NAME \"arm\" )\n set( ANDROID_ARCH_FULLNAME \"arm\" )\n set( ANDROID_LLVM_TRIPLE \"armv7-none-linux-androideabi\" )\n set( CMAKE_SYSTEM_PROCESSOR \"armv7-a\" )\nelseif( ANDROID_ABI STREQUAL \"armeabi-v7a with VFPV3\" )\n set( ARMEABI_V7A true )\n set( ANDROID_NDK_ABI_NAME \"armeabi-v7a\" )\n set( ANDROID_ARCH_NAME \"arm\" )\n set( ANDROID_ARCH_FULLNAME \"arm\" )\n set( ANDROID_LLVM_TRIPLE \"armv7-none-linux-androideabi\" )\n set( CMAKE_SYSTEM_PROCESSOR \"armv7-a\" )\n set( VFPV3 true )\nelseif( ANDROID_ABI STREQUAL \"armeabi-v7a with NEON\" )\n set( ARMEABI_V7A true )\n set( ANDROID_NDK_ABI_NAME \"armeabi-v7a\" )\n set( ANDROID_ARCH_NAME \"arm\" )\n set( ANDROID_ARCH_FULLNAME \"arm\" )\n set( ANDROID_LLVM_TRIPLE \"armv7-none-linux-androideabi\" )\n set( CMAKE_SYSTEM_PROCESSOR \"armv7-a\" )\n set( VFPV3 true )\n set( NEON true )\nelse()\n message( SEND_ERROR \"Unknown ANDROID_ABI=\\\"${ANDROID_ABI}\\\" is specified.\" )\nendif()\n\nif( CMAKE_BINARY_DIR AND EXISTS \"${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeSystem.cmake\" )\n # really dirty hack\n # it is not possible to change CMAKE_SYSTEM_PROCESSOR after the first run...\n file( APPEND \"${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeSystem.cmake\" \"SET(CMAKE_SYSTEM_PROCESSOR \\\"${CMAKE_SYSTEM_PROCESSOR}\\\")\\n\" )\nendif()\n\nif( ANDROID_ARCH_NAME STREQUAL \"arm\" AND NOT ARMEABI_V6 )\n __INIT_VARIABLE( 
ANDROID_FORCE_ARM_BUILD OBSOLETE_FORCE_ARM VALUES OFF )\n set( ANDROID_FORCE_ARM_BUILD ${ANDROID_FORCE_ARM_BUILD} CACHE BOOL \"Use 32-bit ARM instructions instead of Thumb-1\" FORCE )\n mark_as_advanced( ANDROID_FORCE_ARM_BUILD )\nelse()\n unset( ANDROID_FORCE_ARM_BUILD CACHE )\nendif()\n\n# choose toolchain\nif( ANDROID_TOOLCHAIN_NAME )\n list( FIND __availableToolchains \"${ANDROID_TOOLCHAIN_NAME}\" __toolchainIdx )\n if( __toolchainIdx EQUAL -1 )\n list( SORT __availableToolchains )\n string( REPLACE \";\" \"\\n * \" toolchains_list \"${__availableToolchains}\" )\n set( toolchains_list \" * ${toolchains_list}\")\n message( FATAL_ERROR \"Specified toolchain \\\"${ANDROID_TOOLCHAIN_NAME}\\\" is missing in your NDK or broken. Please verify that your NDK is working or select another compiler toolchain.\nTo configure the toolchain set CMake variable ANDROID_TOOLCHAIN_NAME to one of the following values:\\n${toolchains_list}\\n\" )\n endif()\n list( GET __availableToolchainArchs ${__toolchainIdx} __toolchainArch )\n if( NOT __toolchainArch STREQUAL ANDROID_ARCH_FULLNAME )\n message( SEND_ERROR \"Selected toolchain \\\"${ANDROID_TOOLCHAIN_NAME}\\\" is not able to compile binaries for the \\\"${ANDROID_ARCH_NAME}\\\" platform.\" )\n endif()\nelse()\n set( __toolchainIdx -1 )\n set( __applicableToolchains \"\" )\n set( __toolchainMaxVersion \"0.0.0\" )\n list( LENGTH __availableToolchains __availableToolchainsCount )\n math( EXPR __availableToolchainsCount \"${__availableToolchainsCount}-1\" )\n foreach( __idx RANGE ${__availableToolchainsCount} )\n list( GET __availableToolchainArchs ${__idx} __toolchainArch )\n if( __toolchainArch STREQUAL ANDROID_ARCH_FULLNAME )\n list( GET __availableToolchainCompilerVersions ${__idx} __toolchainVersion )\n string( REPLACE \"x\" \"99\" __toolchainVersion \"${__toolchainVersion}\")\n if( __toolchainVersion VERSION_GREATER __toolchainMaxVersion )\n set( __toolchainMaxVersion \"${__toolchainVersion}\" )\n set( __toolchainIdx ${__idx} 
)\n endif()\n endif()\n endforeach()\n unset( __availableToolchainsCount )\n unset( __toolchainMaxVersion )\n unset( __toolchainVersion )\nendif()\nunset( __toolchainArch )\nif( __toolchainIdx EQUAL -1 )\n message( FATAL_ERROR \"No one of available compiler toolchains is able to compile for ${ANDROID_ARCH_NAME} platform.\" )\nendif()\nlist( GET __availableToolchains ${__toolchainIdx} ANDROID_TOOLCHAIN_NAME )\nlist( GET __availableToolchainMachines ${__toolchainIdx} ANDROID_TOOLCHAIN_MACHINE_NAME )\nlist( GET __availableToolchainCompilerVersions ${__toolchainIdx} ANDROID_COMPILER_VERSION )\n\nunset( __toolchainIdx )\nunset( __availableToolchains )\nunset( __availableToolchainMachines )\nunset( __availableToolchainArchs )\nunset( __availableToolchainCompilerVersions )\n\n# choose native API level\n__INIT_VARIABLE( ANDROID_NATIVE_API_LEVEL ENV_ANDROID_NATIVE_API_LEVEL ANDROID_API_LEVEL ENV_ANDROID_API_LEVEL ANDROID_STANDALONE_TOOLCHAIN_API_LEVEL ANDROID_DEFAULT_NDK_API_LEVEL_${ANDROID_ARCH_NAME} ANDROID_DEFAULT_NDK_API_LEVEL )\nstring( REGEX MATCH \"[0-9]+\" ANDROID_NATIVE_API_LEVEL \"${ANDROID_NATIVE_API_LEVEL}\" )\n# adjust API level\nset( __real_api_level ${ANDROID_DEFAULT_NDK_API_LEVEL_${ANDROID_ARCH_NAME}} )\nforeach( __level ${ANDROID_SUPPORTED_NATIVE_API_LEVELS} )\n if( NOT __level GREATER ANDROID_NATIVE_API_LEVEL AND NOT __level LESS __real_api_level )\n set( __real_api_level ${__level} )\n endif()\nendforeach()\nif( __real_api_level AND NOT ANDROID_NATIVE_API_LEVEL EQUAL __real_api_level )\n message( STATUS \"Adjusting Android API level 'android-${ANDROID_NATIVE_API_LEVEL}' to 'android-${__real_api_level}'\")\n set( ANDROID_NATIVE_API_LEVEL ${__real_api_level} )\nendif()\nunset(__real_api_level)\n# validate\nlist( FIND ANDROID_SUPPORTED_NATIVE_API_LEVELS \"${ANDROID_NATIVE_API_LEVEL}\" __levelIdx )\nif( __levelIdx EQUAL -1 )\n message( SEND_ERROR \"Specified Android native API level 'android-${ANDROID_NATIVE_API_LEVEL}' is not supported by your 
NDK/toolchain.\" )\nelse()\n if( BUILD_WITH_ANDROID_NDK )\n __DETECT_NATIVE_API_LEVEL( __realApiLevel \"${ANDROID_NDK}/platforms/android-${ANDROID_NATIVE_API_LEVEL}/arch-${ANDROID_ARCH_NAME}/usr/include/android/api-level.h\" )\n if( NOT __realApiLevel EQUAL ANDROID_NATIVE_API_LEVEL )\n message( SEND_ERROR \"Specified Android API level (${ANDROID_NATIVE_API_LEVEL}) does not match to the level found (${__realApiLevel}). Probably your copy of NDK is broken.\" )\n endif()\n unset( __realApiLevel )\n endif()\n set( ANDROID_NATIVE_API_LEVEL \"${ANDROID_NATIVE_API_LEVEL}\" CACHE STRING \"Android API level for native code\" FORCE )\n if( CMAKE_VERSION VERSION_GREATER \"2.8\" )\n list( SORT ANDROID_SUPPORTED_NATIVE_API_LEVELS )\n set_property( CACHE ANDROID_NATIVE_API_LEVEL PROPERTY STRINGS ${ANDROID_SUPPORTED_NATIVE_API_LEVELS} )\n endif()\nendif()\nunset( __levelIdx )\n\n\n# remember target ABI\nset( ANDROID_ABI \"${ANDROID_ABI}\" CACHE STRING \"The target ABI for Android. If arm, then armeabi-v7a is recommended for hardware floating point.\" FORCE )\nif( CMAKE_VERSION VERSION_GREATER \"2.8\" )\n list( SORT ANDROID_SUPPORTED_ABIS_${ANDROID_ARCH_FULLNAME} )\n set_property( CACHE ANDROID_ABI PROPERTY STRINGS ${ANDROID_SUPPORTED_ABIS_${ANDROID_ARCH_FULLNAME}} )\nendif()\n\n\n# runtime choice (STL, rtti, exceptions)\nif( NOT ANDROID_STL )\n # honor legacy ANDROID_USE_STLPORT\n if( DEFINED ANDROID_USE_STLPORT )\n if( ANDROID_USE_STLPORT )\n set( ANDROID_STL stlport_static )\n endif()\n message( WARNING \"You are using an obsolete variable ANDROID_USE_STLPORT to select the STL variant. 
Use -DANDROID_STL=stlport_static instead.\" )\n endif()\n if( NOT ANDROID_STL )\n set( ANDROID_STL gnustl_static )\n endif()\nendif()\nset( ANDROID_STL \"${ANDROID_STL}\" CACHE STRING \"C++ runtime\" )\nset( ANDROID_STL_FORCE_FEATURES ON CACHE BOOL \"automatically configure rtti and exceptions support based on C++ runtime\" )\nmark_as_advanced( ANDROID_STL ANDROID_STL_FORCE_FEATURES )\n\nif( BUILD_WITH_ANDROID_NDK )\n if( NOT \"${ANDROID_STL}\" MATCHES \"^(none|system|system_re|gabi\\\\+\\\\+_static|gabi\\\\+\\\\+_shared|stlport_static|stlport_shared|gnustl_static|gnustl_shared)$\")\n message( FATAL_ERROR \"ANDROID_STL is set to invalid value \\\"${ANDROID_STL}\\\".\nThe possible values are:\n none -> Do not configure the runtime.\n system -> Use the default minimal system C++ runtime library.\n system_re -> Same as system but with rtti and exceptions.\n gabi++_static -> Use the GAbi++ runtime as a static library.\n gabi++_shared -> Use the GAbi++ runtime as a shared library.\n stlport_static -> Use the STLport runtime as a static library.\n stlport_shared -> Use the STLport runtime as a shared library.\n gnustl_static -> (default) Use the GNU STL as a static library.\n gnustl_shared -> Use the GNU STL as a shared library.\n\" )\n endif()\nelseif( BUILD_WITH_STANDALONE_TOOLCHAIN )\n if( NOT \"${ANDROID_STL}\" MATCHES \"^(none|gnustl_static|gnustl_shared)$\")\n message( FATAL_ERROR \"ANDROID_STL is set to invalid value \\\"${ANDROID_STL}\\\".\nThe possible values are:\n none -> Do not configure the runtime.\n gnustl_static -> (default) Use the GNU STL as a static library.\n gnustl_shared -> Use the GNU STL as a shared library.\n\" )\n endif()\nendif()\n\nunset( ANDROID_RTTI )\nunset( ANDROID_EXCEPTIONS )\nunset( ANDROID_STL_INCLUDE_DIRS )\nunset( __libstl )\nunset( __libsupcxx )\n\nif( NOT _CMAKE_IN_TRY_COMPILE AND ANDROID_NDK_RELEASE STREQUAL \"r7b\" AND ARMEABI_V7A AND NOT VFPV3 AND ANDROID_STL MATCHES \"gnustl\" )\n message( WARNING \"The GNU STL armeabi-v7a 
binaries from NDK r7b can crash non-NEON devices. The files provided with NDK r7b were not configured properly, resulting in crashes on Tegra2-based devices and others when trying to use certain floating-point functions (e.g., cosf, sinf, expf).\nYou are strongly recommended to switch to another NDK release.\n\" )\nendif()\n\nif( NOT _CMAKE_IN_TRY_COMPILE AND X86 AND ANDROID_STL MATCHES \"gnustl\" AND ANDROID_NDK_RELEASE STREQUAL \"r6\" )\n message( WARNING \"The x86 system header file from NDK r6 has incorrect definition for ptrdiff_t. You are recommended to upgrade to a newer NDK release or manually patch the header:\nSee https://android.googlesource.com/platform/development.git f907f4f9d4e56ccc8093df6fee54454b8bcab6c2\n diff --git a/ndk/platforms/android-9/arch-x86/include/machine/_types.h b/ndk/platforms/android-9/arch-x86/include/machine/_types.h\n index 5e28c64..65892a1 100644\n --- a/ndk/platforms/android-9/arch-x86/include/machine/_types.h\n +++ b/ndk/platforms/android-9/arch-x86/include/machine/_types.h\n @@ -51,7 +51,11 @@ typedef long int ssize_t;\n #endif\n #ifndef _PTRDIFF_T\n #define _PTRDIFF_T\n -typedef long ptrdiff_t;\n +# ifdef __ANDROID__\n + typedef int ptrdiff_t;\n +# else\n + typedef long ptrdiff_t;\n +# endif\n #endif\n\" )\nendif()\n\n\n# setup paths and STL for standalone toolchain\nif( BUILD_WITH_STANDALONE_TOOLCHAIN )\n set( ANDROID_TOOLCHAIN_ROOT \"${ANDROID_STANDALONE_TOOLCHAIN}\" )\n set( ANDROID_CLANG_TOOLCHAIN_ROOT \"${ANDROID_STANDALONE_TOOLCHAIN}\" )\n set( ANDROID_SYSROOT \"${ANDROID_STANDALONE_TOOLCHAIN}/sysroot\" )\n\n if( NOT ANDROID_STL STREQUAL \"none\" )\n set( ANDROID_STL_INCLUDE_DIRS \"${ANDROID_STANDALONE_TOOLCHAIN}/include/c++/${ANDROID_COMPILER_VERSION}\" )\n if( NOT EXISTS \"${ANDROID_STL_INCLUDE_DIRS}\" )\n # old location ( pre r8c )\n set( ANDROID_STL_INCLUDE_DIRS \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/include/c++/${ANDROID_COMPILER_VERSION}\" )\n endif()\n if( ARMEABI_V7A AND EXISTS 
\"${ANDROID_STL_INCLUDE_DIRS}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/${CMAKE_SYSTEM_PROCESSOR}/bits\" )\n list( APPEND ANDROID_STL_INCLUDE_DIRS \"${ANDROID_STL_INCLUDE_DIRS}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/${CMAKE_SYSTEM_PROCESSOR}\" )\n elseif( ARMEABI AND NOT ANDROID_FORCE_ARM_BUILD AND EXISTS \"${ANDROID_STL_INCLUDE_DIRS}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/thumb/bits\" )\n list( APPEND ANDROID_STL_INCLUDE_DIRS \"${ANDROID_STL_INCLUDE_DIRS}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/thumb\" )\n else()\n list( APPEND ANDROID_STL_INCLUDE_DIRS \"${ANDROID_STL_INCLUDE_DIRS}/${ANDROID_TOOLCHAIN_MACHINE_NAME}\" )\n endif()\n # always search static GNU STL to get the location of libsupc++.a\n if( ARMEABI_V7A AND NOT ANDROID_FORCE_ARM_BUILD AND EXISTS \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/${CMAKE_SYSTEM_PROCESSOR}/thumb/libstdc++.a\" )\n set( __libstl \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/${CMAKE_SYSTEM_PROCESSOR}/thumb\" )\n elseif( ARMEABI_V7A AND EXISTS \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/${CMAKE_SYSTEM_PROCESSOR}/libstdc++.a\" )\n set( __libstl \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/${CMAKE_SYSTEM_PROCESSOR}\" )\n elseif( ARMEABI AND NOT ANDROID_FORCE_ARM_BUILD AND EXISTS \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/thumb/libstdc++.a\" )\n set( __libstl \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/thumb\" )\n elseif( EXISTS \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/libstdc++.a\" )\n set( __libstl \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib\" )\n endif()\n if( __libstl )\n set( __libsupcxx \"${__libstl}/libsupc++.a\" )\n set( __libstl \"${__libstl}/libstdc++.a\" )\n endif()\n if( NOT EXISTS \"${__libsupcxx}\" )\n message( FATAL_ERROR \"The required libstdsupc++.a is missing in your standalone toolchain.\n Usually it happens because of 
bug in make-standalone-toolchain.sh script from NDK r7, r7b and r7c.\n You need to either upgrade to newer NDK or manually copy\n $ANDROID_NDK/sources/cxx-stl/gnu-libstdc++/libs/${ANDROID_NDK_ABI_NAME}/libsupc++.a\n to\n ${__libsupcxx}\n \" )\n endif()\n if( ANDROID_STL STREQUAL \"gnustl_shared\" )\n if( ARMEABI_V7A AND EXISTS \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/${CMAKE_SYSTEM_PROCESSOR}/libgnustl_shared.so\" )\n set( __libstl \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/${CMAKE_SYSTEM_PROCESSOR}/libgnustl_shared.so\" )\n elseif( ARMEABI AND NOT ANDROID_FORCE_ARM_BUILD AND EXISTS \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/thumb/libgnustl_shared.so\" )\n set( __libstl \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/thumb/libgnustl_shared.so\" )\n elseif( EXISTS \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/libgnustl_shared.so\" )\n set( __libstl \"${ANDROID_STANDALONE_TOOLCHAIN}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/libgnustl_shared.so\" )\n endif()\n endif()\n endif()\nendif()\n\n# clang\nif( \"${ANDROID_TOOLCHAIN_NAME}\" STREQUAL \"standalone-clang\" )\n set( ANDROID_COMPILER_IS_CLANG 1 )\n execute_process( COMMAND \"${ANDROID_CLANG_TOOLCHAIN_ROOT}/bin/clang${TOOL_OS_SUFFIX}\" --version OUTPUT_VARIABLE ANDROID_CLANG_VERSION OUTPUT_STRIP_TRAILING_WHITESPACE )\n string( REGEX MATCH \"[0-9]+[.][0-9]+\" ANDROID_CLANG_VERSION \"${ANDROID_CLANG_VERSION}\")\nelseif( \"${ANDROID_TOOLCHAIN_NAME}\" MATCHES \"-clang3[.][0-9]?$\" )\n string( REGEX MATCH \"3[.][0-9]$\" ANDROID_CLANG_VERSION \"${ANDROID_TOOLCHAIN_NAME}\")\n string( REGEX REPLACE \"-clang${ANDROID_CLANG_VERSION}$\" \"-4.6\" ANDROID_GCC_TOOLCHAIN_NAME \"${ANDROID_TOOLCHAIN_NAME}\" )\n if( NOT EXISTS \"${ANDROID_NDK_TOOLCHAINS_PATH}/llvm-${ANDROID_CLANG_VERSION}${ANDROID_NDK_TOOLCHAINS_SUBPATH}/bin/clang${TOOL_OS_SUFFIX}\" )\n message( FATAL_ERROR \"Could not find the Clang 
compiler driver\" )\n endif()\n set( ANDROID_COMPILER_IS_CLANG 1 )\n set( ANDROID_CLANG_TOOLCHAIN_ROOT \"${ANDROID_NDK_TOOLCHAINS_PATH}/llvm-${ANDROID_CLANG_VERSION}${ANDROID_NDK_TOOLCHAINS_SUBPATH}\" )\nelse()\n set( ANDROID_GCC_TOOLCHAIN_NAME \"${ANDROID_TOOLCHAIN_NAME}\" )\n unset( ANDROID_COMPILER_IS_CLANG CACHE )\nendif()\n\nstring( REPLACE \".\" \"\" _clang_name \"clang${ANDROID_CLANG_VERSION}\" )\nif( NOT EXISTS \"${ANDROID_CLANG_TOOLCHAIN_ROOT}/bin/${_clang_name}${TOOL_OS_SUFFIX}\" )\n set( _clang_name \"clang\" )\nendif()\n\n\n# setup paths and STL for NDK\nif( BUILD_WITH_ANDROID_NDK )\n set( ANDROID_TOOLCHAIN_ROOT \"${ANDROID_NDK_TOOLCHAINS_PATH}/${ANDROID_GCC_TOOLCHAIN_NAME}${ANDROID_NDK_TOOLCHAINS_SUBPATH}\" )\n set( ANDROID_SYSROOT \"${ANDROID_NDK}/platforms/android-${ANDROID_NATIVE_API_LEVEL}/arch-${ANDROID_ARCH_NAME}\" )\n\n if( ANDROID_STL STREQUAL \"none\" )\n # do nothing\n elseif( ANDROID_STL STREQUAL \"system\" )\n set( ANDROID_RTTI OFF )\n set( ANDROID_EXCEPTIONS OFF )\n set( ANDROID_STL_INCLUDE_DIRS \"${ANDROID_NDK}/sources/cxx-stl/system/include\" )\n elseif( ANDROID_STL STREQUAL \"system_re\" )\n set( ANDROID_RTTI ON )\n set( ANDROID_EXCEPTIONS ON )\n set( ANDROID_STL_INCLUDE_DIRS \"${ANDROID_NDK}/sources/cxx-stl/system/include\" )\n elseif( ANDROID_STL MATCHES \"gabi\" )\n if( ANDROID_NDK_RELEASE STRLESS \"r7\" )\n message( FATAL_ERROR \"gabi++ is not awailable in your NDK. 
You have to upgrade to NDK r7 or newer to use gabi++.\")\n endif()\n set( ANDROID_RTTI ON )\n set( ANDROID_EXCEPTIONS OFF )\n set( ANDROID_STL_INCLUDE_DIRS \"${ANDROID_NDK}/sources/cxx-stl/gabi++/include\" )\n set( __libstl \"${ANDROID_NDK}/sources/cxx-stl/gabi++/libs/${ANDROID_NDK_ABI_NAME}/libgabi++_static.a\" )\n elseif( ANDROID_STL MATCHES \"stlport\" )\n if( NOT ANDROID_NDK_RELEASE STRLESS \"r8d\" )\n set( ANDROID_EXCEPTIONS ON )\n else()\n set( ANDROID_EXCEPTIONS OFF )\n endif()\n if( ANDROID_NDK_RELEASE STRLESS \"r7\" )\n set( ANDROID_RTTI OFF )\n else()\n set( ANDROID_RTTI ON )\n endif()\n set( ANDROID_STL_INCLUDE_DIRS \"${ANDROID_NDK}/sources/cxx-stl/stlport/stlport\" )\n set( __libstl \"${ANDROID_NDK}/sources/cxx-stl/stlport/libs/${ANDROID_NDK_ABI_NAME}/libstlport_static.a\" )\n elseif( ANDROID_STL MATCHES \"gnustl\" )\n set( ANDROID_EXCEPTIONS ON )\n set( ANDROID_RTTI ON )\n if( EXISTS \"${ANDROID_NDK}/sources/cxx-stl/gnu-libstdc++/${ANDROID_COMPILER_VERSION}\" )\n if( ARMEABI_V7A AND ANDROID_COMPILER_VERSION VERSION_EQUAL \"4.7\" AND ANDROID_NDK_RELEASE STREQUAL \"r8d\" )\n # gnustl binary for 4.7 compiler is buggy :(\n # TODO: look for right fix\n set( __libstl \"${ANDROID_NDK}/sources/cxx-stl/gnu-libstdc++/4.6\" )\n else()\n set( __libstl \"${ANDROID_NDK}/sources/cxx-stl/gnu-libstdc++/${ANDROID_COMPILER_VERSION}\" )\n endif()\n else()\n set( __libstl \"${ANDROID_NDK}/sources/cxx-stl/gnu-libstdc++\" )\n endif()\n set( ANDROID_STL_INCLUDE_DIRS \"${__libstl}/include\" \"${__libstl}/libs/${ANDROID_NDK_ABI_NAME}/include\" )\n if( EXISTS \"${__libstl}/libs/${ANDROID_NDK_ABI_NAME}/libgnustl_static.a\" )\n set( __libstl \"${__libstl}/libs/${ANDROID_NDK_ABI_NAME}/libgnustl_static.a\" )\n else()\n set( __libstl \"${__libstl}/libs/${ANDROID_NDK_ABI_NAME}/libstdc++.a\" )\n endif()\n else()\n message( FATAL_ERROR \"Unknown runtime: ${ANDROID_STL}\" )\n endif()\n # find libsupc++.a - rtti & exceptions\n if( ANDROID_STL STREQUAL \"system_re\" OR ANDROID_STL MATCHES 
\"gnustl\" )\n set( __libsupcxx \"${ANDROID_NDK}/sources/cxx-stl/gnu-libstdc++/${ANDROID_COMPILER_VERSION}/libs/${ANDROID_NDK_ABI_NAME}/libsupc++.a\" ) # r8b or newer\n if( NOT EXISTS \"${__libsupcxx}\" )\n set( __libsupcxx \"${ANDROID_NDK}/sources/cxx-stl/gnu-libstdc++/libs/${ANDROID_NDK_ABI_NAME}/libsupc++.a\" ) # r7-r8\n endif()\n if( NOT EXISTS \"${__libsupcxx}\" ) # before r7\n if( ARMEABI_V7A )\n if( ANDROID_FORCE_ARM_BUILD )\n set( __libsupcxx \"${ANDROID_TOOLCHAIN_ROOT}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/${CMAKE_SYSTEM_PROCESSOR}/libsupc++.a\" )\n else()\n set( __libsupcxx \"${ANDROID_TOOLCHAIN_ROOT}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/${CMAKE_SYSTEM_PROCESSOR}/thumb/libsupc++.a\" )\n endif()\n elseif( ARMEABI AND NOT ANDROID_FORCE_ARM_BUILD )\n set( __libsupcxx \"${ANDROID_TOOLCHAIN_ROOT}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/thumb/libsupc++.a\" )\n else()\n set( __libsupcxx \"${ANDROID_TOOLCHAIN_ROOT}/${ANDROID_TOOLCHAIN_MACHINE_NAME}/lib/libsupc++.a\" )\n endif()\n endif()\n if( NOT EXISTS \"${__libsupcxx}\")\n message( ERROR \"Could not find libsupc++.a for a chosen platform. 
Either your NDK is not supported or is broken.\")\n endif()\n endif()\nendif()\n\n\n# case of shared STL linkage\nif( ANDROID_STL MATCHES \"shared\" AND DEFINED __libstl )\n string( REPLACE \"_static.a\" \"_shared.so\" __libstl \"${__libstl}\" )\n # TODO: check if .so file exists before the renaming\nendif()\n\n\n# ccache support\n__INIT_VARIABLE( _ndk_ccache NDK_CCACHE ENV_NDK_CCACHE )\nif( _ndk_ccache )\n if( DEFINED NDK_CCACHE AND NOT EXISTS NDK_CCACHE )\n unset( NDK_CCACHE CACHE )\n endif()\n find_program( NDK_CCACHE \"${_ndk_ccache}\" DOC \"The path to ccache binary\")\nelse()\n unset( NDK_CCACHE CACHE )\nendif()\nunset( _ndk_ccache )\n\n\n# setup the cross-compiler\nif( NOT CMAKE_C_COMPILER )\n if( NDK_CCACHE AND NOT ANDROID_SYSROOT MATCHES \"[ ;\\\"]\" )\n set( CMAKE_C_COMPILER \"${NDK_CCACHE}\" CACHE PATH \"ccache as C compiler\" )\n set( CMAKE_CXX_COMPILER \"${NDK_CCACHE}\" CACHE PATH \"ccache as C++ compiler\" )\n if( ANDROID_COMPILER_IS_CLANG )\n set( CMAKE_C_COMPILER_ARG1 \"${ANDROID_CLANG_TOOLCHAIN_ROOT}/bin/${_clang_name}${TOOL_OS_SUFFIX}\" CACHE PATH \"C compiler\")\n set( CMAKE_CXX_COMPILER_ARG1 \"${ANDROID_CLANG_TOOLCHAIN_ROOT}/bin/${_clang_name}++${TOOL_OS_SUFFIX}\" CACHE PATH \"C++ compiler\")\n else()\n set( CMAKE_C_COMPILER_ARG1 \"${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_MACHINE_NAME}-gcc${TOOL_OS_SUFFIX}\" CACHE PATH \"C compiler\")\n set( CMAKE_CXX_COMPILER_ARG1 \"${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_MACHINE_NAME}-g++${TOOL_OS_SUFFIX}\" CACHE PATH \"C++ compiler\")\n endif()\n else()\n if( ANDROID_COMPILER_IS_CLANG )\n set( CMAKE_C_COMPILER \"${ANDROID_CLANG_TOOLCHAIN_ROOT}/bin/${_clang_name}${TOOL_OS_SUFFIX}\" CACHE PATH \"C compiler\")\n set( CMAKE_CXX_COMPILER \"${ANDROID_CLANG_TOOLCHAIN_ROOT}/bin/${_clang_name}++${TOOL_OS_SUFFIX}\" CACHE PATH \"C++ compiler\")\n else()\n set( CMAKE_C_COMPILER \"${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_MACHINE_NAME}-gcc${TOOL_OS_SUFFIX}\" CACHE PATH \"C compiler\" )\n set( 
CMAKE_CXX_COMPILER \"${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_MACHINE_NAME}-g++${TOOL_OS_SUFFIX}\" CACHE PATH \"C++ compiler\" )\n endif()\n endif()\n set( CMAKE_ASM_COMPILER \"${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_MACHINE_NAME}-gcc${TOOL_OS_SUFFIX}\" CACHE PATH \"assembler\" )\n set( CMAKE_STRIP \"${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_MACHINE_NAME}-strip${TOOL_OS_SUFFIX}\" CACHE PATH \"strip\" )\n set( CMAKE_AR \"${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_MACHINE_NAME}-ar${TOOL_OS_SUFFIX}\" CACHE PATH \"archive\" )\n set( CMAKE_LINKER \"${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_MACHINE_NAME}-ld${TOOL_OS_SUFFIX}\" CACHE PATH \"linker\" )\n set( CMAKE_NM \"${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_MACHINE_NAME}-nm${TOOL_OS_SUFFIX}\" CACHE PATH \"nm\" )\n set( CMAKE_OBJCOPY \"${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_MACHINE_NAME}-objcopy${TOOL_OS_SUFFIX}\" CACHE PATH \"objcopy\" )\n set( CMAKE_OBJDUMP \"${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_MACHINE_NAME}-objdump${TOOL_OS_SUFFIX}\" CACHE PATH \"objdump\" )\n set( CMAKE_RANLIB \"${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_MACHINE_NAME}-ranlib${TOOL_OS_SUFFIX}\" CACHE PATH \"ranlib\" )\nendif()\n\nset( _CMAKE_TOOLCHAIN_PREFIX \"${ANDROID_TOOLCHAIN_MACHINE_NAME}-\" )\nif( CMAKE_VERSION VERSION_LESS 2.8.5 )\n set( CMAKE_ASM_COMPILER_ARG1 \"-c\" )\nendif()\nif( APPLE )\n find_program( CMAKE_INSTALL_NAME_TOOL NAMES install_name_tool )\n if( NOT CMAKE_INSTALL_NAME_TOOL )\n message( FATAL_ERROR \"Could not find install_name_tool, please check your installation.\" )\n endif()\n mark_as_advanced( CMAKE_INSTALL_NAME_TOOL )\nendif()\n\n# Force set compilers because standard identification works badly for us\ninclude( CMakeForceCompiler )\nCMAKE_FORCE_C_COMPILER( \"${CMAKE_C_COMPILER}\" GNU )\nif( ANDROID_COMPILER_IS_CLANG )\n set( CMAKE_C_COMPILER_ID Clang)\nendif()\nset( CMAKE_C_PLATFORM_ID Linux )\nset( CMAKE_C_SIZEOF_DATA_PTR 4 )\nset( 
CMAKE_C_HAS_ISYSROOT 1 )\nset( CMAKE_C_COMPILER_ABI ELF )\nCMAKE_FORCE_CXX_COMPILER( \"${CMAKE_CXX_COMPILER}\" GNU )\nif( ANDROID_COMPILER_IS_CLANG )\n set( CMAKE_CXX_COMPILER_ID Clang)\nendif()\nset( CMAKE_CXX_PLATFORM_ID Linux )\nset( CMAKE_CXX_SIZEOF_DATA_PTR 4 )\nset( CMAKE_CXX_HAS_ISYSROOT 1 )\nset( CMAKE_CXX_COMPILER_ABI ELF )\nset( CMAKE_CXX_SOURCE_FILE_EXTENSIONS cc cp cxx cpp CPP c++ C )\n# force ASM compiler (required for CMake < 2.8.5)\nset( CMAKE_ASM_COMPILER_ID_RUN TRUE )\nset( CMAKE_ASM_COMPILER_ID GNU )\nset( CMAKE_ASM_COMPILER_WORKS TRUE )\nset( CMAKE_ASM_COMPILER_FORCED TRUE )\nset( CMAKE_COMPILER_IS_GNUASM 1)\nset( CMAKE_ASM_SOURCE_FILE_EXTENSIONS s S asm )\n\n# flags and definitions\nremove_definitions( -DANDROID )\nadd_definitions( -DANDROID )\n\nif( ANDROID_SYSROOT MATCHES \"[ ;\\\"]\" )\n if( CMAKE_HOST_WIN32 )\n # try to convert path to 8.3 form\n file( WRITE \"${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/cvt83.cmd\" \"@echo %~s1\" )\n execute_process( COMMAND \"$ENV{ComSpec}\" /c \"${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/cvt83.cmd\" \"${ANDROID_SYSROOT}\"\n OUTPUT_VARIABLE __path OUTPUT_STRIP_TRAILING_WHITESPACE\n RESULT_VARIABLE __result ERROR_QUIET )\n if( __result EQUAL 0 )\n file( TO_CMAKE_PATH \"${__path}\" ANDROID_SYSROOT )\n set( ANDROID_CXX_FLAGS \"--sysroot=${ANDROID_SYSROOT}\" )\n else()\n set( ANDROID_CXX_FLAGS \"--sysroot=\\\"${ANDROID_SYSROOT}\\\"\" )\n endif()\n else()\n set( ANDROID_CXX_FLAGS \"'--sysroot=${ANDROID_SYSROOT}'\" )\n endif()\n if( NOT _CMAKE_IN_TRY_COMPILE )\n # quotes can break try_compile and compiler identification\n message(WARNING \"Path to your Android NDK (or toolchain) has non-alphanumeric symbols.\\nThe build might be broken.\\n\")\n endif()\nelse()\n set( ANDROID_CXX_FLAGS \"--sysroot=${ANDROID_SYSROOT}\" )\nendif()\n\n# NDK flags\nif( ARMEABI OR ARMEABI_V7A )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -fpic -funwind-tables\" )\n if( NOT ANDROID_FORCE_ARM_BUILD AND NOT ARMEABI_V6 )\n set( 
ANDROID_CXX_FLAGS_RELEASE \"-mthumb -fomit-frame-pointer -fno-strict-aliasing\" )\n set( ANDROID_CXX_FLAGS_DEBUG \"-marm -fno-omit-frame-pointer -fno-strict-aliasing\" )\n if( NOT ANDROID_COMPILER_IS_CLANG )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -finline-limit=64\" )\n endif()\n else()\n # always compile ARMEABI_V6 in arm mode; otherwise there is no difference from ARMEABI\n set( ANDROID_CXX_FLAGS_RELEASE \"-marm -fomit-frame-pointer -fstrict-aliasing\" )\n set( ANDROID_CXX_FLAGS_DEBUG \"-marm -fno-omit-frame-pointer -fno-strict-aliasing\" )\n if( NOT ANDROID_COMPILER_IS_CLANG )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -funswitch-loops -finline-limit=300\" )\n endif()\n endif()\nelseif( X86 )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -funwind-tables\" )\n if( NOT ANDROID_COMPILER_IS_CLANG )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -funswitch-loops -finline-limit=300\" )\n else()\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -fPIC\" )\n endif()\n set( ANDROID_CXX_FLAGS_RELEASE \"-fomit-frame-pointer -fstrict-aliasing\" )\n set( ANDROID_CXX_FLAGS_DEBUG \"-fno-omit-frame-pointer -fno-strict-aliasing\" )\nelseif( MIPS )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -fpic -fno-strict-aliasing -finline-functions -ffunction-sections -funwind-tables -fmessage-length=0\" )\n set( ANDROID_CXX_FLAGS_RELEASE \"-fomit-frame-pointer\" )\n set( ANDROID_CXX_FLAGS_DEBUG \"-fno-omit-frame-pointer\" )\n if( NOT ANDROID_COMPILER_IS_CLANG )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -fno-inline-functions-called-once -fgcse-after-reload -frerun-cse-after-loop -frename-registers\" )\n set( ANDROID_CXX_FLAGS_RELEASE \"${ANDROID_CXX_FLAGS_RELEASE} -funswitch-loops -finline-limit=300\" )\n endif()\nelseif()\n set( ANDROID_CXX_FLAGS_RELEASE \"\" )\n set( ANDROID_CXX_FLAGS_DEBUG \"\" )\nendif()\n\nset( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -fsigned-char\" ) # good/necessary when porting desktop libraries\n\nif( NOT X86 AND NOT 
ANDROID_COMPILER_IS_CLANG )\n set( ANDROID_CXX_FLAGS \"-Wno-psabi ${ANDROID_CXX_FLAGS}\" )\nendif()\n\nif( NOT ANDROID_COMPILER_VERSION VERSION_LESS \"4.6\" )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -no-canonical-prefixes\" ) # see https://android-review.googlesource.com/#/c/47564/\nendif()\n\n# ABI-specific flags\nif( ARMEABI_V7A )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -march=armv7-a -mfloat-abi=softfp\" )\n if( NEON )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -mfpu=neon\" )\n elseif( VFPV3 )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -mfpu=vfpv3\" )\n else()\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -mfpu=vfpv3-d16\" )\n endif()\nelseif( ARMEABI_V6 )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -march=armv6 -mfloat-abi=softfp -mfpu=vfp\" ) # vfp == vfpv2\nelseif( ARMEABI )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -march=armv5te -mtune=xscale -msoft-float\" )\nendif()\n\nif( ANDROID_STL MATCHES \"gnustl\" AND (EXISTS \"${__libstl}\" OR EXISTS \"${__libsupcxx}\") )\n set( CMAKE_CXX_CREATE_SHARED_LIBRARY \" -o \" )\n set( CMAKE_CXX_CREATE_SHARED_MODULE \" -o \" )\n set( CMAKE_CXX_LINK_EXECUTABLE \" -o \" )\nelse()\n set( CMAKE_CXX_CREATE_SHARED_LIBRARY \" -o \" )\n set( CMAKE_CXX_CREATE_SHARED_MODULE \" -o \" )\n set( CMAKE_CXX_LINK_EXECUTABLE \" -o \" )\nendif()\n\n# STL\nif( EXISTS \"${__libstl}\" OR EXISTS \"${__libsupcxx}\" )\n if( EXISTS \"${__libstl}\" )\n set( CMAKE_CXX_CREATE_SHARED_LIBRARY \"${CMAKE_CXX_CREATE_SHARED_LIBRARY} \\\"${__libstl}\\\"\" )\n set( CMAKE_CXX_CREATE_SHARED_MODULE \"${CMAKE_CXX_CREATE_SHARED_MODULE} \\\"${__libstl}\\\"\" )\n set( CMAKE_CXX_LINK_EXECUTABLE \"${CMAKE_CXX_LINK_EXECUTABLE} \\\"${__libstl}\\\"\" )\n endif()\n if( EXISTS \"${__libsupcxx}\" )\n set( CMAKE_CXX_CREATE_SHARED_LIBRARY \"${CMAKE_CXX_CREATE_SHARED_LIBRARY} \\\"${__libsupcxx}\\\"\" )\n set( CMAKE_CXX_CREATE_SHARED_MODULE \"${CMAKE_CXX_CREATE_SHARED_MODULE} \\\"${__libsupcxx}\\\"\" )\n set( CMAKE_CXX_LINK_EXECUTABLE 
\"${CMAKE_CXX_LINK_EXECUTABLE} \\\"${__libsupcxx}\\\"\" )\n # C objects:\n set( CMAKE_C_CREATE_SHARED_LIBRARY \" -o \" )\n set( CMAKE_C_CREATE_SHARED_MODULE \" -o \" )\n set( CMAKE_C_LINK_EXECUTABLE \" -o \" )\n set( CMAKE_C_CREATE_SHARED_LIBRARY \"${CMAKE_C_CREATE_SHARED_LIBRARY} \\\"${__libsupcxx}\\\"\" )\n set( CMAKE_C_CREATE_SHARED_MODULE \"${CMAKE_C_CREATE_SHARED_MODULE} \\\"${__libsupcxx}\\\"\" )\n set( CMAKE_C_LINK_EXECUTABLE \"${CMAKE_C_LINK_EXECUTABLE} \\\"${__libsupcxx}\\\"\" )\n endif()\n if( ANDROID_STL MATCHES \"gnustl\" )\n if( NOT EXISTS \"${ANDROID_LIBM_PATH}\" )\n set( ANDROID_LIBM_PATH -lm )\n endif()\n set( CMAKE_CXX_CREATE_SHARED_LIBRARY \"${CMAKE_CXX_CREATE_SHARED_LIBRARY} ${ANDROID_LIBM_PATH}\" )\n set( CMAKE_CXX_CREATE_SHARED_MODULE \"${CMAKE_CXX_CREATE_SHARED_MODULE} ${ANDROID_LIBM_PATH}\" )\n set( CMAKE_CXX_LINK_EXECUTABLE \"${CMAKE_CXX_LINK_EXECUTABLE} ${ANDROID_LIBM_PATH}\" )\n endif()\nendif()\n\n# variables controlling optional build flags\nif (ANDROID_NDK_RELEASE STRLESS \"r7\")\n # libGLESv2.so in NDK's prior to r7 refers to missing external symbols.\n # So this flag option is required for all projects using OpenGL from native.\n __INIT_VARIABLE( ANDROID_SO_UNDEFINED VALUES ON )\nelse()\n __INIT_VARIABLE( ANDROID_SO_UNDEFINED VALUES OFF )\nendif()\n__INIT_VARIABLE( ANDROID_NO_UNDEFINED OBSOLETE_NO_UNDEFINED VALUES ON )\n__INIT_VARIABLE( ANDROID_FUNCTION_LEVEL_LINKING VALUES ON )\n__INIT_VARIABLE( ANDROID_GOLD_LINKER VALUES ON )\n__INIT_VARIABLE( ANDROID_NOEXECSTACK VALUES ON )\n__INIT_VARIABLE( ANDROID_RELRO VALUES ON )\n\nset( ANDROID_NO_UNDEFINED ${ANDROID_NO_UNDEFINED} CACHE BOOL \"Show all undefined symbols as linker errors\" )\nset( ANDROID_SO_UNDEFINED ${ANDROID_SO_UNDEFINED} CACHE BOOL \"Allows or disallows undefined symbols in shared libraries\" )\nset( ANDROID_FUNCTION_LEVEL_LINKING ${ANDROID_FUNCTION_LEVEL_LINKING} CACHE BOOL \"Allows or disallows undefined symbols in shared libraries\" )\nset( ANDROID_GOLD_LINKER 
${ANDROID_GOLD_LINKER} CACHE BOOL \"Enables gold linker (only avaialble for NDK r8b for ARM and x86 architectures on linux-86 and darwin-x86 hosts)\" )\nset( ANDROID_NOEXECSTACK ${ANDROID_NOEXECSTACK} CACHE BOOL \"Allows or disallows undefined symbols in shared libraries\" )\nset( ANDROID_RELRO ${ANDROID_RELRO} CACHE BOOL \"Enables RELRO - a memory corruption mitigation technique\" )\nmark_as_advanced( ANDROID_NO_UNDEFINED ANDROID_SO_UNDEFINED ANDROID_FUNCTION_LEVEL_LINKING ANDROID_GOLD_LINKER ANDROID_NOEXECSTACK ANDROID_RELRO )\n\n# linker flags\nset( ANDROID_LINKER_FLAGS \"\" )\n\nif( ARMEABI_V7A )\n # this is *required* to use the following linker flags that routes around\n # a CPU bug in some Cortex-A8 implementations:\n set( ANDROID_LINKER_FLAGS \"${ANDROID_LINKER_FLAGS} -Wl,--fix-cortex-a8\" )\nendif()\n\nif( ANDROID_NO_UNDEFINED )\n if( MIPS )\n # there is some sysroot-related problem in mips linker...\n if( NOT ANDROID_SYSROOT MATCHES \"[ ;\\\"]\" )\n set( ANDROID_LINKER_FLAGS \"${ANDROID_LINKER_FLAGS} -Wl,--no-undefined -Wl,-rpath-link,${ANDROID_SYSROOT}/usr/lib\" )\n endif()\n else()\n set( ANDROID_LINKER_FLAGS \"${ANDROID_LINKER_FLAGS} -Wl,--no-undefined\" )\n endif()\nendif()\n\nif( ANDROID_SO_UNDEFINED )\n set( ANDROID_LINKER_FLAGS \"${ANDROID_LINKER_FLAGS} -Wl,-allow-shlib-undefined\" )\nendif()\n\nif( ANDROID_FUNCTION_LEVEL_LINKING )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -fdata-sections -ffunction-sections\" )\n set( ANDROID_LINKER_FLAGS \"${ANDROID_LINKER_FLAGS} -Wl,--gc-sections\" )\nendif()\n\nif( ANDROID_COMPILER_VERSION VERSION_EQUAL \"4.6\" )\n if( ANDROID_GOLD_LINKER AND (CMAKE_HOST_UNIX OR ANDROID_NDK_RELEASE STRGREATER \"r8b\") AND (ARMEABI OR ARMEABI_V7A OR X86) )\n set( ANDROID_LINKER_FLAGS \"${ANDROID_LINKER_FLAGS} -fuse-ld=gold\" )\n elseif( ANDROID_NDK_RELEASE STRGREATER \"r8b\")\n set( ANDROID_LINKER_FLAGS \"${ANDROID_LINKER_FLAGS} -fuse-ld=bfd\" )\n elseif( ANDROID_NDK_RELEASE STREQUAL \"r8b\" AND ARMEABI AND NOT 
_CMAKE_IN_TRY_COMPILE )\n message( WARNING \"The default bfd linker from arm GCC 4.6 toolchain can fail with 'unresolvable R_ARM_THM_CALL relocation' error message. See https://code.google.com/p/android/issues/detail?id=35342\n On Linux and OS X host platform you can workaround this problem using gold linker (default).\n Rerun cmake with -DANDROID_GOLD_LINKER=ON option in case of problems.\n\" )\n endif()\nendif() # version 4.6\n\nif( ANDROID_NOEXECSTACK )\n if( ANDROID_COMPILER_IS_CLANG )\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -Xclang -mnoexecstack\" )\n else()\n set( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS} -Wa,--noexecstack\" )\n endif()\n set( ANDROID_LINKER_FLAGS \"${ANDROID_LINKER_FLAGS} -Wl,-z,noexecstack\" )\nendif()\n\nif( ANDROID_RELRO )\n set( ANDROID_LINKER_FLAGS \"${ANDROID_LINKER_FLAGS} -Wl,-z,relro -Wl,-z,now\" )\nendif()\n\nif( ANDROID_COMPILER_IS_CLANG )\n set( ANDROID_CXX_FLAGS \"-Qunused-arguments ${ANDROID_CXX_FLAGS}\" )\n if( ARMEABI_V7A AND NOT ANDROID_FORCE_ARM_BUILD )\n set( ANDROID_CXX_FLAGS_RELEASE \"-target thumbv7-none-linux-androideabi ${ANDROID_CXX_FLAGS_RELEASE}\" )\n set( ANDROID_CXX_FLAGS_DEBUG \"-target ${ANDROID_LLVM_TRIPLE} ${ANDROID_CXX_FLAGS_DEBUG}\" )\n else()\n set( ANDROID_CXX_FLAGS \"-target ${ANDROID_LLVM_TRIPLE} ${ANDROID_CXX_FLAGS}\" )\n endif()\n if( BUILD_WITH_ANDROID_NDK )\n set( ANDROID_CXX_FLAGS \"-gcc-toolchain ${ANDROID_TOOLCHAIN_ROOT} ${ANDROID_CXX_FLAGS}\" )\n endif()\nendif()\n\n# cache flags\nset( CMAKE_CXX_FLAGS \"\" CACHE STRING \"c++ flags\" )\nset( CMAKE_C_FLAGS \"\" CACHE STRING \"c flags\" )\nset( CMAKE_CXX_FLAGS_RELEASE \"-O3 -DNDEBUG\" CACHE STRING \"c++ Release flags\" )\nset( CMAKE_C_FLAGS_RELEASE \"-O3 -DNDEBUG\" CACHE STRING \"c Release flags\" )\nset( CMAKE_CXX_FLAGS_DEBUG \"-O0 -g -DDEBUG -D_DEBUG\" CACHE STRING \"c++ Debug flags\" )\nset( CMAKE_C_FLAGS_DEBUG \"-O0 -g -DDEBUG -D_DEBUG\" CACHE STRING \"c Debug flags\" )\nset( CMAKE_SHARED_LINKER_FLAGS \"\" CACHE STRING \"shared linker 
flags\" )\nset( CMAKE_MODULE_LINKER_FLAGS \"\" CACHE STRING \"module linker flags\" )\nset( CMAKE_EXE_LINKER_FLAGS \"-Wl,-z,nocopyreloc\" CACHE STRING \"executable linker flags\" )\n\n# put flags to cache (for debug purpose only)\nset( ANDROID_CXX_FLAGS \"${ANDROID_CXX_FLAGS}\" CACHE INTERNAL \"Android specific c/c++ flags\" )\nset( ANDROID_CXX_FLAGS_RELEASE \"${ANDROID_CXX_FLAGS_RELEASE}\" CACHE INTERNAL \"Android specific c/c++ Release flags\" )\nset( ANDROID_CXX_FLAGS_DEBUG \"${ANDROID_CXX_FLAGS_DEBUG}\" CACHE INTERNAL \"Android specific c/c++ Debug flags\" )\nset( ANDROID_LINKER_FLAGS \"${ANDROID_LINKER_FLAGS}\" CACHE INTERNAL \"Android specific c/c++ linker flags\" )\n\n# finish flags\nset( CMAKE_CXX_FLAGS \"${ANDROID_CXX_FLAGS} ${CMAKE_CXX_FLAGS}\" )\nset( CMAKE_C_FLAGS \"${ANDROID_CXX_FLAGS} ${CMAKE_C_FLAGS}\" )\nset( CMAKE_CXX_FLAGS_RELEASE \"${ANDROID_CXX_FLAGS_RELEASE} ${CMAKE_CXX_FLAGS_RELEASE}\" )\nset( CMAKE_C_FLAGS_RELEASE \"${ANDROID_CXX_FLAGS_RELEASE} ${CMAKE_C_FLAGS_RELEASE}\" )\nset( CMAKE_CXX_FLAGS_DEBUG \"${ANDROID_CXX_FLAGS_DEBUG} ${CMAKE_CXX_FLAGS_DEBUG}\" )\nset( CMAKE_C_FLAGS_DEBUG \"${ANDROID_CXX_FLAGS_DEBUG} ${CMAKE_C_FLAGS_DEBUG}\" )\nset( CMAKE_SHARED_LINKER_FLAGS \"${ANDROID_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS}\" )\nset( CMAKE_MODULE_LINKER_FLAGS \"${ANDROID_LINKER_FLAGS} ${CMAKE_MODULE_LINKER_FLAGS}\" )\nset( CMAKE_EXE_LINKER_FLAGS \"${ANDROID_LINKER_FLAGS} ${CMAKE_EXE_LINKER_FLAGS}\" )\n\nif( MIPS AND BUILD_WITH_ANDROID_NDK AND ANDROID_NDK_RELEASE STREQUAL \"r8\" )\n set( CMAKE_SHARED_LINKER_FLAGS \"-Wl,-T,${ANDROID_NDK_TOOLCHAINS_PATH}/${ANDROID_GCC_TOOLCHAIN_NAME}/mipself.xsc ${CMAKE_SHARED_LINKER_FLAGS}\" )\n set( CMAKE_MODULE_LINKER_FLAGS \"-Wl,-T,${ANDROID_NDK_TOOLCHAINS_PATH}/${ANDROID_GCC_TOOLCHAIN_NAME}/mipself.xsc ${CMAKE_MODULE_LINKER_FLAGS}\" )\n set( CMAKE_EXE_LINKER_FLAGS \"-Wl,-T,${ANDROID_NDK_TOOLCHAINS_PATH}/${ANDROID_GCC_TOOLCHAIN_NAME}/mipself.x ${CMAKE_EXE_LINKER_FLAGS}\" )\nendif()\n\n# configure rtti\nif( 
DEFINED ANDROID_RTTI AND ANDROID_STL_FORCE_FEATURES )\n if( ANDROID_RTTI )\n set( CMAKE_CXX_FLAGS \"-frtti ${CMAKE_CXX_FLAGS}\" )\n else()\n set( CMAKE_CXX_FLAGS \"-fno-rtti ${CMAKE_CXX_FLAGS}\" )\n endif()\nendif()\n\n# configure exceptios\nif( DEFINED ANDROID_EXCEPTIONS AND ANDROID_STL_FORCE_FEATURES )\n if( ANDROID_EXCEPTIONS )\n set( CMAKE_CXX_FLAGS \"-fexceptions ${CMAKE_CXX_FLAGS}\" )\n set( CMAKE_C_FLAGS \"-fexceptions ${CMAKE_C_FLAGS}\" )\n else()\n set( CMAKE_CXX_FLAGS \"-fno-exceptions ${CMAKE_CXX_FLAGS}\" )\n set( CMAKE_C_FLAGS \"-fno-exceptions ${CMAKE_C_FLAGS}\" )\n endif()\nendif()\n\n# global includes and link directories\ninclude_directories( SYSTEM \"${ANDROID_SYSROOT}/usr/include\" ${ANDROID_STL_INCLUDE_DIRS} )\nget_filename_component(__android_install_path \"${CMAKE_INSTALL_PREFIX}/libs/${ANDROID_NDK_ABI_NAME}\" ABSOLUTE) # avoid CMP0015 policy warning\nlink_directories( \"${__android_install_path}\" )\n\n# detect if need link crtbegin_so.o explicitly\nif( NOT DEFINED ANDROID_EXPLICIT_CRT_LINK )\n set( __cmd \"${CMAKE_CXX_CREATE_SHARED_LIBRARY}\" )\n string( REPLACE \"\" \"${CMAKE_CXX_COMPILER} ${CMAKE_CXX_COMPILER_ARG1}\" __cmd \"${__cmd}\" )\n string( REPLACE \"\" \"${CMAKE_C_COMPILER} ${CMAKE_C_COMPILER_ARG1}\" __cmd \"${__cmd}\" )\n string( REPLACE \"\" \"${CMAKE_CXX_FLAGS}\" __cmd \"${__cmd}\" )\n string( REPLACE \"\" \"\" __cmd \"${__cmd}\" )\n string( REPLACE \"\" \"${CMAKE_SHARED_LINKER_FLAGS}\" __cmd \"${__cmd}\" )\n string( REPLACE \"\" \"-shared\" __cmd \"${__cmd}\" )\n string( REPLACE \"\" \"\" __cmd \"${__cmd}\" )\n string( REPLACE \"\" \"\" __cmd \"${__cmd}\" )\n string( REPLACE \"\" \"${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/toolchain_crtlink_test.so\" __cmd \"${__cmd}\" )\n string( REPLACE \"\" \"\\\"${ANDROID_SYSROOT}/usr/lib/crtbegin_so.o\\\"\" __cmd \"${__cmd}\" )\n string( REPLACE \"\" \"\" __cmd \"${__cmd}\" )\n separate_arguments( __cmd )\n foreach( __var ANDROID_NDK ANDROID_NDK_TOOLCHAINS_PATH 
ANDROID_STANDALONE_TOOLCHAIN )\n if( ${__var} )\n set( __tmp \"${${__var}}\" )\n separate_arguments( __tmp )\n string( REPLACE \"${__tmp}\" \"${${__var}}\" __cmd \"${__cmd}\")\n endif()\n endforeach()\n string( REPLACE \"'\" \"\" __cmd \"${__cmd}\" )\n string( REPLACE \"\\\"\" \"\" __cmd \"${__cmd}\" )\n execute_process( COMMAND ${__cmd} RESULT_VARIABLE __cmd_result OUTPUT_QUIET ERROR_QUIET )\n if( __cmd_result EQUAL 0 )\n set( ANDROID_EXPLICIT_CRT_LINK ON )\n else()\n set( ANDROID_EXPLICIT_CRT_LINK OFF )\n endif()\nendif()\n\nif( ANDROID_EXPLICIT_CRT_LINK )\n set( CMAKE_CXX_CREATE_SHARED_LIBRARY \"${CMAKE_CXX_CREATE_SHARED_LIBRARY} \\\"${ANDROID_SYSROOT}/usr/lib/crtbegin_so.o\\\"\" )\n set( CMAKE_CXX_CREATE_SHARED_MODULE \"${CMAKE_CXX_CREATE_SHARED_MODULE} \\\"${ANDROID_SYSROOT}/usr/lib/crtbegin_so.o\\\"\" )\nendif()\n\n# setup output directories\nset( LIBRARY_OUTPUT_PATH_ROOT ${CMAKE_SOURCE_DIR} CACHE PATH \"root for library output, set this to change where android libs are installed to\" )\nset( CMAKE_INSTALL_PREFIX \"${ANDROID_TOOLCHAIN_ROOT}/user\" CACHE STRING \"path for installing\" )\n\nif(NOT _CMAKE_IN_TRY_COMPILE)\n if( EXISTS \"${CMAKE_SOURCE_DIR}/jni/CMakeLists.txt\" )\n set( EXECUTABLE_OUTPUT_PATH \"${LIBRARY_OUTPUT_PATH_ROOT}/bin/${ANDROID_NDK_ABI_NAME}\" CACHE PATH \"Output directory for applications\" )\n else()\n set( EXECUTABLE_OUTPUT_PATH \"${LIBRARY_OUTPUT_PATH_ROOT}/bin\" CACHE PATH \"Output directory for applications\" )\n endif()\n set( LIBRARY_OUTPUT_PATH \"${LIBRARY_OUTPUT_PATH_ROOT}/libs/${ANDROID_NDK_ABI_NAME}\" CACHE PATH \"path for android libs\" )\nendif()\n\n# copy shaed stl library to build directory\nif( NOT _CMAKE_IN_TRY_COMPILE AND __libstl MATCHES \"[.]so$\" )\n get_filename_component( __libstlname \"${__libstl}\" NAME )\n execute_process( COMMAND \"${CMAKE_COMMAND}\" -E copy_if_different \"${__libstl}\" \"${LIBRARY_OUTPUT_PATH}/${__libstlname}\" RESULT_VARIABLE __fileCopyProcess )\n if( NOT __fileCopyProcess EQUAL 0 OR NOT 
EXISTS \"${LIBRARY_OUTPUT_PATH}/${__libstlname}\")\n message( SEND_ERROR \"Failed copying of ${__libstl} to the ${LIBRARY_OUTPUT_PATH}/${__libstlname}\" )\n endif()\n unset( __fileCopyProcess )\n unset( __libstlname )\nendif()\n\n\n# set these global flags for cmake client scripts to change behavior\nset( ANDROID True )\nset( BUILD_ANDROID True )\n\n# where is the target environment\nset( CMAKE_FIND_ROOT_PATH \"${ANDROID_TOOLCHAIN_ROOT}/bin\" \"${ANDROID_TOOLCHAIN_ROOT}/${ANDROID_TOOLCHAIN_MACHINE_NAME}\" \"${ANDROID_SYSROOT}\" \"${CMAKE_INSTALL_PREFIX}\" \"${CMAKE_INSTALL_PREFIX}/share\" )\n\n# only search for libraries and includes in the ndk toolchain\nset( CMAKE_FIND_ROOT_PATH_MODE_PROGRAM ONLY )\nset( CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY )\nset( CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY )\n\n\n# macro to find packages on the host OS\nmacro( find_host_package )\n set( CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER )\n set( CMAKE_FIND_ROOT_PATH_MODE_LIBRARY NEVER )\n set( CMAKE_FIND_ROOT_PATH_MODE_INCLUDE NEVER )\n if( CMAKE_HOST_WIN32 )\n SET( WIN32 1 )\n SET( UNIX )\n elseif( CMAKE_HOST_APPLE )\n SET( APPLE 1 )\n SET( UNIX )\n endif()\n find_package( ${ARGN} )\n SET( WIN32 )\n SET( APPLE )\n SET( UNIX 1 )\n set( CMAKE_FIND_ROOT_PATH_MODE_PROGRAM ONLY )\n set( CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY )\n set( CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY )\nendmacro()\n\n\n# macro to find programs on the host OS\nmacro( find_host_program )\n set( CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER )\n set( CMAKE_FIND_ROOT_PATH_MODE_LIBRARY NEVER )\n set( CMAKE_FIND_ROOT_PATH_MODE_INCLUDE NEVER )\n if( CMAKE_HOST_WIN32 )\n SET( WIN32 1 )\n SET( UNIX )\n elseif( CMAKE_HOST_APPLE )\n SET( APPLE 1 )\n SET( UNIX )\n endif()\n find_program( ${ARGN} )\n SET( WIN32 )\n SET( APPLE )\n SET( UNIX 1 )\n set( CMAKE_FIND_ROOT_PATH_MODE_PROGRAM ONLY )\n set( CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY )\n set( CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY )\nendmacro()\n\n\nmacro( ANDROID_GET_ABI_RAWNAME 
TOOLCHAIN_FLAG VAR )\n if( \"${TOOLCHAIN_FLAG}\" STREQUAL \"ARMEABI\" )\n set( ${VAR} \"armeabi\" )\n elseif( \"${TOOLCHAIN_FLAG}\" STREQUAL \"ARMEABI_V7A\" )\n set( ${VAR} \"armeabi-v7a\" )\n elseif( \"${TOOLCHAIN_FLAG}\" STREQUAL \"X86\" )\n set( ${VAR} \"x86\" )\n elseif( \"${TOOLCHAIN_FLAG}\" STREQUAL \"MIPS\" )\n set( ${VAR} \"mips\" )\n else()\n set( ${VAR} \"unknown\" )\n endif()\nendmacro()\n\n\n# export toolchain settings for the try_compile() command\nif( NOT PROJECT_NAME STREQUAL \"CMAKE_TRY_COMPILE\" )\n set( __toolchain_config \"\")\n foreach( __var NDK_CCACHE LIBRARY_OUTPUT_PATH_ROOT ANDROID_FORBID_SYGWIN ANDROID_SET_OBSOLETE_VARIABLES\n ANDROID_NDK_HOST_X64\n ANDROID_NDK\n ANDROID_NDK_LAYOUT\n ANDROID_STANDALONE_TOOLCHAIN\n ANDROID_TOOLCHAIN_NAME\n ANDROID_ABI\n ANDROID_NATIVE_API_LEVEL\n ANDROID_STL\n ANDROID_STL_FORCE_FEATURES\n ANDROID_FORCE_ARM_BUILD\n ANDROID_NO_UNDEFINED\n ANDROID_SO_UNDEFINED\n ANDROID_FUNCTION_LEVEL_LINKING\n ANDROID_GOLD_LINKER\n ANDROID_NOEXECSTACK\n ANDROID_RELRO\n ANDROID_LIBM_PATH\n ANDROID_EXPLICIT_CRT_LINK\n )\n if( DEFINED ${__var} )\n if( \"${__var}\" MATCHES \" \")\n set( __toolchain_config \"${__toolchain_config}set( ${__var} \\\"${${__var}}\\\" CACHE INTERNAL \\\"\\\" )\\n\" )\n else()\n set( __toolchain_config \"${__toolchain_config}set( ${__var} ${${__var}} CACHE INTERNAL \\\"\\\" )\\n\" )\n endif()\n endif()\n endforeach()\n file( WRITE \"${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/android.toolchain.config.cmake\" \"${__toolchain_config}\" )\n unset( __toolchain_config )\nendif()\n\n\n# force cmake to produce / instead of \\ in build commands for Ninja generator\nif( CMAKE_GENERATOR MATCHES \"Ninja\" AND CMAKE_HOST_WIN32 )\n # it is a bad hack after all\n # CMake generates Ninja makefiles with UNIX paths only if it thinks that we are going to build with MinGW\n set( CMAKE_COMPILER_IS_MINGW TRUE ) # tell CMake that we are MinGW\n set( CMAKE_CROSSCOMPILING TRUE ) # stop recursion\n enable_language( C )\n 
enable_language( CXX )\n # unset( CMAKE_COMPILER_IS_MINGW ) # can't unset because CMake does not convert back-slashes in response files without it\n unset( MINGW )\nendif()\n\n\n# set some obsolete variables for backward compatibility\nset( ANDROID_SET_OBSOLETE_VARIABLES ON CACHE BOOL \"Define obsolete Andrid-specific cmake variables\" )\nmark_as_advanced( ANDROID_SET_OBSOLETE_VARIABLES )\nif( ANDROID_SET_OBSOLETE_VARIABLES )\n set( ANDROID_API_LEVEL ${ANDROID_NATIVE_API_LEVEL} )\n set( ARM_TARGET \"${ANDROID_ABI}\" )\n set( ARMEABI_NDK_NAME \"${ANDROID_NDK_ABI_NAME}\" )\nendif()\n\n\n# Variables controlling behavior or set by cmake toolchain:\n# ANDROID_ABI : \"armeabi-v7a\" (default), \"armeabi\", \"armeabi-v7a with NEON\", \"armeabi-v7a with VFPV3\", \"armeabi-v6 with VFP\", \"x86\", \"mips\"\n# ANDROID_NATIVE_API_LEVEL : 3,4,5,8,9,14 (depends on NDK version)\n# ANDROID_STL : gnustl_static/gnustl_shared/stlport_static/stlport_shared/gabi++_static/gabi++_shared/system_re/system/none\n# ANDROID_FORBID_SYGWIN : ON/OFF\n# ANDROID_NO_UNDEFINED : ON/OFF\n# ANDROID_SO_UNDEFINED : OFF/ON (default depends on NDK version)\n# ANDROID_FUNCTION_LEVEL_LINKING : ON/OFF\n# ANDROID_GOLD_LINKER : ON/OFF\n# ANDROID_NOEXECSTACK : ON/OFF\n# ANDROID_RELRO : ON/OFF\n# ANDROID_FORCE_ARM_BUILD : ON/OFF\n# ANDROID_STL_FORCE_FEATURES : ON/OFF\n# ANDROID_SET_OBSOLETE_VARIABLES : ON/OFF\n# Can be set only at the first run:\n# ANDROID_NDK\n# ANDROID_STANDALONE_TOOLCHAIN\n# ANDROID_TOOLCHAIN_NAME : the NDK name of compiler toolchain\n# ANDROID_NDK_HOST_X64 : try to use x86_64 toolchain (default for x64 host systems)\n# ANDROID_NDK_LAYOUT : the inner NDK structure (RELEASE, LINARO, ANDROID)\n# LIBRARY_OUTPUT_PATH_ROOT : \n# NDK_CCACHE : \n# Obsolete:\n# ANDROID_API_LEVEL : superseded by ANDROID_NATIVE_API_LEVEL\n# ARM_TARGET : superseded by ANDROID_ABI\n# ARM_TARGETS : superseded by ANDROID_ABI (can be set only)\n# ANDROID_NDK_TOOLCHAIN_ROOT : superseded by ANDROID_STANDALONE_TOOLCHAIN (can be 
set only)\n# ANDROID_USE_STLPORT : superseded by ANDROID_STL=stlport_static\n# ANDROID_LEVEL : superseded by ANDROID_NATIVE_API_LEVEL (completely removed)\n#\n# Primary read-only variables:\n# ANDROID : always TRUE\n# ARMEABI : TRUE for arm v6 and older devices\n# ARMEABI_V6 : TRUE for arm v6\n# ARMEABI_V7A : TRUE for arm v7a\n# NEON : TRUE if NEON unit is enabled\n# VFPV3 : TRUE if VFP version 3 is enabled\n# X86 : TRUE if configured for x86\n# MIPS : TRUE if configured for mips\n# BUILD_ANDROID : always TRUE\n# BUILD_WITH_ANDROID_NDK : TRUE if NDK is used\n# BUILD_WITH_STANDALONE_TOOLCHAIN : TRUE if standalone toolchain is used\n# ANDROID_NDK_HOST_SYSTEM_NAME : \"windows\", \"linux-x86\" or \"darwin-x86\" depending on host platform\n# ANDROID_NDK_ABI_NAME : \"armeabi\", \"armeabi-v7a\", \"x86\" or \"mips\" depending on ANDROID_ABI\n# ANDROID_NDK_RELEASE : one of r5, r5b, r5c, r6, r6b, r7, r7b, r7c, r8, r8b, r8c, r8d, r8e, r9, r9b, r9c, r9d; set only for NDK\n# ANDROID_ARCH_NAME : \"arm\" or \"x86\" or \"mips\" depending on ANDROID_ABI\n# ANDROID_SYSROOT : path to the compiler sysroot\n# TOOL_OS_SUFFIX : \"\" or \".exe\" depending on host platform\n# ANDROID_COMPILER_IS_CLANG : TRUE if clang compiler is used\n# Obsolete:\n# ARMEABI_NDK_NAME : superseded by ANDROID_NDK_ABI_NAME\n#\n# Secondary (less stable) read-only variables:\n# ANDROID_COMPILER_VERSION : GCC version used\n# ANDROID_CXX_FLAGS : C/C++ compiler flags required by Android platform\n# ANDROID_SUPPORTED_ABIS : list of currently allowed values for ANDROID_ABI\n# ANDROID_TOOLCHAIN_MACHINE_NAME : \"arm-linux-androideabi\", \"arm-eabi\" or \"i686-android-linux\"\n# ANDROID_TOOLCHAIN_ROOT : path to the top level of toolchain (standalone or placed inside NDK)\n# ANDROID_CLANG_TOOLCHAIN_ROOT : path to clang tools\n# ANDROID_SUPPORTED_NATIVE_API_LEVELS : list of native API levels found inside NDK\n# ANDROID_STL_INCLUDE_DIRS : stl include paths\n# ANDROID_RTTI : if rtti is enabled by the runtime\n# 
ANDROID_EXCEPTIONS : if exceptions are enabled by the runtime\n# ANDROID_GCC_TOOLCHAIN_NAME : read-only, differs from ANDROID_TOOLCHAIN_NAME only if clang is used\n# ANDROID_CLANG_VERSION : version of clang compiler if clang is used\n# ANDROID_LIBM_PATH : path to libm.so (set to something like $(TOP)/out/target/product//obj/lib/libm.so) to workaround unresolved `sincos`\n#\n# Defaults:\n# ANDROID_DEFAULT_NDK_API_LEVEL\n# ANDROID_DEFAULT_NDK_API_LEVEL_${ARCH}\n# ANDROID_NDK_SEARCH_PATHS\n# ANDROID_STANDALONE_TOOLCHAIN_SEARCH_PATH\n# ANDROID_SUPPORTED_ABIS_${ARCH}\n# ANDROID_SUPPORTED_NDK_VERSIONS\n", "meta": {"content_hash": "572927fa0aeabea778f166726d21e712", "timestamp": "", "source": "github", "line_count": 1471, "max_line_length": 289, "avg_line_length": 48.53229095853161, "alnum_prop": 0.6988415906766959, "repo_name": "dinahmoe/dm-cmake", "id": "c2000f487fcf15ea66a46698950906cf4c14642a", "size": "86390", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "android.toolchain.cmake", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "CMake", "bytes": "114910"}]}} {"text": "LWM (compressed PlayCanvas model) file format toolset\n------\n\n* Live demo can be found here: https://playcanvas.com/project/358306/overview/lwm-model-example\n* Downloadable SDK can be found here: https://github.com/PsichiX/PlayCanvasLWM/releases\n\nTool usage:\n------\n\n1. To convert PlayCanvas JSON model into LWM model, first you need to get JSON file of your model uploaded into PlayCanvas Editor.\n\n2. 
Then you have to process it with `convertlwm.exe` tool (found in: `/tools/`; it'swritten in C#, so .NET 4.5 is needed to be installed on your machine):\n\n ```bash\n convertlwm.exe -v -i truck.json -o truck.lwm -r -jtb -sspd -ipq 64\n ```\n * `-v` - process will be verbose;\n * `-i truck.json` - path to input JSON file;\n * `-o truck.lwm` - path to output LWM file;\n * `-r` - overwrite output file if already exists;\n * `-jtb` - convert from JSON to LWM (if you want to convert from LWM to JSON, then you use `-btj`);\n * `-sspd` - enables Small Size of Packed Data mode (model will be compressed, not just translated into binary);\n * `-ipq 64` - defines Items Per Quant count (when used with `-sspd` it will quantize data into chunks that can be compressed even more than without quantization: small chunks - better compression);\n\n3. Upload compressed LWM model onto external server that support CORS (cross-origin) - if your server cannot support CORS for LWM files, you can use `getlwmfile.php` and `.htaccess` files from `/cors/` folder.\n\n4. Add `lib/lwm.js`(debug) or `lib/lwm.min.js`(release) LWM scripts into root object in scene (this library hooks into PlayCanvas engine and allow to decompress and convert LWM files into `pc.Model` instances).\n\n5. Add `components/LWMModel.js` component script into your entity scripts and put address of externally hosted LWM model file into `url` attribute (note that because you cannot upload LWM model files into PlayCanvas Editor, you have to host them externally).\n\n6. 
Create new JSON file with LWM materials maping description content, that will looks like:\n\n ```json\n {\n \"meshInstancesMapping\": [\n \"Right_Wiper\",\n \"Left_Wiper\",\n \"Right_Rear\",\n \"Left_Rear\",\n \"Left_Front\",\n \"Right_Front\",\n \"TruckBody\"\n ],\n \"materialsMapping\": {\n \"Right_Wiper\": \"Pickup Truck Texture\",\n \"Left_Wiper\": \"Pickup Truck Texture\",\n \"Right_Rear\": \"Pickup Truck Texture\",\n \"Left_Rear\": \"Pickup Truck Texture\",\n \"Left_Front\": \"Pickup Truck Texture\",\n \"Right_Front\": \"Pickup Truck Texture\",\n \"TruckBody\": \"Pickup Truck Texture\"\n }\n }\n ```\n * meshInstancesMapping tells which mesh instance index will have which name;\n * materialsMapping tells which mesh instance name will have which material (material can be either string name or assed id number);\n\n7. Attach that JSON into `materialsMapping` attribute.\n\n8. Run your game and check if there is no errors. If you will get an error during LWM model loading, the most reason is that your server does not support CORS correctly - check details in browser debug console!\n\nTODO:\n------\n\n* Add skin and animation data support\n", "meta": {"content_hash": "18a32d01c5c94f1317392dfa2356d6a4", "timestamp": "", "source": "github", "line_count": 65, "max_line_length": 258, "avg_line_length": 47.50769230769231, "alnum_prop": 0.7117875647668394, "repo_name": "PsichiX/PlayCanvasLWM", "id": "0c5b267f1dac4ac8e7025572c05b10502387b898", "size": "3088", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [{"name": "ApacheConf", "bytes": "221"}, {"name": "Batchfile", "bytes": "81"}, {"name": "JavaScript", "bytes": "4741"}, {"name": "PHP", "bytes": "1293"}, {"name": "Shell", "bytes": "75"}]}} {"text": "namespace Microsoft.Azure.CognitiveServices.Vision.Face\n{\n using Microsoft.Rest;\n using Microsoft.Rest.Serialization;\n using Models;\n using Newtonsoft.Json;\n using System.Collections;\n 
using System.Collections.Generic;\n using System.Net;\n using System.Net.Http;\n\n /// \n /// An API for face detection, verification, and identification.\n /// \n public partial class FaceAPI : ServiceClient, IFaceAPI\n {\n /// \n /// The base URI of the service.\n /// \n internal string BaseUri {get; set;}\n\n /// \n /// Gets or sets json serialization settings.\n /// \n public JsonSerializerSettings SerializationSettings { get; private set; }\n\n /// \n /// Gets or sets json deserialization settings.\n /// \n public JsonSerializerSettings DeserializationSettings { get; private set; }\n\n /// \n /// Supported Azure regions for Cognitive Services endpoints. Possible values\n /// include: 'westus', 'westeurope', 'southeastasia', 'eastus2',\n /// 'westcentralus', 'westus2', 'eastus', 'southcentralus', 'northeurope',\n /// 'eastasia', 'australiaeast', 'brazilsouth'\n /// \n public AzureRegions AzureRegion { get; set; }\n\n /// \n /// Subscription credentials which uniquely identify client subscription.\n /// \n public ServiceClientCredentials Credentials { get; private set; }\n\n /// \n /// Gets the IFaceOperations.\n /// \n public virtual IFaceOperations Face { get; private set; }\n\n /// \n /// Gets the IPersonGroupPerson.\n /// \n public virtual IPersonGroupPerson PersonGroupPerson { get; private set; }\n\n /// \n /// Gets the IPersonGroupOperations.\n /// \n public virtual IPersonGroupOperations PersonGroup { get; private set; }\n\n /// \n /// Gets the IFaceListOperations.\n /// \n public virtual IFaceListOperations FaceList { get; private set; }\n\n /// \n /// Initializes a new instance of the FaceAPI class.\n /// \n /// \n /// Optional. The delegating handlers to add to the http client pipeline.\n /// \n protected FaceAPI(params DelegatingHandler[] handlers) : base(handlers)\n {\n Initialize();\n }\n\n /// \n /// Initializes a new instance of the FaceAPI class.\n /// \n /// \n /// Optional. 
The http client handler used to handle http transport.\n /// \n /// \n /// Optional. The delegating handlers to add to the http client pipeline.\n /// \n protected FaceAPI(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers)\n {\n Initialize();\n }\n\n /// \n /// Initializes a new instance of the FaceAPI class.\n /// \n /// \n /// Required. Subscription credentials which uniquely identify client subscription.\n /// \n /// \n /// Optional. The delegating handlers to add to the http client pipeline.\n /// \n /// \n /// Thrown when a required parameter is null\n /// \n public FaceAPI(ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers)\n {\n if (credentials == null)\n {\n throw new System.ArgumentNullException(\"credentials\");\n }\n Credentials = credentials;\n if (Credentials != null)\n {\n Credentials.InitializeServiceClient(this);\n }\n }\n\n /// \n /// Initializes a new instance of the FaceAPI class.\n /// \n /// \n /// Required. Subscription credentials which uniquely identify client subscription.\n /// \n /// \n /// Optional. The http client handler used to handle http transport.\n /// \n /// \n /// Optional. 
The delegating handlers to add to the http client pipeline.\n /// \n /// \n /// Thrown when a required parameter is null\n /// \n public FaceAPI(ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)\n {\n if (credentials == null)\n {\n throw new System.ArgumentNullException(\"credentials\");\n }\n Credentials = credentials;\n if (Credentials != null)\n {\n Credentials.InitializeServiceClient(this);\n }\n }\n\n /// \n /// An optional partial-method to perform custom initialization.\n ///\n partial void CustomInitialize();\n /// \n /// Initializes client properties.\n /// \n private void Initialize()\n {\n Face = new FaceOperations(this);\n PersonGroupPerson = new PersonGroupPerson(this);\n PersonGroup = new PersonGroupOperations(this);\n FaceList = new FaceListOperations(this);\n BaseUri = \"https://{AzureRegion}.api.cognitive.microsoft.com/face/v1.0\";\n SerializationSettings = new JsonSerializerSettings\n {\n Formatting = Newtonsoft.Json.Formatting.Indented,\n DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,\n DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,\n NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,\n ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,\n ContractResolver = new ReadOnlyJsonContractResolver(),\n Converters = new List\n {\n new Iso8601TimeSpanConverter()\n }\n };\n DeserializationSettings = new JsonSerializerSettings\n {\n DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,\n DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,\n NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,\n ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,\n ContractResolver = new ReadOnlyJsonContractResolver(),\n Converters = new List\n {\n new Iso8601TimeSpanConverter()\n }\n };\n CustomInitialize();\n }\n }\n}\n", "meta": {"content_hash": 
"69e8a4581f958be9653e7bbb60732c95", "timestamp": "", "source": "github", "line_count": 185, "max_line_length": 158, "avg_line_length": 39.32432432432432, "alnum_prop": 0.5869415807560138, "repo_name": "DheerendraRathor/azure-sdk-for-net", "id": "ea63df3ce11e2bef12e46a6677a052bfdf05ea92", "size": "7628", "binary": false, "copies": "2", "ref": "refs/heads/psSdkJson6", "path": "src/SDKs/CognitiveServices/dataPlane/Vision/Face/Face/Generated/FaceAPI.cs", "mode": "33188", "license": "mit", "language": [{"name": "ASP", "bytes": "118"}, {"name": "Batchfile", "bytes": "15938"}, {"name": "C#", "bytes": "74830057"}, {"name": "CSS", "bytes": "685"}, {"name": "JavaScript", "bytes": "7875"}, {"name": "PowerShell", "bytes": "21530"}, {"name": "Shell", "bytes": "9959"}, {"name": "XSLT", "bytes": "6114"}]}} {"text": "$(document).ready(function() {\n $(\"#add-note\").on(\"submit\", function(event) {\n event.preventDefault();\n var action = $(this).attr(\"action\");\n var method = $(this).attr(\"method\");\n var data = $(this).serialize();\n var request = $.ajax(action, {\"method\": method, \"data\": data});\n request.done(function(response) {\n $(\"#private-notes\").append(response)\n })\n })\n\n $(\".delete\").on(\"click\", function(event) {\n event.preventDefault();\n \n var action = $(this).attr(\"href\");\n var method = \"delete\";\n var data = \"note_id=\" + $(this).attr(\"id\");\n var deleted_element = $(this).parent()\n // debugger;\n var request = $.ajax(action, {\"method\": method, \"data\": data});\n request.done(function() {\n deleted_element.remove();\n })\n })\n\n});\n", "meta": {"content_hash": "e7b418ace4155a63d0d452715ab2b5dd", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 67, "avg_line_length": 29.11111111111111, "alnum_prop": 0.5661577608142494, "repo_name": "helin24/FS-Momentum", "id": "1fab55c903800b2034ede1d5b80e9614be7b7f2e", "size": "786", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "public/js/skill.js", 
"mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "3484"}, {"name": "JavaScript", "bytes": "2123"}, {"name": "Ruby", "bytes": "17810"}]}} {"text": "\ufeffusing System;\nusing System.Collections.Generic;\nusing System.Data;\nusing System.Linq.Expressions;\n\nnamespace ServiceStack.OrmLite\n{\n public static class OrmLiteWriteExpressionsApi\n {\n /// \n /// Use an SqlExpression to select which fields to update and construct the where expression, E.g: \n /// \n /// var q = db.From>Person<());\n /// db.UpdateOnly(new Person { FirstName = \"JJ\" }, q.Update(p => p.FirstName).Where(x => x.FirstName == \"Jimi\"));\n /// UPDATE \"Person\" SET \"FirstName\" = 'JJ' WHERE (\"FirstName\" = 'Jimi')\n /// \n /// What's not in the update expression doesn't get updated. No where expression updates all rows. E.g:\n /// \n /// db.UpdateOnly(new Person { FirstName = \"JJ\", LastName = \"Hendo\" }, ev.Update(p => p.FirstName));\n /// UPDATE \"Person\" SET \"FirstName\" = 'JJ'\n /// \n public static int UpdateOnly(this IDbConnection dbConn, T model, SqlExpression onlyFields, Action commandFilter = null)\n {\n return dbConn.Exec(dbCmd => dbCmd.UpdateOnly(model, onlyFields, commandFilter));\n }\n\n /// \n /// Update only fields in the specified expression that matches the where condition (if any), E.g:\n /// \n /// db.UpdateOnly(() => new Person { FirstName = \"JJ\" }, where: p => p.LastName == \"Hendrix\");\n /// UPDATE \"Person\" SET \"FirstName\" = 'JJ' WHERE (\"LastName\" = 'Hendrix')\n ///\n /// db.UpdateOnly(() => new Person { FirstName = \"JJ\" });\n /// UPDATE \"Person\" SET \"FirstName\" = 'JJ'\n /// \n public static int UpdateOnly(this IDbConnection dbConn, \n Expression> updateFields,\n Expression> where = null,\n Action commandFilter = null)\n {\n return dbConn.Exec(dbCmd => dbCmd.UpdateOnly(updateFields, dbCmd.GetDialectProvider().SqlExpression().Where(where), commandFilter));\n }\n\n /// \n /// Update only fields in the specified expression that 
matches the where condition (if any), E.g:\n /// \n /// db.UpdateOnly(() => new Person { FirstName = \"JJ\" }, db.From>Person<().Where(p => p.LastName == \"Hendrix\"));\n /// UPDATE \"Person\" SET \"FirstName\" = 'JJ' WHERE (\"LastName\" = 'Hendrix')\n /// \n public static int UpdateOnly(this IDbConnection dbConn,\n Expression> updateFields,\n SqlExpression q,\n Action commandFilter = null)\n {\n return dbConn.Exec(dbCmd => dbCmd.UpdateOnly(updateFields, q, commandFilter));\n }\n\n /// \n /// Update only fields in the specified expression that matches the where condition (if any), E.g:\n ///\n /// var q = db.From>Person<().Where(p => p.LastName == \"Hendrix\");\n /// db.UpdateOnly(() => new Person { FirstName = \"JJ\" }, q.WhereExpression, q.Params);\n /// UPDATE \"Person\" SET \"FirstName\" = 'JJ' WHERE (\"LastName\" = 'Hendrix')\n /// \n public static int UpdateOnly(this IDbConnection dbConn,\n Expression> updateFields,\n string whereExpression,\n IEnumerable sqlParams,\n Action commandFilter = null)\n {\n return dbConn.Exec(dbCmd => dbCmd.UpdateOnly(updateFields, whereExpression, sqlParams, commandFilter));\n }\n\n /// \n /// Update record, updating only fields specified in updateOnly that matches the where condition (if any), E.g:\n /// \n /// db.UpdateOnly(new Person { FirstName = \"JJ\" }, p => p.FirstName, p => p.LastName == \"Hendrix\");\n /// UPDATE \"Person\" SET \"FirstName\" = 'JJ' WHERE (\"LastName\" = 'Hendrix')\n ///\n /// db.UpdateOnly(new Person { FirstName = \"JJ\" }, p => p.FirstName);\n /// UPDATE \"Person\" SET \"FirstName\" = 'JJ'\n ///\n /// db.UpdateOnly(new Person { FirstName = \"JJ\", Age = 27 }, p => new { p.FirstName, p.Age );\n /// UPDATE \"Person\" SET \"FirstName\" = 'JJ', \"Age\" = 27\n /// \n public static int UpdateOnly(this IDbConnection dbConn, T obj,\n Expression> onlyFields = null,\n Expression> where = null,\n Action commandFilter = null)\n {\n return dbConn.Exec(dbCmd => dbCmd.UpdateOnly(obj, onlyFields, where, 
commandFilter));\n }\n\n /// \n /// Update record, updating only fields specified in updateOnly that matches the where condition (if any), E.g:\n /// \n /// db.UpdateOnly(new Person { FirstName = \"JJ\" }, new[]{ \"FirstName\" }, p => p.LastName == \"Hendrix\");\n /// UPDATE \"Person\" SET \"FirstName\" = 'JJ' WHERE (\"LastName\" = 'Hendrix')\n /// \n public static int UpdateOnly(this IDbConnection dbConn, T obj,\n string[] onlyFields,\n Expression> where = null,\n Action commandFilter = null)\n {\n return dbConn.Exec(dbCmd => dbCmd.UpdateOnly(obj, onlyFields, where, commandFilter));\n }\n\n /// \n /// Update record, updating only fields specified in updateOnly that matches the where condition (if any), E.g:\n /// Numeric fields generates an increment sql which is useful to increment counters, etc...\n /// avoiding concurrency conflicts\n /// \n /// db.UpdateAdd(() => new Person { Age = 5 }, where: p => p.LastName == \"Hendrix\");\n /// UPDATE \"Person\" SET \"Age\" = \"Age\" + 5 WHERE (\"LastName\" = 'Hendrix')\n ///\n /// db.UpdateAdd(() => new Person { Age = 5 });\n /// UPDATE \"Person\" SET \"Age\" = \"Age\" + 5\n /// \n public static int UpdateAdd(this IDbConnection dbConn,\n Expression> updateFields,\n Expression> where = null,\n Action commandFilter = null)\n {\n return dbConn.Exec(dbCmd => dbCmd.UpdateAdd(updateFields, dbCmd.GetDialectProvider().SqlExpression().Where(where), commandFilter));\n }\n\n /// \n /// Update record, updating only fields specified in updateOnly that matches the where condition (if any), E.g:\n /// Numeric fields generates an increment sql which is useful to increment counters, etc...\n /// avoiding concurrency conflicts\n /// \n /// db.UpdateAdd(() => new Person { Age = 5 }, db.From<Person>().Where(p => p.LastName == \"Hendrix\"));\n /// UPDATE \"Person\" SET \"Age\" = \"Age\" + 5 WHERE (\"LastName\" = 'Hendrix')\n /// \n public static int UpdateAdd(this IDbConnection dbConn,\n Expression> updateFields,\n SqlExpression q,\n Action 
commandFilter = null)\n {\n return dbConn.Exec(dbCmd => dbCmd.UpdateAdd(updateFields, q, commandFilter));\n }\n\n /// \n /// Updates all values from Object Dictionary matching the where condition. E.g\n /// \n /// db.UpdateOnly<Person>(new Dictionary<string,object< { {\"FirstName\", \"JJ\"} }, where:p => p.FirstName == \"Jimi\");\n /// UPDATE \"Person\" SET \"FirstName\" = 'JJ' WHERE (\"FirstName\" = 'Jimi')\n /// \n public static int UpdateOnly(this IDbConnection dbConn, Dictionary updateFields, Expression> obj)\n {\n return dbConn.Exec(dbCmd => dbCmd.UpdateOnly(updateFields, obj));\n }\n\n /// \n /// Updates all non-default values set on item matching the where condition (if any). E.g\n /// \n /// db.UpdateNonDefaults(new Person { FirstName = \"JJ\" }, p => p.FirstName == \"Jimi\");\n /// UPDATE \"Person\" SET \"FirstName\" = 'JJ' WHERE (\"FirstName\" = 'Jimi')\n /// \n public static int UpdateNonDefaults(this IDbConnection dbConn, T item, Expression> obj)\n {\n return dbConn.Exec(dbCmd => dbCmd.UpdateNonDefaults(item, obj));\n }\n\n /// \n /// Updates all values set on item matching the where condition (if any). E.g\n /// \n /// db.Update(new Person { Id = 1, FirstName = \"JJ\" }, p => p.LastName == \"Hendrix\");\n /// UPDATE \"Person\" SET \"Id\" = 1,\"FirstName\" = 'JJ',\"LastName\" = NULL,\"Age\" = 0 WHERE (\"LastName\" = 'Hendrix')\n /// \n public static int Update(this IDbConnection dbConn, T item, Expression> where, Action commandFilter = null)\n {\n return dbConn.Exec(dbCmd => dbCmd.Update(item, where, commandFilter));\n }\n\n /// \n /// Updates the entity using the primary key as the filter\n /// \n public static int Update(this IDbConnection dbConn, object entity)\n {\n return dbConn.Exec(dbCmd => dbCmd.Update(entity, where:null, commandFilter:null));\n }\n\n /// \n /// Updates all matching fields populated on anonymousType that matches where condition (if any). 
E.g:\n /// \n /// db.Update<Person>(new { FirstName = \"JJ\" }, p => p.LastName == \"Hendrix\");\n /// UPDATE \"Person\" SET \"FirstName\" = 'JJ' WHERE (\"LastName\" = 'Hendrix')\n /// \n public static int Update(this IDbConnection dbConn, object updateOnly, Expression> where, Action commandFilter = null)\n {\n return dbConn.Exec(dbCmd => dbCmd.Update(updateOnly, where, commandFilter));\n }\n\n /// \n /// Using an SqlExpression to only Insert the fields specified, e.g:\n /// \n /// db.InsertOnly(new Person { FirstName = \"Amy\" }, p => p.FirstName));\n /// INSERT INTO \"Person\" (\"FirstName\") VALUES ('Amy');\n /// \n /// db.InsertOnly(new Person { Id =1 , FirstName=\"Amy\" }, p => new { p.Id, p.FirstName }));\n /// INSERT INTO \"Person\" (\"Id\", \"FirstName\") VALUES (1, 'Amy');\n /// \n public static long InsertOnly(this IDbConnection dbConn, T obj, Expression> onlyFields, bool selectIdentity = false)\n {\n return dbConn.Exec(dbCmd => dbCmd.InsertOnly(obj, onlyFields.GetFieldNames(), selectIdentity));\n }\n\n /// \n /// Using an SqlExpression to only Insert the fields specified, e.g:\n /// \n /// db.InsertOnly(new Person { FirstName = \"Amy\" }, new[]{ \"FirstName\" }));\n /// INSERT INTO \"Person\" (\"FirstName\") VALUES ('Amy');\n /// \n public static long InsertOnly(this IDbConnection dbConn, T obj, string[] onlyFields, bool selectIdentity = false)\n {\n return dbConn.Exec(dbCmd => dbCmd.InsertOnly(obj, onlyFields, selectIdentity));\n }\n\n /// \n /// Using an SqlExpression to only Insert the fields specified, e.g:\n /// \n /// db.InsertOnly(() => new Person { FirstName = \"Amy\" }));\n /// INSERT INTO \"Person\" (\"FirstName\") VALUES (@FirstName);\n /// \n public static long InsertOnly(this IDbConnection dbConn, Expression> insertFields, bool selectIdentity = false)\n {\n return dbConn.Exec(dbCmd => dbCmd.InsertOnly(insertFields, selectIdentity));\n }\n\n /// \n /// Delete the rows that matches the where expression, e.g:\n /// \n /// db.Delete<Person>(p => 
p.Age == 27);\n /// DELETE FROM \"Person\" WHERE (\"Age\" = 27)\n /// \n public static int Delete(this IDbConnection dbConn, Expression> where)\n {\n return dbConn.Exec(dbCmd => dbCmd.Delete(where));\n }\n\n /// \n /// Delete the rows that matches the where expression, e.g:\n /// \n /// var q = db.From<Person>());\n /// db.Delete<Person>(q.Where(p => p.Age == 27));\n /// DELETE FROM \"Person\" WHERE (\"Age\" = 27)\n /// \n public static int Delete(this IDbConnection dbConn, SqlExpression where)\n {\n return dbConn.Exec(dbCmd => dbCmd.Delete(where));\n }\n }\n}", "meta": {"content_hash": "b9e0f6c6088382028c5e3bbadfbd6ecc", "timestamp": "", "source": "github", "line_count": 254, "max_line_length": 155, "avg_line_length": 49.25984251968504, "alnum_prop": 0.5728101023017903, "repo_name": "Pathfinder-Fr/YAFNET", "id": "0e47d06e0b057909bd41e636c4356ac7f807766e", "size": "12514", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "yafsrc/ServiceStack/ServiceStack.OrmLite/OrmLiteWriteExpressionsApi.cs", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ASP", "bytes": "1160769"}, {"name": "Batchfile", "bytes": "4612"}, {"name": "C#", "bytes": "32268813"}, {"name": "CSS", "bytes": "1119799"}, {"name": "HTML", "bytes": "14854"}, {"name": "JavaScript", "bytes": "5789303"}, {"name": "PLSQL", "bytes": "34218"}, {"name": "PLpgSQL", "bytes": "53478"}, {"name": "TSQL", "bytes": "706921"}]}} {"text": "SharePoint Add-in Recipe - Yammer Integration\n=============================================\n\nSummary\n-------\n\nThe approach you take to integrate Yammer with SharePoint is the same in the new SharePoint Add-in model as it is with Full Trust Code.\n\nHigh Level Guidelines\n---------------------\n\nAs a rule of a thumb, we would like to provide the following high level guidelines to integrate Yammer with SharePoint.\n\n- Yammer integration may be used in both on-premises and Office 365 SharePoint environments.\n- You can use the remote provisioning 
pattern to create Yammer groups and/or Yammer OpenGraph objects to facilitate conversations when you create new SharePoint sites.\n- You can use the out-of-the-box embed functionality to quickly and easily integrate Yammer with SharePoint.\n\t+ To use embed you need a HTML container 400 pixels or larger in your application.\n- You can use the Yammer SDKs and REST APIs to create customized integration functionality.\n\nOptions to integrate Yammer with SharePoint\n-------------------------------------------\n\nYou have a few options to integrate Yammer with SharePoint.\n\n- Embed\n\t+ Group, Topic, My, and User Feeds\n\t+ OpenGraph Feeds\n- Yammer OpenGraph API and/or Yammer REST API with Yammer SDKs\n\t\nEmbed\n-----\n\nIn this option you embed a Yammer feed in a SharePoint web page.\n\t\n- This option is quickly and easily implemented.\n- This option allows you to control limited aspects of the feed and how it appears.\n\nUsing embed looks like this in your SharePoint page:\n\n![](media/Recipes/Yammer/YammerEmbed.png)\n\nThe following table describes each type of Yammer feed you can access with embed out-of-the-box.\n\nFeed | Description | FeedType | Use Case\n---- | ----------- | -------- | --------\nMy Feed | My Feeds are where conversations are delivered for Yammer users. | MyFeed | My Site homepage or workspace site.\nUser Feed | All the conversations posted by a specific user in Yammer. | User | Profile pages for users in a system directory.\nTopic Feed | A feed of conversations that have been tagged with a topic in Yammer. | Topic | An event page on an intranet.\nGroup Feed | A feed of conversations that have been posted in a specified group. | Group | A team page on an intranet.\n\nIf you need to go beyond the capabilities of the out-of-the-box Yammer feeds in the table above you can use the OpenGraph embed option. This option gives you more control of the feed. 
The following table illustrates such an example.\n\nFeed | Description | FeedType | Use Case\n---- | ----------- | -------- | --------\nComment Feed | Uses Yammer\u2019s Open Graph API to facilitate conversation around an application object. | Custom | An opportunity in a custom CRM application, or a media detail page in a digital asset management system.\n\n**When is it a good fit?**\n\nWhen you are trying to integrate Yammer feeds with SharePoint sites and the out-of-the-box capabilities of the embed feed meet your needs.\n\n**Getting Started**\n\nThe following sample demonstrates how to provision sites with a Yammer feed associated with the site in place of the default news feed for the site.\n\n- [Provisioning.Yammer (O365 PnP Sample)](https://github.com/OfficeDev/PnP/tree/master/Scenarios/Provisioning.Yammer)\n\nThe **CreateYammerGroupDiscussionPartXml** method in the [YammerUtility.cs](https://github.com/OfficeDev/PnP/blob/master/OfficeDevPnP.Core/OfficeDevPnP.Core/Utilities/YammerUtility.cs) class comes from the [OfficeDevPnP.Core](https://github.com/OfficeDev/PnP/blob/master/OfficeDevPnP.Core/OfficeDevPnP.Core) sample. This method creates the XML for an App Part definition that is added to a SharePoint page when a site is provisioned. Notice the **feedType: 'group'** portion of the code. 
Here you can see the feedType is set to use the out-of-the-box group feedType.\n\n\tpublic static string CreateYammerGroupDiscussionPartXml(string yammerNetworkName, int yammerGroupId, bool showHeader, bool showFooter, bool useSSO = true)\n {\n StringBuilder wp = new StringBuilder(100);\n wp.Append(\"\");\n wp.Append(\"\");\n wp.Append(\"\t\");\n wp.Append(\"\t\t\");\n wp.Append(\"\t\t\t\");\n wp.Append(\"\t\t\tCannot import this Web Part.\");\n wp.Append(\"\t\t\");\n wp.Append(\"\t\t\");\n wp.Append(\"\t\t\t\");\n wp.Append(\"\t\t\t\t$Resources:core,ScriptEditorWebPartTitle;\");\n wp.Append(\"\t\t\t\t$Resources:core,ScriptEditorWebPartDescription;\");\n wp.Append(\"\t\t\t\tNone\");\n wp.Append(\"\t\t\t\t\");\n wp.Append(\"\t\t\t\t\");\n wp.Append(\"\t\t\t\t \");\n wp.Append(\"\t\t\t\t \");\n wp.Append(\"\t\t\t\t]]>\");\n wp.Append(\"\t\t\t\t\");\n wp.Append(\"\t\t\t\");\n wp.Append(\"\t\t\");\n wp.Append(\"\t\");\n wp.Append(\"\");\n\n return wp.ToString();\n }\n\nThe **CreateYammerOpenGraphDiscussionPartXml** method in the [YammerUtility.cs](https://github.com/OfficeDev/PnP/blob/master/OfficeDevPnP.Core/OfficeDevPnP.Core/Utilities/YammerUtility.cs) class comes from the [OfficeDevPnP.Core](https://github.com/OfficeDev/PnP/blob/master/OfficeDevPnP.Core/OfficeDevPnP.Core) sample. This method creates the XML for an App Part definition that is added to a SharePoint page when a site is provisioned. Notice the **feedType: 'open-graph'** portion of the code. 
Here you can see the feedType is set to use the OpenGraph API.\n\n\tpublic static string CreateYammerOpenGraphDiscussionPartXml(string yammerNetworkName, string url, bool showHeader, \n bool showFooter, string postTitle=\"\", string postImageUrl=\"\", \n bool useSso = true, string groupId = \"\")\n {\n StringBuilder wp = new StringBuilder(100);\n wp.Append(\"\");\n wp.Append(\"\");\n wp.Append(\"\t\");\n wp.Append(\"\t\t\");\n wp.Append(\"\t\t\t\");\n wp.Append(\"\t\t\tCannot import this Web Part.\");\n wp.Append(\"\t\t\");\n wp.Append(\"\t\t\");\n wp.Append(\"\t\t\t\");\n wp.Append(\"\t\t\t\t$Resources:core,ScriptEditorWebPartTitle;\");\n wp.Append(\"\t\t\t\t$Resources:core,ScriptEditorWebPartDescription;\");\n wp.Append(\"\t\t\t\tNone\");\n wp.Append(\"\t\t\t\t\");\n wp.Append(\"\t\t\t\t\");\n wp.Append(\"\t\t\t\t \");\n wp.Append(\"\t\t\t\t \");\n wp.Append(\"\t\t\t\t]]>\");\n wp.Append(\"\t\t\t\t\");\n wp.Append(\"\t\t\t\");\n wp.Append(\"\t\t\");\n wp.Append(\"\t\");\n wp.Append(\"\");\n\n return wp.ToString();\n }\n\nWatch the [Integrate Yammer feeds to SharePoint sites (O365 PnP Video)](https://channel9.msdn.com/blogs/OfficeDevPnP/Integrate-Yammer-feeds-to-SharePoint-sites) to see a walk through of the - [Provisioning.Yammer (O365 PnP Sample)](https://github.com/OfficeDev/PnP/tree/master/Scenarios/Provisioning.Yammer).\n\nFor more information about Yammer embed see the [Yammer Embed Feed (Yammer Developer Center)](https://developer.yammer.com/v1.0/docs/embed) article.\n\nFor more information about Yammer OpenGraph see the [Open Graph Introduction & Format (Yammer Developer Center)](https://developer.yammer.com/v1.0/docs/open-graph) article.\n\nYammer OpenGraph API & Yammer REST API with Yammer SDKs\n-------------------------------------------------------\n\nIn this option you use the Yammer OpenGraph API and/or Yammer REST API with Yammer SDKs to integrate Yammer with SharePoint. 
These APIs may also be used to integrate Yammer with processes outside of web pages. Examples of such scenarios include services and long running operations. \n\t\n- This option takes longer to implement.\n- This option allows you to control all aspects of the feed and how it appears and how you interact with it.\n\n**When is it a good fit?**\n\n- When you are trying to integrate Yammer feeds with SharePoint sites and the out-of-the-box capabilities of the embed feeds do not meet your needs.\n- When you are trying to integrate Yammer feeds into services or long running operations.\n\n**Getting Started**\n\nFor more information about Yammer OpenGraph see the [Open Graph Introduction & Format (Yammer Developer Center)](https://developer.yammer.com/v1.0/docs/open-graph) article.\n\nYammer SDKs provide you the ability to authenticate to Yammer. For more information about the Yammer SDKs see the following articles:\n\n- [JavaScript SDK](https://developer.yammer.com/v1.0/docs/js-sdk)\n- [Ruby SDK](https://developer.yammer.com/v1.0/docs/ruby-sdk)\n- [Python SDK](https://developer.yammer.com/v1.0/docs/python-sdk)\n- [iOS SDK](https://developer.yammer.com/v1.0/docs/ios-sdk)\n- [.NET SDK](https://developer.yammer.com/v1.0/docs/net-sdk)\n- [Windows Phone 8 SDK](https://developer.yammer.com/v1.0/docs/windows-phone-8-sdk)\n\nAfter you have authenticated to Yammer via the Yammer SDKs you can call the Yammer REST APIs. \n\nFor more information about Yammer REST APIs see the [REST API & Rate Limits (Yammer Developer Center)](https://developer.yammer.com/v1.0/docs/rest-api-rate-limits) article.\n\n**Authentication Note**\n\nIn a scenario where you sign into SharePoint with credentials that differ from the credentials you use to sign into SharePoint with you may wish to develop a single-sign-on capability for your users. 
An example of such a scenario is when you sign into SharePoint with a LiveID and you need to sign into Yammer with a Microsoft personal or work account.\n\nTo implement a single-sign-on scenario you can direct your users to sign into Yammer the first time they come to a SharePoint page with your custom Yammer component on it. After the user signs into Yammer via the Yammer SDK you can store the refresh token for the user in their user profile. Then, on subsequent visits to the page you can retrieve the refresh token from the user profile and use it to authenticate. With this approach your end users only need to sign into Yammer when their refresh token expires.\n\nRelated links\n=============\n- [Integrate Yammer feeds to SharePoint sites (O365 PnP Video)](https://channel9.msdn.com/blogs/OfficeDevPnP/Integrate-Yammer-feeds-to-SharePoint-sites)\n- [Yammer Embed Feed (Yammer Developer Center)](https://developer.yammer.com/v1.0/docs/embed)\n- [Open Graph Introduction & Format (Yammer Developer Center)](https://developer.yammer.com/v1.0/docs/open-graph)\n- Guidance articles at [http://aka.ms/OfficeDevPnPGuidance](http://aka.ms/OfficeDevPnPGuidance \"Guidance Articles\")\n- References in MSDN at [http://aka.ms/OfficeDevPnPMSDN](http://aka.ms/OfficeDevPnPMSDN \"References in MSDN\")\n- Videos at [http://aka.ms/OfficeDevPnPVideos](http://aka.ms/OfficeDevPnPVideos \"Videos\")\n\nRelated PnP samples\n===================\n- [Provisioning.Yammer (O365 PnP Sample)](https://github.com/OfficeDev/PnP/tree/master/Scenarios/Provisioning.Yammer)\n- [OfficeDevPnP.Core](https://github.com/OfficeDev/PnP/blob/master/OfficeDevPnP.Core/OfficeDevPnP.Core)\n- Samples and content at [http://aka.ms/OfficeDevPnP](http://aka.ms/OfficeDevPnP)\n\nApplies to\n==========\n- Office 365 Multi Tenant (MT)\n- Office 365 Dedicated (D)\n- SharePoint 2013 on-premises\n\nAuthor\n------\nTodd Baginski (Canviz LLC) - [@toddbaginski](https://twitter.com/toddbaginski)\n\nVersion 
history\n---------------\nVersion | Date | Comments | Author\n---------| -----| ---------| ------\n0.1 | July 6, 2015 | Initial draft | Todd Baginski (Canviz LLC)\n\n", "meta": {"content_hash": "c07bd269c801bd0184ef56c00e2c0eb5", "timestamp": "", "source": "github", "line_count": 237, "max_line_length": 569, "avg_line_length": 63.19409282700422, "alnum_prop": 0.6536689590705749, "repo_name": "weshackett/PnP-Guidance", "id": "ab788b050a2436f49033347666e6203dc906b845", "size": "14979", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "articles/SharePoint-Add-In-Recipe-yammer-integration.md", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "\ufeffusing System;\nusing System.Collections.Generic;\nusing System.Reflection;\n\nnamespace Ploeh.AutoFixture.Kernel\n{\n /// \n /// A specification that determines whether the request is a request\n /// for a matching the specified name and .\n /// \n public class FieldSpecification : IRequestSpecification\n {\n private readonly Type targetType;\n private readonly string targetName;\n private readonly IEquatable target;\n\n /// \n /// Initializes a new instance of the class.\n /// \n /// \n /// The with which the requested\n /// type should be compatible.\n /// \n /// \n /// The name which the requested name\n /// should match exactly.\n /// \n /// \n /// or\n /// is .\n /// \n public FieldSpecification(Type targetType, string targetName)\n : this(CreateDefaultTarget(targetType, targetName))\n {\n this.targetType = targetType;\n this.targetName = targetName;\n }\n\n private static IEquatable CreateDefaultTarget(\n Type targetType,\n string targetName)\n {\n if (targetType == null)\n throw new ArgumentNullException(\"targetType\");\n if (targetName == null)\n throw new ArgumentNullException(\"targetName\");\n\n return new FieldTypeAndNameCriterion(\n new Criterion(\n targetType,\n new DerivesFromTypeComparer()),\n new Criterion(\n targetName,\n EqualityComparer.Default));\n }\n\n /// \n /// 
Initializes a new instance of the class.\n /// \n /// \n /// The criteria used to match the requested\n /// .\n /// \n /// \n /// is .\n /// \n public FieldSpecification(IEquatable target)\n {\n if (target == null)\n throw new ArgumentNullException(\"target\");\n\n this.target = target;\n }\n\n /// \n /// The with which the requested\n /// type should be compatible.\n /// \n [Obsolete(\"This value is only available if the constructor taking a target type and name is used. Otherwise, it'll be null. Use with caution. This propery will be removed in a future version of AutoFixture.\", false)]\n public Type TargetType\n {\n get { return this.targetType; }\n }\n\n /// \n /// The name which the requested name\n /// should match exactly.\n /// \n [Obsolete(\"This value is only available if the constructor taking a target type and name is used. Otherwise, it'll be null. Use with caution. This propery will be removed in a future version of AutoFixture.\", false)]\n public string TargetName\n {\n get { return this.targetName; }\n }\n\n /// \n /// Evaluates a request for a specimen.\n /// \n /// The specimen request.\n /// \n /// if is satisfied by the Specification;\n /// otherwise, .\n /// \n public bool IsSatisfiedBy(object request)\n {\n if (request == null)\n throw new ArgumentNullException(\"request\");\n\n var f = request as FieldInfo;\n if (f == null)\n return false;\n\n return this.target.Equals(f);\n }\n\n private class DerivesFromTypeComparer : IEqualityComparer\n {\n public bool Equals(Type x, Type y)\n {\n if (y == null && x == null)\n return true;\n if (y == null)\n return false;\n return y.IsAssignableFrom(x);\n }\n\n public int GetHashCode(Type obj)\n {\n return 0;\n }\n }\n }\n}\n", "meta": {"content_hash": "61f5e6b4d20ee0fbf466e8559b7b5856", "timestamp": "", "source": "github", "line_count": 132, "max_line_length": 224, "avg_line_length": 36.22727272727273, "alnum_prop": 0.5529067335842743, "repo_name": "yuva2achieve/AutoFixture", "id": 
"fe012e6d83d0d9d3f20f2ad57e6bbfd9152a0dd6", "size": "4784", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Src/AutoFixture/Kernel/FieldSpecification.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "3713467"}, {"name": "F#", "bytes": "43535"}, {"name": "PowerShell", "bytes": "675"}, {"name": "Puppet", "bytes": "170"}, {"name": "Shell", "bytes": "309"}, {"name": "Smalltalk", "bytes": "2018"}, {"name": "XSLT", "bytes": "17270"}]}} {"text": "\n\n \n \n Statistics of Polarity in UD_Uyghur-UDT\n \n \n \n \n \n \n \n \n \n \n \n\n\n\n \n \n
\n\n
\n
\n home\n\n edit page\n issue tracker\n \n \n
\n
\n\n
\n\n \n
\n This page pertains to UD version 2.\n
\n \n\n
\n \n\n \n \n

Treebank Statistics: UD_Uyghur-UDT: Features: Polarity

\n\n

This feature is universal.\nIt occurs with 1 different values: Neg.

\n\n

193 tokens (0%) have a non-empty value of Polarity.\n89 types (1%) occur at least once with a non-empty value of Polarity.\n49 lemmas (2%) occur at least once with a non-empty value of Polarity.\nThe feature is used with 2 part-of-speech tags: VERB (135; 0% instances), AUX (58; 0% instances).

\n\n

VERB

\n\n

135 VERB tokens (2% of all VERB tokens) have a non-empty value of Polarity.

\n\n

The most frequent other feature values with which VERB and Polarity co-occurred: Case=EMPTY (135; 100%), VerbForm=Fin (114; 84%), Mood=Ind (109; 81%), Person=3 (108; 80%), Aspect=EMPTY (102; 76%), Number=Sing (85; 63%), Tense=Past (76; 56%).

\n\n

VERB tokens may have the following values of Polarity:

\n\n
    \n
  • Neg (135; 100% of non-empty Polarity): \u0628\u0648\u0644\u0645\u0649\u063a\u0627\u0646\u060c \u0628\u0648\u0644\u0645\u0627\u064a\u062f\u06c7\u060c \u0628\u0627\u0642\u0645\u0649\u063a\u0627\u0646\u060c \u0628\u0648\u0644\u0645\u0649\u0633\u0627\u060c \u0642\u0649\u0644\u0645\u0649\u062f\u0649\u060c \u0626\u0648\u062e\u0634\u0649\u0645\u0649\u063a\u0627\u0646\u060c \u0626\u0648\u064a\u0644\u0649\u0645\u0649\u063a\u0627\u0646\u060c \u062f\u06d0\u0645\u0649\u0633\u0649\u0645\u06c7\u060c \u0642\u0627\u0631\u0649\u0645\u0627\u064a\u062f\u06c7\u060c \u0643\u06d5\u0644\u0645\u06d5\u064a\u062f\u06c7
  • \n
  • EMPTY (8193): \u062f\u06d5\u067e\u060c \u062f\u06d0\u062f\u0649\u060c \u0628\u0648\u0644\u06c7\u067e\u060c \u0642\u0649\u0644\u0649\u067e\u060c \u062f\u06d5\u067e\u062a\u06c7\u060c \u0642\u0627\u0631\u0627\u067e\u060c \u0628\u0627\u0631\u060c \u0626\u06d0\u0644\u0649\u067e\u060c \u062f\u06d0\u06af\u06d5\u0646\u060c \u0643\u06d0\u0644\u0649\u067e
  • \n
\n\n

Polarity seems to be lexical feature of VERB. 100% lemmas (48) occur only with one value of Polarity.

\n\n

AUX

\n\n

58 AUX tokens (6% of all AUX tokens) have a non-empty value of Polarity.

\n\n

The most frequent other feature values with which AUX and Polarity co-occurred: Person=3 (56; 97%), Number=Sing (55; 95%), Aspect=EMPTY (44; 76%), Tense=EMPTY (39; 67%), Mood=EMPTY (36; 62%), VerbForm=EMPTY (36; 62%).

\n\n

AUX tokens may have the following values of Polarity:

\n\n
    \n
  • Neg (58; 100% of non-empty Polarity): \u0626\u06d5\u0645\u06d5\u0633\u060c \u0628\u0648\u0644\u0645\u0627\u064a\u062f\u06c7\u060c \u0626\u06d5\u0645\u06d5\u0633\u0645\u06c7\u060c \u0628\u0648\u0644\u0645\u0649\u063a\u0627\u0646\u060c \u0642\u0627\u0644\u0645\u0627\u064a\u062f\u06c7\u060c \u0628\u0648\u0644\u0645\u0627\u0645\u062f\u06c7\u060c \u0628\u0648\u0644\u0645\u0649\u0633\u0627\u060c \u0628\u0648\u0644\u0645\u0649\u0633\u0649\u0645\u06c7\u060c \u062a\u06c7\u0631\u0645\u0627\u0645\u062f\u06c7\u060c \u0642\u0627\u0644\u0645\u0649\u062f\u06c7\u0642
  • \n
  • EMPTY (852): \u0626\u0649\u062f\u0649\u060c \u0628\u0648\u0644\u06c7\u067e\u060c \u0626\u0649\u0643\u06d5\u0646\u060c \u0643\u06d5\u062a\u062a\u0649\u060c \u0642\u0627\u0644\u062f\u0649\u060c \u0628\u0648\u0644\u0633\u0627\u060c \u0628\u0648\u0644\u062f\u0649\u060c \u0628\u0648\u0644\u0649\u062f\u06c7\u060c \u0642\u0627\u0644\u062f\u0649\u0645\u060c \u062a\u06c7\u0631\u06c7\u067e
  • \n
\n\n\n
\n\n\n\n\n\n\n\n\n\n
\n

© 2014\u20132021\n Universal Dependencies contributors.\n Site powered by Annodoc and brat

.\n
\n
\n \n\n", "meta": {"content_hash": "ab2b5d4326226616abf4c1a7abfa5377", "timestamp": "", "source": "github", "line_count": 191, "max_line_length": 753, "avg_line_length": 55.18324607329843, "alnum_prop": 0.6471537001897533, "repo_name": "UniversalDependencies/universaldependencies.github.io", "id": "7eaa83f3b8c1e691deb33d56696de7140371fd23", "size": "10833", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "treebanks/ug_udt/ug_udt-feat-Polarity.html", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "64420"}, {"name": "HTML", "bytes": "383191916"}, {"name": "JavaScript", "bytes": "687350"}, {"name": "Perl", "bytes": "7788"}, {"name": "Python", "bytes": "21203"}, {"name": "Shell", "bytes": "7253"}]}} {"text": "'use strict';\n// mailgun messaging module\n// requires account from www.mailgun.com\n// Free email service for up to 400 messages a day\n\n// require config for environment variables\nvar config = require(\"../../config\");\n\n// set config variables\nvar api_key = config.mailgunPrivateApiKey;\nvar domain = config.mailgunDomain;\nvar fromEmail = config.mailgunFromEmail;\n\n// initialize mailgun service\nvar mailgun = require('mailgun-js')({\n\tapiKey: api_key,\n\tdomain: domain\n});\n\n// create messaging module export object\nvar mailer = {};\n\n// sendOne\n// sends one email, requires paramater object :\n// params = {\n// to: \"Blah blah \",\n// subject: \"Blah dee blah\",\n// text: \"Blah deedee blah\",\n// html: \"

Blah deedee blah

\"\n// };\nmailer.sendOne = function(params, callback) {\n\n\tvar data = {\n\t\tfrom: fromEmail,\n\t\tto: params.to,\n\t\tsubject: params.subject,\n\t\ttext: params.text,\n\t\thtml: params.html\n\t};\n\n\t// send message; if there is a callback let it handle the err, else handle err\n\tmailgun.messages().send(data, function(err) {\n\t\tif (callback) {\n\t\t\tcallback(err);\n\t\t} else if (err) {\n\t\t\tconsole.log(err);\n\t\t} else {\n\t\t\tconsole.log('email sent!');\n\t\t}\n\t});\n};\n\n// export\nmodule.exports = mailer;\n", "meta": {"content_hash": "9509f2a773189dac292fd801580db52d", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 79, "avg_line_length": 22.40740740740741, "alnum_prop": 0.6735537190082644, "repo_name": "wtabikeshare/open-bike-project", "id": "b3bc4a1abc22c7e9152592f0096aa1aec386b12d", "size": "1210", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/mailgun/index.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "259"}, {"name": "HTML", "bytes": "25553"}, {"name": "JavaScript", "bytes": "42672"}]}} {"text": "The tool is designed to query the [PassiveTotal - Get Passive DNS](http://api.passivetotal.org/api/docs/#api-Passive_DNS-GetV2DnsPassive) and [VirusTotal - IP Address Report](https://developers.virustotal.com/v2.0/reference#ip-address-report) APIs to return all domains associated with an IP. Then each site is retrieved live to attempt to detect web technologies in use on each record. The web technology detection is based on a python port of [Wappalyzer](https://wappalyzer.com/) that has been modified to work with Python 3.6.2.\n\n**Warning:** This will reach out to all domains associated with an IP, including possibily malicious domains. Use with caution.\nTo reduce the number of malicious domains, the `MALWARE` config can be set to False to remove VirusTotal's \"detected urls.\"\n\n*Note:* PassiveTotal's Community API only supports 15 lookups a day. 
VirusTotal's Public API caps at 4 queries a minute.\n\n## Inputs\n* IP or List of IPs\n* Job ID for tracking\n\n## Outputs\nWeb Technologies in use on domains seen in passive DNS records for each IP\n\nUp-to-date apps.json:\n https://raw.githubusercontent.com/AliasIO/Wappalyzer/master/src/apps.json\n \n## Install\n1. Install Python 3.6.2 \n2. pip3 install six\n3. pip3 install requests\n4. pip3 install flask\n5. Update configs.cfg with API keys\n6. python3 getwt.py\n7. Web browse to the IP and Port configured in configs\n\n### Modules in Use\n* Python\n * Version: 3.6.2\n * Use: Code Base\n* flask\n * Version: 0.12.2\n * Use: User Interface\n* sqlite3\n * Version: 2.6.0\n * Use: Backend Database\n* [wad](https://pypi.python.org/pypi/wad)\n * Version: Custom modified\n * Use: Web technology scanner\n * Notes: Modified to work with Python 3.6.2\n* six\n * Version: 1.10.0\n* requests\n * Version 2.18.4\n * Use: Retrieve online apps.json\n \n#### TODO:\n* Convert legacy WAD code to beautifulSoup/requests\n* Support other databases\n* Handle sites with tarpit-style responses\n* Add [PassiveTotal - Host_Attributes - Get Components](http://api.passivetotal.org/api/docs/#api-Host_Attributes-GetV2HostAttributesComponents) Integration\n* Add support for \"Last Seen Date\"\n\n", "meta": {"content_hash": "ceaac205a83cf8eef0c732bcd0914adf", "timestamp": "", "source": "github", "line_count": 53, "max_line_length": 532, "avg_line_length": 39.79245283018868, "alnum_prop": 0.7496443812233285, "repo_name": "import-au/GetWT", "id": "2c77b34aa6f1d09ff131b462de9967a90cbfa24d", "size": "2133", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "5579"}, {"name": "Python", "bytes": "38791"}]}} {"text": "\npackage com.opengamma.analytics.financial.interestrate.bond.provider;\n\nimport static com.opengamma.financial.convention.yield.SimpleYieldConvention.INDEX_LINKED_FLOAT;\nimport static 
com.opengamma.financial.convention.yield.SimpleYieldConvention.UK_IL_BOND;\nimport static com.opengamma.financial.convention.yield.SimpleYieldConvention.US_IL_REAL;\n\nimport org.apache.commons.lang.Validate;\n\nimport com.opengamma.analytics.financial.instrument.inflation.CouponInflationGearing;\nimport com.opengamma.analytics.financial.interestrate.bond.definition.BondCapitalIndexedSecurity;\nimport com.opengamma.analytics.financial.interestrate.inflation.derivative.CouponInflationZeroCouponInterpolationGearing;\nimport com.opengamma.analytics.financial.interestrate.inflation.derivative.CouponInflationZeroCouponMonthlyGearing;\nimport com.opengamma.analytics.financial.interestrate.payments.derivative.Coupon;\nimport com.opengamma.analytics.financial.provider.calculator.inflation.NetAmountInflationCalculator;\nimport com.opengamma.analytics.financial.provider.calculator.inflation.PresentValueCurveSensitivityDiscountingInflationCalculator;\nimport com.opengamma.analytics.financial.provider.calculator.inflation.PresentValueDiscountingInflationCalculator;\nimport com.opengamma.analytics.financial.provider.description.inflation.InflationIssuerProviderInterface;\nimport com.opengamma.analytics.financial.provider.description.inflation.InflationIssuerProviderIssuerDecoratedSpread;\nimport com.opengamma.analytics.financial.provider.description.inflation.InflationProviderDecoratedIssuer;\nimport com.opengamma.analytics.financial.provider.description.inflation.InflationProviderInterface;\nimport com.opengamma.analytics.financial.provider.sensitivity.inflation.MultipleCurrencyInflationSensitivity;\nimport com.opengamma.analytics.math.function.Function1D;\nimport com.opengamma.analytics.math.rootfinding.BracketRoot;\nimport com.opengamma.analytics.math.rootfinding.BrentSingleRootFinder;\nimport com.opengamma.analytics.math.rootfinding.RealSingleRootFinder;\nimport com.opengamma.financial.convention.yield.YieldConvention;\nimport com.opengamma.util.ArgumentChecker;\nimport 
com.opengamma.util.money.Currency;\nimport com.opengamma.util.money.MultipleCurrencyAmount;\n\n/**\n * Pricing method for inflation bond. The price is computed by index estimation and discounting.\n */\npublic final class BondCapitalIndexedSecurityDiscountingMethod {\n\n /**\n * The unique instance of the class.\n */\n private static final BondCapitalIndexedSecurityDiscountingMethod INSTANCE = new BondCapitalIndexedSecurityDiscountingMethod();\n\n /**\n * Return the class instance.\n * \n * @return The instance.\n */\n public static BondCapitalIndexedSecurityDiscountingMethod getInstance() {\n return INSTANCE;\n }\n\n /**\n * The present value inflation calculator (for the different parts of the bond transaction).\n */\n private static final PresentValueDiscountingInflationCalculator PVIC = PresentValueDiscountingInflationCalculator.getInstance();\n private static final NetAmountInflationCalculator NAIC = NetAmountInflationCalculator.getInstance();\n private static final PresentValueCurveSensitivityDiscountingInflationCalculator PVCSIC = PresentValueCurveSensitivityDiscountingInflationCalculator\n .getInstance();\n // TODO: REVIEW: Method depends on Calculator; Calculator would depend on Method (code duplicated to avoid circularity).\n /**\n * The root bracket used for yield finding.\n */\n private static final BracketRoot BRACKETER = new BracketRoot();\n /**\n * The root finder used for yield finding.\n */\n private static final RealSingleRootFinder ROOT_FINDER = new BrentSingleRootFinder();\n\n /**\n * Computes the present value of a capital indexed bound by index estimation and discounting. 
The value is the value of the nominal and the coupons but not\n * the settlement.\n * \n * @param bond\n * The bond.\n * @param provider\n * The provider.\n * @return The present value.\n */\n public MultipleCurrencyAmount presentValue(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface provider) {\n ArgumentChecker.notNull(bond, \"Bond\");\n final InflationProviderInterface creditDiscounting = new InflationProviderDecoratedIssuer(provider, bond.getCurrency(), bond.getIssuerEntity());\n final MultipleCurrencyAmount pvNominal = bond.getNominal().accept(PVIC, creditDiscounting);\n final MultipleCurrencyAmount pvCoupon = bond.getCoupon().accept(PVIC, creditDiscounting);\n return pvNominal.plus(pvCoupon);\n }\n\n /**\n * Computes the security present value from a quoted clean real price. The real accrued are added to the clean real price, the result is multiplied by the\n * inflation index ratio and then discounted from settlement time to 0 with the discounting curve.\n * \n * @param bond\n * The bond security.\n * @param market\n * The market.\n * @param cleanPriceReal\n * The real clean price.\n * @return The present value.\n */\n public MultipleCurrencyAmount presentValueFromCleanRealPrice(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface market,\n final double cleanPriceReal) {\n Validate.notNull(bond, \"Coupon\");\n Validate.notNull(market, \"Market\");\n final double settlement = bond.getSettlement().accept(PVIC, market.getInflationProvider()).getAmount(bond.getCurrency());\n final double cleanPriceNominal = cleanPriceReal * settlement;\n return presentValueFromCleanNominalPrice(bond, market, cleanPriceNominal);\n }\n\n /**\n * Computes the security present value from a quoted clean real price. 
The real accrued are added to the clean real price, the result is multiplied by the\n * inflation index ratio and then discounted from settlement time to 0 with the discounting curve.\n * \n * @param bond\n * The bond security.\n * @param market\n * The market.\n * @param cleanPriceNominal\n * The nominal clean price.\n * @return The present value.\n */\n public MultipleCurrencyAmount presentValueFromCleanNominalPrice(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface market,\n final double cleanPriceNominal) {\n Validate.notNull(bond, \"Coupon\");\n Validate.notNull(market, \"Market\");\n final double notional = bond.getCoupon().getNthPayment(0).getNotional();\n final MultipleCurrencyAmount nominalAccruedInterest = bond.getSettlement().accept(PVIC, market.getInflationProvider())\n .multipliedBy(bond.getAccruedInterest() / notional);\n final double dirtyPriceNominal = cleanPriceNominal + nominalAccruedInterest.getAmount(bond.getCurrency());\n return MultipleCurrencyAmount.of(bond.getCurrency(), dirtyPriceNominal);\n }\n\n /**\n * Calculates the accrued interest for a fixed-coupon bond using the clean price. The accrued interest is defined as dirty price - clean price.\n * \n * @param bond\n * The bond, not null\n * @param cleanPrice\n * The clean price\n * @return The accrued interest\n */\n public double accruedInterestFromCleanRealPrice(final BondCapitalIndexedSecurity bond, final double cleanPrice) {\n ArgumentChecker.notNull(bond, \"bond\");\n return dirtyRealPriceFromCleanRealPrice(bond, cleanPrice) - cleanPrice;\n }\n\n /**\n * Calculates the accrued interest for a fixed-coupon bond using the clean price. 
The accrued interest is defined as dirty price - clean price.\n * \n * @param bond\n * The bond, not null\n * @param yield\n * The yield\n * @return The accrued interest\n */\n public double accruedInterestFromCleanYield(final BondCapitalIndexedSecurity bond, final double yield) {\n ArgumentChecker.notNull(bond, \"bond\");\n return dirtyPriceFromRealYield(bond, yield) - cleanPriceFromYield(bond, yield);\n }\n\n /**\n * Computes the clean real price of a bond security from a dirty real price.\n * \n * @param bond\n * The bond security.\n * @param dirtyPrice\n * The dirty price.\n * @return The clean price.\n */\n public double cleanRealPriceFromDirtyRealPrice(final BondCapitalIndexedSecurity bond, final double dirtyPrice) {\n final double notional = bond.getCoupon().getNthPayment(0).getNotional();\n return dirtyPrice - bond.getAccruedInterest() / notional;\n }\n\n /**\n * Computes the clean nominal price of a bond security from a dirty real price.\n * \n * @param bond\n * The bond security.\n * @param dirtyPrice\n * The dirty price.\n * @return The clean price.\n */\n public double cleanNominalPriceFromDirtyNominalPrice(final BondCapitalIndexedSecurity bond, final double dirtyPrice) {\n final double notional = bond.getCoupon().getNthPayment(0).getNotional();\n final double indexRatio = bond.getIndexRatio();\n return dirtyPrice - bond.getAccruedInterest() / notional * indexRatio;\n }\n\n /**\n * Computes the clean price of a bond security from curves.\n * \n * @param bond\n * The bond security.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @return The clean price.\n */\n public double cleanRealPriceFromCurves(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface issuerMulticurves) {\n final double indexRatio = bond.getIndexRatio();\n final double notional = bond.getCoupon().getNthPayment(0).getNotional();\n final double df = issuerMulticurves.getMulticurveProvider().getDiscountFactor(bond.getCurrency(), 
bond.getSettlementTime());\n final double pvReal = presentValue(bond, issuerMulticurves).getAmount(bond.getCurrency()) / indexRatio / df / notional;\n return pvReal - bond.getAccruedInterest() / notional;\n }\n\n /**\n * Computes the clean price of a bond security from curves.\n * \n * @param bond\n * The bond security.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @return The clean price.\n */\n public double cleanNominalPriceFromCurves(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface issuerMulticurves) {\n final double dirtyNominalPrice = dirtyNominalPriceFromCurves(bond, issuerMulticurves);\n return cleanNominalPriceFromDirtyNominalPrice(bond, dirtyNominalPrice);\n }\n\n /**\n * Computes the clean real price of a bond security from a dirty real price.\n * \n * @param bond\n * The bond security.\n * @param cleanNominalPrice\n * The clean nominal price.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @return The clean price.\n */\n public double cleanRealPriceFromCleanNominalPrice(final BondCapitalIndexedSecurity bond, final double cleanNominalPrice,\n final InflationIssuerProviderInterface issuerMulticurves) {\n final double indexRatio = bond.getIndexRatio();\n return cleanNominalPrice / indexRatio;\n }\n\n /**\n * Compute the dirty price of a bond security from curves.\n * \n * @param bond\n * The bond security.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @return The dirty price.\n */\n public double dirtyNominalPriceFromCurves(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface issuerMulticurves) {\n ArgumentChecker.notNull(bond, \"Bond\");\n ArgumentChecker.notNull(issuerMulticurves, \"Issuer and multi-curves provider\");\n final MultipleCurrencyAmount pv = presentValue(bond, issuerMulticurves);\n final double df = issuerMulticurves.getMulticurveProvider().getDiscountFactor(bond.getCurrency(), 
bond.getSettlementTime());\n final double notional = bond.getCoupon().getNthPayment(0).getNotional();\n return pv.getAmount(bond.getCurrency()) / df / notional;\n }\n\n /**\n * Compute the dirty price of a bond security from curves.\n * \n * @param bond\n * The bond security.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @return The dirty price.\n */\n public double dirtyRealPriceFromCurves(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface issuerMulticurves) {\n ArgumentChecker.notNull(bond, \"Bond\");\n ArgumentChecker.notNull(issuerMulticurves, \"Issuer and multi-curves provider\");\n final MultipleCurrencyAmount pv = presentValue(bond, issuerMulticurves);\n final double settlement = bond.getIndexRatio();\n final double notional = bond.getCoupon().getNthPayment(0).getNotional();\n return pv.getAmount(bond.getCurrency()) / settlement / notional;\n }\n\n /**\n * Computes the dirty real price of a bond security from the clean real price.\n * \n * @param bond\n * The bond security.\n * @param cleanPrice\n * The clean price.\n * @return The clean price.\n */\n public double dirtyRealPriceFromCleanRealPrice(final BondCapitalIndexedSecurity bond, final double cleanPrice) {\n final double notional = bond.getCoupon().getNthPayment(0).getNotional();\n return cleanPrice + bond.getAccruedInterest() / notional;\n }\n\n /**\n * The net amount paid at settlement date for a given clean real price.\n * \n * @param bond\n * The bond.\n * @param market\n * The market.\n * @param cleanPriceReal\n * The clean real price.\n * @return The net amount.\n */\n public MultipleCurrencyAmount netAmount(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface market,\n final double cleanPriceReal) {\n final double notional = bond.getCoupon().getNthPayment(0).getNotional();\n final double netAmountRealByUnit = cleanPriceReal + bond.getAccruedInterest() / notional;\n final MultipleCurrencyAmount netAmount = 
bond.getSettlement().accept(NAIC, market.getInflationProvider());\n return netAmount.multipliedBy(netAmountRealByUnit);\n\n }\n\n /**\n * Computes the dirty (real or nominal depending of the convention) price from the conventional real yield.\n * \n * @param bond\n * The bond security.\n * @param yield\n * The bond yield.\n * @return The dirty price.\n */\n public double dirtyPriceFromRealYield(final BondCapitalIndexedSecurity bond, final double yield) {\n Validate.isTrue(bond.getNominal().getNumberOfPayments() == 1, \"Yield: more than one nominal repayment.\");\n final int nbCoupon = bond.getCoupon().getNumberOfPayments();\n final YieldConvention yieldConvention = bond.getYieldConvention();\n if (yieldConvention.equals(US_IL_REAL)) {\n // Coupon period rate to next coupon and simple rate from next coupon to settlement.\n double pvAtFirstCoupon;\n if (Math.abs(yield) > 1.0E-8) {\n final double factorOnPeriod = 1 + yield / bond.getCouponPerYear();\n final double vn = Math.pow(factorOnPeriod, 1 - nbCoupon);\n pvAtFirstCoupon = ((CouponInflationGearing) bond.getCoupon().getNthPayment(0)).getFactor() / yield * (factorOnPeriod - vn) + vn;\n } else {\n pvAtFirstCoupon = ((CouponInflationGearing) bond.getCoupon().getNthPayment(0)).getFactor() / bond.getCouponPerYear() * nbCoupon + 1;\n }\n return pvAtFirstCoupon / (1 + bond.getAccrualFactorToNextCoupon() * yield / bond.getCouponPerYear());\n }\n\n if (yieldConvention.equals(INDEX_LINKED_FLOAT)) {\n final double realRate = ((CouponInflationGearing) bond.getCoupon().getNthPayment(1)).getFactor();\n final double firstYearFraction = bond.getCoupon().getNthPayment(0).getPaymentYearFraction();\n double firstCouponEndFixingTime = 0.0;\n double firstCouponPayementTime = 0.0;\n if (bond.getCoupon().getNthPayment(1) instanceof CouponInflationZeroCouponInterpolationGearing) {\n firstCouponEndFixingTime = ((CouponInflationZeroCouponInterpolationGearing) bond.getCoupon().getNthPayment(1)).getReferenceEndTime()[1];\n 
firstCouponPayementTime = ((CouponInflationZeroCouponInterpolationGearing) bond.getCoupon().getNthPayment(1)).getPaymentTime();\n } else if (bond.getCoupon().getNthPayment(1) instanceof CouponInflationZeroCouponMonthlyGearing) {\n firstCouponEndFixingTime = ((CouponInflationZeroCouponMonthlyGearing) bond.getCoupon().getNthPayment(1)).getReferenceEndTime();\n firstCouponPayementTime = ((CouponInflationZeroCouponMonthlyGearing) bond.getCoupon().getNthPayment(1)).getPaymentTime();\n }\n final double lag = firstCouponPayementTime - firstCouponEndFixingTime;\n final double v = 1 / (1 + yield / bond.getCouponPerYear());\n final double rpibase = bond.getIndexStartValue();\n final double rpiLast = bond.getLastIndexKnownFixing();\n final double indexRatio = rpiLast / rpibase;\n final int nbMonth = (int) Math.max(Math.round((bond.getLastKnownFixingTime() - bond.getCoupon().getNthPayment(0).getPaymentTime() + lag) * 12), 0.0);\n final double u = Math.pow(1 / (1 + .03), .5);\n final double a = indexRatio * Math.pow(u, 2.0 * nbMonth / 12.0);\n final double firstCashFlow = firstYearFraction * realRate * indexRatio;\n if (bond.getCoupon().getNumberOfPayments() == 1) {\n return Math.pow(u * v, bond.getAccrualFactorToNextCoupon()) * (firstCashFlow + 1) * a / u;\n }\n final double secondYearFraction = bond.getCoupon().getNthPayment(1).getPaymentYearFraction();\n final double secondCashFlow = secondYearFraction * realRate * indexRatio;\n final double vn = Math.pow(v, nbCoupon - 1);\n final double pvAtFirstCoupon = firstCashFlow + secondCashFlow * u * v + a * realRate * v * v * (1 - vn / v) / (1 - v) + a * vn;\n return pvAtFirstCoupon * Math.pow(u * v, bond.getRatioPeriodToNextCoupon());\n }\n if (yieldConvention.equals(UK_IL_BOND)) {\n final double firstYearFraction = bond.getCoupon().getNthPayment(0).getPaymentYearFraction();\n final double realRate = ((CouponInflationGearing) bond.getCoupon().getNthPayment(1)).getFactor();\n // Real rate adjusted by the number of coupons, i.e. 
annual rate / 2 for UK bonds\n final double firstCashFlow = firstYearFraction * realRate;\n final double v = 1 / (1 + yield / bond.getCouponPerYear());\n if (bond.getCoupon().getNumberOfPayments() == 1) {\n return Math.pow(v, bond.getAccrualFactorToNextCoupon()) * (firstCashFlow + 1);\n }\n final double secondYearFraction = bond.getCoupon().getNthPayment(1).getPaymentYearFraction();\n final double secondCashFlow = secondYearFraction * realRate;\n final double vn = Math.pow(v, nbCoupon - 1);\n final double pvAtFirstCoupon = firstCashFlow + secondCashFlow * v + realRate * v * v * (1 - vn / v) / (1 - v) + vn;\n return pvAtFirstCoupon * Math.pow(v, bond.getAccrualFactorToNextCoupon());\n }\n throw new UnsupportedOperationException(\"The convention \" + bond.getYieldConvention().getName() + \" is not supported.\");\n }\n\n /**\n * Computes the clean price (real or nominal depending on the convention) from the conventional real yield.\n * \n * @param bond\n * The bond security.\n * @param yield\n * The bond yield.\n * @return The clean price.\n */\n public double cleanPriceFromYield(final BondCapitalIndexedSecurity bond, final double yield) {\n Validate.isTrue(bond.getNominal().getNumberOfPayments() == 1, \"Yield: more than one nominal repayment.\");\n final double dirtyPrice = dirtyPriceFromRealYield(bond, yield);\n if (bond.getYieldConvention().equals(INDEX_LINKED_FLOAT)) {\n return cleanNominalPriceFromDirtyNominalPrice(bond, dirtyPrice);\n }\n return cleanRealPriceFromDirtyRealPrice(bond, dirtyPrice);\n }\n\n /**\n * Compute the conventional yield from the dirty price.\n * \n * @param bond\n * The bond security.\n * @param dirtyPrice\n * The bond dirty price.\n * @return The yield.\n */\n public double yieldRealFromDirtyRealPrice(final BondCapitalIndexedSecurity bond, final double dirtyPrice) {\n /**\n * Inner function used to find the yield.\n */\n final Function1D priceResidual = new Function1D() {\n @Override\n public Double evaluate(final Double y) {\n return 
dirtyPriceFromRealYield(bond, y) - dirtyPrice;\n }\n };\n final double[] range = BRACKETER.getBracketedPoints(priceResidual, -0.05, 0.10);\n final double yield = ROOT_FINDER.getRoot(priceResidual, range[0], range[1]);\n return yield;\n }\n\n /**\n * Computes the present value sensitivity of a capital indexed bound by index estimation and discounting. The sensitivity is the sensitivity of the nominal\n * and the coupons but not the settlement.\n * \n * @param bond\n * The bond.\n * @param provider\n * The provider.\n * @return The present value.\n */\n public MultipleCurrencyInflationSensitivity presentValueCurveSensitivity(final BondCapitalIndexedSecurity bond,\n final InflationIssuerProviderInterface provider) {\n ArgumentChecker.notNull(bond, \"Bond\");\n final InflationProviderInterface creditDiscounting = new InflationProviderDecoratedIssuer(provider, bond.getCurrency(), bond.getIssuerEntity());\n final MultipleCurrencyInflationSensitivity sensitivityNominal = bond.getNominal().accept(PVCSIC, creditDiscounting);\n final MultipleCurrencyInflationSensitivity sensitivityCoupon = bond.getCoupon().accept(PVCSIC, creditDiscounting);\n return sensitivityNominal.plus(sensitivityCoupon);\n }\n\n /**\n * Compute the conventional yield from the clean price.\n * \n * @param bond\n * The bond security.\n * @param cleanPrice\n * The bond clean price.\n * @return The yield.\n */\n public double yieldRealFromCleanPrice(final BondCapitalIndexedSecurity bond, final double cleanPrice) {\n /**\n * Inner function used to find the yield.\n */\n final Function1D priceResidual = new Function1D() {\n @Override\n public Double evaluate(final Double y) {\n return cleanPriceFromYield(bond, y) - cleanPrice;\n }\n };\n final double[] range = BRACKETER.getBracketedPoints(priceResidual, -0.05, 0.10);\n final double yield = ROOT_FINDER.getRoot(priceResidual, range[0], range[1]);\n return yield;\n }\n\n /**\n * Calculates the modified duration from a standard yield.\n * \n * @param bond\n * The 
bond\n * @param yield\n * The yield\n * @return The modified duration\n */\n public double modifiedDurationFromYieldStandard(final BondCapitalIndexedSecurity bond, final double yield) {\n ArgumentChecker.isTrue(bond.getNominal().getNumberOfPayments() == 1, \"Yield: more than one nominal repayment.\");\n final int nbCoupon = bond.getCoupon().getNumberOfPayments();\n final double nominal = bond.getNominal().getNthPayment(0).getNotional();\n final double factorOnPeriod = 1 + yield / bond.getCouponPerYear();\n double mdAtFirstCoupon = 0;\n double pvAtFirstCoupon = 0;\n for (int loopcpn = 0; loopcpn < nbCoupon; loopcpn++) {\n mdAtFirstCoupon += bond.getCoupon().getNthPayment(loopcpn).getNotional() / Math.pow(factorOnPeriod, loopcpn + 1)\n * (loopcpn + bond.getAccrualFactorToNextCoupon()) / bond.getCouponPerYear();\n pvAtFirstCoupon += bond.getCoupon().getNthPayment(loopcpn).getNotional() / Math.pow(factorOnPeriod, loopcpn);\n }\n mdAtFirstCoupon += nominal / Math.pow(factorOnPeriod, nbCoupon) * (nbCoupon - 1 + bond.getAccrualFactorToNextCoupon()) / bond.getCouponPerYear();\n pvAtFirstCoupon += nominal / Math.pow(factorOnPeriod, nbCoupon - 1);\n final double pv = pvAtFirstCoupon * Math.pow(factorOnPeriod, -bond.getAccrualFactorToNextCoupon());\n final double md = mdAtFirstCoupon * Math.pow(factorOnPeriod, -bond.getAccrualFactorToNextCoupon()) / pv;\n return md;\n }\n\n /**\n * Calculates the modified duration from a standard yield.\n * \n * @param bond\n * The bond\n * @param yield\n * The yield\n * @return The modified duration\n */\n public double modifiedDurationFromYieldFiniteDifference(final BondCapitalIndexedSecurity bond, final double yield) {\n ArgumentChecker.isTrue(bond.getNominal().getNumberOfPayments() == 1, \"Yield: more than one nominal repayment.\");\n final double price = cleanPriceFromYield(bond, yield);\n final double priceplus = cleanPriceFromYield(bond, yield + 10e-6);\n final double priceminus = cleanPriceFromYield(bond, yield - 10e-6);\n return 
-(priceplus - priceminus) / (2 * price * 10e-6);\n }\n\n /**\n * Calculates the modified duration from a standard yield.\n * \n * @param bond\n * The bond\n * @param yield\n * The yield\n * @return The modified duration\n */\n public double convexityFromYieldFiniteDifference(final BondCapitalIndexedSecurity bond, final double yield) {\n ArgumentChecker.isTrue(bond.getNominal().getNumberOfPayments() == 1, \"Yield: more than one nominal repayment.\");\n ArgumentChecker.isTrue(bond.getNominal().getNumberOfPayments() == 1, \"Yield: more than one nominal repayment.\");\n final double price = cleanPriceFromYield(bond, yield);\n final double priceplus = cleanPriceFromYield(bond, yield + 10e-6);\n final double priceminus = cleanPriceFromYield(bond, yield - 10e-6);\n return (priceplus - 2 * price + priceminus) / (price * 10e-6 * 10e-6);\n }\n\n /**\n * Computes the bill yield from the curves. The yield is in the bill yield convention.\n * \n * @param bond\n * The bond.\n * @param provider\n * The inflation and multi-curves provider.\n * @return The yield.\n */\n public double yieldRealFromCurves(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface provider) {\n ArgumentChecker.notNull(bond, \"Bond\");\n ArgumentChecker.notNull(provider, \"inflation and multi-curves provider\");\n double dirtyPrice;\n if (bond.getYieldConvention().equals(INDEX_LINKED_FLOAT)) {\n dirtyPrice = dirtyNominalPriceFromCurves(bond, provider);\n } else {\n dirtyPrice = dirtyRealPriceFromCurves(bond, provider);\n }\n final double yield = yieldRealFromDirtyRealPrice(bond, dirtyPrice);\n return yield;\n }\n\n /**\n * Computes the modified duration of a bond from the curves.\n * \n * @param bond\n * The bond security.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @return The modified duration.\n */\n public double modifiedDurationFromCurves(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface issuerMulticurves) {\n final 
double yield = yieldRealFromCurves(bond, issuerMulticurves);\n return modifiedDurationFromYieldFiniteDifference(bond, yield);\n }\n\n /**\n * Compute the conventional yield from the clean price.\n * \n * @param bond\n * The bond security.\n * @param cleanPrice\n * The bond clean price.\n * @return The yield.\n */\n public double yieldRealFromCleanRealPrice(final BondCapitalIndexedSecurity bond, final double cleanPrice) {\n final double dirtyPrice = dirtyRealPriceFromCleanRealPrice(bond, cleanPrice);\n final double yield = yieldRealFromDirtyRealPrice(bond, dirtyPrice);\n return yield;\n }\n\n /**\n * Computes the modified duration of a bond from the clean price.\n * \n * @param bond\n * The bond security.\n * @param cleanPrice\n * The bond clean price.\n * @return The modified duration.\n */\n public double modifiedDurationFromCleanPrice(final BondCapitalIndexedSecurity bond, final double cleanPrice) {\n final double yield = yieldRealFromCleanPrice(bond, cleanPrice);\n return modifiedDurationFromYieldFiniteDifference(bond, yield);\n }\n\n /**\n * Calculates the convexity from a standard yield.\n * \n * @param bond\n * The bond\n * @param yield\n * The yield\n * @return The convexity\n */\n public double convexityFromYieldStandard(final BondCapitalIndexedSecurity bond, final double yield) {\n ArgumentChecker.isTrue(bond.getNominal().getNumberOfPayments() == 1, \"Yield: more than one nominal repayment.\");\n final int nbCoupon = bond.getCoupon().getNumberOfPayments();\n final double nominal = bond.getNominal().getNthPayment(bond.getNominal().getNumberOfPayments() - 1).getNotional();\n final double factorOnPeriod = 1 + yield / bond.getCouponPerYear();\n double cvAtFirstCoupon = 0;\n double pvAtFirstCoupon = 0;\n for (int loopcpn = 0; loopcpn < nbCoupon; loopcpn++) {\n cvAtFirstCoupon += bond.getCoupon().getNthPayment(loopcpn).getNotional() / Math.pow(factorOnPeriod, loopcpn + 2)\n * (loopcpn + bond.getAccrualFactorToNextCoupon()) * (loopcpn + 
bond.getAccrualFactorToNextCoupon() + 1)\n / (bond.getCouponPerYear() * bond.getCouponPerYear());\n pvAtFirstCoupon += bond.getCoupon().getNthPayment(loopcpn).getNotional() / Math.pow(factorOnPeriod, loopcpn);\n }\n cvAtFirstCoupon += nominal / Math.pow(factorOnPeriod, nbCoupon + 1) * (nbCoupon - 1 + bond.getAccrualFactorToNextCoupon())\n * (nbCoupon + bond.getAccrualFactorToNextCoupon()) / (bond.getCouponPerYear() * bond.getCouponPerYear());\n pvAtFirstCoupon += nominal / Math.pow(factorOnPeriod, nbCoupon - 1);\n final double pv = pvAtFirstCoupon * Math.pow(factorOnPeriod, -bond.getAccrualFactorToNextCoupon());\n final double cv = cvAtFirstCoupon * Math.pow(factorOnPeriod, -bond.getAccrualFactorToNextCoupon()) / pv;\n return cv;\n }\n\n /**\n * Computes the convexity of a bond from the curves.\n * \n * @param bond\n * The bond security.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @return The convexity.\n */\n public double convexityFromCurves(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface issuerMulticurves) {\n final double yield = yieldRealFromCurves(bond, issuerMulticurves);\n return convexityFromYieldFiniteDifference(bond, yield);\n }\n\n /**\n * Computes the convexity of a bond from the clean price.\n * \n * @param bond\n * The bond security.\n * @param cleanPrice\n * The bond clean price.\n * @return The convexity.\n */\n public double convexityFromCleanPrice(final BondCapitalIndexedSecurity bond, final double cleanPrice) {\n final double yield = yieldRealFromCleanPrice(bond, cleanPrice);\n return convexityFromYieldFiniteDifference(bond, yield);\n }\n\n /**\n * Computes the present value of a bond security from z-spread. The z-spread is a parallel shift applied to the discounting curve associated to the bond\n * (Issuer Entity). 
The parallel shift is done in the curve convention.\n * \n * @param bond\n * The bond security.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @param zSpread\n * The z-spread.\n * @return The present value.\n */\n public MultipleCurrencyAmount presentValueFromZSpread(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface issuerMulticurves,\n final double zSpread) {\n final InflationIssuerProviderIssuerDecoratedSpread issuerShifted = new InflationIssuerProviderIssuerDecoratedSpread(issuerMulticurves,\n bond.getIssuerEntity(), zSpread);\n return presentValue(bond, issuerShifted);\n }\n\n /**\n * Computes the present value of a bond security from z-spread. The z-spread is a parallel shift applied to the discounting curve associated to the bond\n * (Issuer Entity). The parallel shift is done in the curve convention.\n * \n * @param bond\n * The bond security.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @param zSpread\n * The z-spread.\n * @return The present value.\n */\n public double cleanPriceFromZSpread(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface issuerMulticurves,\n final double zSpread) {\n final InflationIssuerProviderIssuerDecoratedSpread issuerShifted = new InflationIssuerProviderIssuerDecoratedSpread(issuerMulticurves,\n bond.getIssuerEntity(), zSpread);\n return cleanRealPriceFromCurves(bond, issuerShifted);\n }\n\n /**\n * Computes a bond z-spread from the curves and a present value. 
The z-spread is a parallel shift applied to the discounting curve associated to the bond\n * (Issuer Entity) to match the present value.\n * \n * @param bond\n * The bond.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @param cleanPrice\n * The target clean price.\n * @return The z-spread.\n */\n // TODO : this function have been created specially to match the clean price as it come from bloomberg (ie without specification if it is real or nominal),\n // maybe remove at one point.\n public double zSpreadFromCurvesAndCleanPriceDirect(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface issuerMulticurves,\n final double cleanPrice) {\n\n ArgumentChecker.notNull(bond, \"Bond\");\n ArgumentChecker.notNull(issuerMulticurves, \"Issuer and multi-curves provider\");\n final Function1D residual = new Function1D() {\n @Override\n public Double evaluate(final Double z) {\n final InflationIssuerProviderIssuerDecoratedSpread issuerShifted = new InflationIssuerProviderIssuerDecoratedSpread(issuerMulticurves,\n bond.getIssuerEntity(), z);\n if (bond.getYieldConvention().equals(INDEX_LINKED_FLOAT)) {\n return cleanNominalPriceFromCurves(bond, issuerShifted) - cleanPrice;\n }\n return cleanRealPriceFromCurves(bond, issuerShifted) - cleanPrice;\n }\n };\n\n final double[] range = BRACKETER.getBracketedPoints(residual, -0.5, 0.5); // Starting range is [-1%, 1%]\n return ROOT_FINDER.getRoot(residual, range[0], range[1]);\n }\n\n /**\n * Computes a bond z-spread from the curves and a present value. 
The z-spread is a parallel shift applied to the discounting curve associated to the bond\n * (Issuer Entity) to match the present value.\n * \n * @param bond\n * The bond.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @param cleanRealPrice\n * The target clean real price.\n * @return The z-spread.\n */\n public double zSpreadFromCurvesAndCleanRealPriceDirect(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface issuerMulticurves,\n final double cleanRealPrice) {\n return zSpreadFromCurvesAndPV(bond, issuerMulticurves, presentValueFromCleanRealPrice(bond, issuerMulticurves, cleanRealPrice));\n }\n\n /**\n * Computes a bond z-spread from the curves and a present value. The z-spread is a parallel shift applied to the discounting curve associated to the bond\n * (Issuer Entity) to match the present value.\n * \n * @param bond\n * The bond.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @param cleanRealPrice\n * The target clean real price.\n * @return The z-spread.\n */\n public double zSpreadFromCurvesAndCleanNominalPriceDirect(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface issuerMulticurves,\n final double cleanRealPrice) {\n return zSpreadFromCurvesAndPV(bond, issuerMulticurves, presentValueFromCleanNominalPrice(bond, issuerMulticurves, cleanRealPrice));\n }\n\n /**\n * Computes a bond z-spread from the curves and a present value. 
The z-spread is a parallel shift applied to the discounting curve associated to the bond\n * (Issuer Entity) to match the present value.\n * \n * @param bond\n * The bond.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @param pv\n * The target present value.\n * @return The z-spread.\n */\n public double zSpreadFromCurvesAndPV(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface issuerMulticurves,\n final MultipleCurrencyAmount pv) {\n ArgumentChecker.notNull(bond, \"Bond\");\n ArgumentChecker.notNull(issuerMulticurves, \"Issuer and multi-curves provider\");\n final Currency ccy = bond.getCurrency();\n\n final Function1D residual = new Function1D() {\n @Override\n public Double evaluate(final Double z) {\n return presentValueFromZSpread(bond, issuerMulticurves, z).getAmount(ccy) - pv.getAmount(ccy);\n }\n };\n\n final double[] range = BRACKETER.getBracketedPoints(residual, -0.5, 0.5); // Starting range is [-1%, 1%]\n return ROOT_FINDER.getRoot(residual, range[0], range[1]);\n }\n\n /**\n * Computes a bond z-spread from the curves and a clean price. 
The z-spread is a parallel shift applied to the discounting curve associated to the bond\n * (Issuer Entity) to match the CleanPrice present value.\n * \n * @param bond\n * The bond.\n * @param issuerMulticurves\n * The issuer and multi-curves provider.\n * @param cleanPrice\n * The target clean price.\n * @return The z-spread.\n */\n public double zSpreadFromCurvesAndCleanPrice(final BondCapitalIndexedSecurity bond, final InflationIssuerProviderInterface issuerMulticurves,\n final double cleanPrice) {\n return zSpreadFromCurvesAndPV(bond, issuerMulticurves, presentValueFromCleanRealPrice(bond, issuerMulticurves, cleanPrice));\n }\n\n}\n", "meta": {"content_hash": "bf41cecbb9b17767ce1c32de08ed3c2e", "timestamp": "", "source": "github", "line_count": 803, "max_line_length": 159, "avg_line_length": 46.39975093399751, "alnum_prop": 0.7211680399366596, "repo_name": "McLeodMoores/starling", "id": "75c2be71172ed67d5e22d850113a1b455ad8ea62", "size": "37396", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "projects/analytics/src/main/java/com/opengamma/analytics/financial/interestrate/bond/provider/BondCapitalIndexedSecurityDiscountingMethod.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "2505"}, {"name": "CSS", "bytes": "213501"}, {"name": "FreeMarker", "bytes": "310184"}, {"name": "GAP", "bytes": "1490"}, {"name": "Groovy", "bytes": "11518"}, {"name": "HTML", "bytes": "318295"}, {"name": "Java", "bytes": "79541905"}, {"name": "JavaScript", "bytes": "1511230"}, {"name": "PLSQL", "bytes": "398"}, {"name": "PLpgSQL", "bytes": "26901"}, {"name": "Shell", "bytes": "11481"}, {"name": "TSQL", "bytes": "604117"}]}} {"text": "\ufeffusing System.Windows.Controls;\n\nnamespace EveMarket.Views\n{\n /// \n /// Interaction logic for ItemInfoView.xaml\n /// \n public partial class ItemInfoView : UserControl\n {\n public ItemInfoView()\n {\n InitializeComponent();\n }\n }\n}", "meta": {"content_hash": 
"7ba1f05fd37f11bdb5e380117173db13", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 51, "avg_line_length": 19.733333333333334, "alnum_prop": 0.5945945945945946, "repo_name": "Capgemini/Cauldron", "id": "a53b3cb45e415caef3f0f5bc85b72982a46f8614", "size": "298", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "Old/Desktop/Samples/EveMarket.Views/ItemInfoView.xaml.cs", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "603"}, {"name": "C#", "bytes": "6246714"}, {"name": "PowerShell", "bytes": "8223"}]}} {"text": "/**\n * \n */\npackage com.aspose.cloud.sdk.slides.api;\n\nimport android.net.Uri;\n\nimport com.aspose.cloud.sdk.common.AsposeApp;\nimport com.aspose.cloud.sdk.common.Utils;\nimport com.aspose.cloud.sdk.slides.model.MergePresentationsRequest;\nimport com.aspose.cloud.sdk.slides.model.MergeSelectedSlidesOfPowerPointPresentationsRequest;\nimport com.aspose.cloud.sdk.slides.model.SplitPowerPointPresentationsResponse;\nimport com.aspose.cloud.sdk.slides.model.SplitPowerPointPresentationsResponse.SplitResult;\nimport com.aspose.cloud.sdk.slides.model.ValidSlidesFormats;\nimport com.aspose.cloud.sdk.slides.model.DocumentResponse;\nimport com.aspose.cloud.sdk.slides.model.DocumentResponse.DocumentModel;\nimport com.aspose.cloud.sdk.slides.model.ValidFormatsEnum;\n\nimport com.google.gson.Gson;\nimport com.google.gson.GsonBuilder;\n\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.security.InvalidKeyException;\nimport java.security.NoSuchAlgorithmException;\nimport java.util.HashMap;\n\n/**\n * Document --- Using this class you can create a new empty PowerPoint presentation, convert PowerPoint document to other File formats, \n * merge multiple PowerPoint presentation files and split all or specific slides of a presentation file\n * @author M. 
Sohail Ismail\n */\npublic class Document {\n\t\n\tprivate static final String SLIDES_URI = AsposeApp.BASE_PRODUCT_URI + \"/slides/\";\n\tpublic static final String TEXTCOMPRESSION_KEY = \"TextCompression\";\n\tpublic static final String EMBEDFULLFONTS_KEY = \"EmbedFullFonts\";\n\tpublic static final String COMPLIANCE_KEY = \"Compliance\";\n\tpublic static final String JPEGQUALITY_KEY = \"JpegQuality\";\n\tpublic static final String SAVEMETAFILESASPNG_KEY = \"SaveMetafilesAsPng\";\n\tpublic static final String PDFPASSWORD_KEY = \"PdfPassword\";\n\tpublic static final String EMBEDTRUETYPEFONTSFORASCII_KEY = \"EmbedTrueTypeFontsForASCII\";\n\t\n\t/**\n\t * Create a new empty PowerPoint presentation\n\t * @param fileName Name of the file stored on cloud\n\t * @throws java.security.InvalidKeyException If initialization fails because the provided key is null.\n\t * @throws java.security.NoSuchAlgorithmException If the specified algorithm (HmacSHA1) is not available by any provider.\n\t * @throws java.io.IOException If there is an IO error\n\t * @return An object that contains URLs to document properties, slides and images and alternative links to download document in other formats\n\t*/ \n\tpublic static DocumentModel createEmptyPowerPointPresentation(String fileName) throws InvalidKeyException, NoSuchAlgorithmException, IOException {\n\t\t\n\t\tDocumentModel document = null;\n\t\t\n\t\tif(fileName == null || fileName.length() == 0) {\n\t\t\tthrow new IllegalArgumentException(\"File name cannot be null or empty\");\n\t\t}\n\t\t\n\t\t//build URL\n\t\tString strURL = SLIDES_URI + Uri.encode(fileName);\n\t\t//sign URL\n\t\tString signedURL = Utils.sign(strURL);\n\t\tInputStream responseStream = Utils.processCommand(signedURL, \"PUT\");\n\t\tString jsonStr = Utils.streamToString(responseStream);\n\t\t\n\t\t//Parsing JSON\n\t\tGson gson = new Gson();\n\t\tDocumentResponse documentResponse = gson.fromJson(jsonStr, 
DocumentResponse.class);\n\t\tif(documentResponse.getCode().equals(\"201\") && documentResponse.getStatus().equals(\"Created\")) {\n\t\t\tdocument = documentResponse.document;\n\t\t}\n\t\t\n\t\treturn document;\n\t}\n\t\n\t/**\n\t * Convert PowerPoint document to other File formats \n\t * @param fileName Name of the file stored on cloud\n\t * @param designatedFormat Valid formats are tiff, pdf, xps, odp, ppsx, pptm, ppsm, potx, potm and html\n\t * @param storageName If file is stored at third party storage e.g. Amazon S3, Azure, Dropbox, Google Drive or FTP\n\t * @param folderName Path to file if file is not stored at root\n\t * @throws java.security.InvalidKeyException If initialization fails because the provided key is null.\n\t * @throws java.security.NoSuchAlgorithmException If the specified algorithm (HmacSHA1) is not available by any provider.\n\t * @throws java.io.IOException If there is an IO error\n\t * @return A path to converted document\n\t*/ \n\tpublic static String convertPowerPointDocumentToOtherFileFormats(String fileName, ValidFormatsEnum designatedFormat, String storageName, String folderName) throws InvalidKeyException, NoSuchAlgorithmException, IOException {\n\t\t\n\t\tString localFilePath = null;\n\t\t\n\t\tif(fileName == null || fileName.length() == 0) {\n\t\t\tthrow new IllegalArgumentException(\"File name cannot be null or empty\");\n\t\t}\n\t\t\n\t\tif(designatedFormat == null) {\n\t\t\tthrow new IllegalArgumentException(\"Designated format cannot be null\");\n\t\t}\n\t\t\n\t\t//build URL\n\t\tStringBuilder strURL = new StringBuilder(SLIDES_URI + Uri.encode(fileName) + \"?format=\" + designatedFormat);\n\t\t//If document is on the third party storage\n\t\tif(storageName != null && storageName.length() != 0) {\n\t\t\tstrURL.append(\"&storage=\" + storageName);\n\t\t}\n\t\t//In case if file is not at root folder\n\t\tif(folderName != null && folderName.length() != 0) {\n\t\t\tstrURL.append(\"&folder=\" + folderName);\n\t\t}\n\n\t\t//sign 
URL\n\t\tString signedURL = Utils.sign(strURL.toString());\n\t\tInputStream responseStream = Utils.processCommand(signedURL, \"GET\");\n\t\t\n\t\t//Replace fileName extension with designated format \n\t\tString[] fileNameAndItsExtensionArray = fileName.split(\"\\\\.\");\n\t\tfileName = fileNameAndItsExtensionArray[0] + \".\" + designatedFormat;\n\t\t\n\t\t//Save file on Disk\n\t\tlocalFilePath = Utils.saveStreamToFile(responseStream, fileName);\n\t\treturn localFilePath;\n\t}\n\t\n\t/**\n\t * Convert PowerPoint document to other file formats with additional settings\n\t * @param fileName Name of the file stored on cloud\n\t * @param designatedFormat Valid formats are tiff, pdf, xps, odp, ppsx, pptm, ppsm, potx, potm and html\n\t * @param exportOptions Depends of parameter \"format\" service can receive export options\n\t * @throws java.security.InvalidKeyException If initialization fails because the provided key is null.\n\t * @throws java.security.NoSuchAlgorithmException If the specified algorithm (HmacSHA1) is not available by any provider.\n\t * @throws java.io.IOException If there is an IO error\n\t * @return A path to converted document\n\t*/ \n\tpublic static String convertPowerPointDocumentToOtherFileFormatsWithAdditionalSettings(String fileName, ValidFormatsEnum designatedFormat, HashMap exportOptions) throws InvalidKeyException, NoSuchAlgorithmException, IOException {\n\t\t\n\t\tString localFilePath = null;\n\t\t\n\t\tif(fileName == null || fileName.length() <= 3) {\n\t\t\tthrow new IllegalArgumentException(\"File name cannot be null or empty\");\n\t\t}\n\t\t\n\t\tif(designatedFormat == null) {\n\t\t\tthrow new IllegalArgumentException(\"Designated format cannot be null\");\n\t\t}\n\t\t\n\t\t//build URL\n\t\tStringBuilder strURL = new StringBuilder(SLIDES_URI + Uri.encode(fileName) + \"?format=\" + designatedFormat);\n\t\tif(exportOptions.get(TEXTCOMPRESSION_KEY) != null) {\n\t\t\tstrURL.append(\"&TextCompression=\" + 
exportOptions.get(TEXTCOMPRESSION_KEY));\n\t\t}\n\t\tif(exportOptions.get(EMBEDFULLFONTS_KEY) != null) {\n\t\t\tstrURL.append(\"&EmbedFullFonts=\" + exportOptions.get(EMBEDFULLFONTS_KEY));\n\t\t}\n\t\tif(exportOptions.get(COMPLIANCE_KEY) != null) {\n\t\t\tstrURL.append(\"&Compliance=\" + exportOptions.get(COMPLIANCE_KEY));\n\t\t}\n\t\tif(exportOptions.get(JPEGQUALITY_KEY) != null) {\n\t\t\tstrURL.append(\"&JpegQuality=\" + exportOptions.get(JPEGQUALITY_KEY));\n\t\t}\n\t\tif(exportOptions.get(SAVEMETAFILESASPNG_KEY) != null) {\n\t\t\tstrURL.append(\"&SaveMetafilesAsPng=\" + exportOptions.get(SAVEMETAFILESASPNG_KEY));\n\t\t}\n\t\tif(exportOptions.get(PDFPASSWORD_KEY) != null) {\n\t\t\tstrURL.append(\"&PdfPassword=\" + exportOptions.get(PDFPASSWORD_KEY));\n\t\t}\n\t\tif(exportOptions.get(EMBEDTRUETYPEFONTSFORASCII_KEY) != null) {\n\t\t\tstrURL.append(\"&EmbedTrueTypeFontsForASCII=\" + exportOptions.get(EMBEDTRUETYPEFONTSFORASCII_KEY));\n\t\t}\n\t\t\n\t\t//sign URL\n\t\tString signedURL = Utils.sign(strURL.toString());\n\t\tInputStream responseStream = Utils.processCommand(signedURL, \"GET\");\n\t\t\n\t\t//Replace fileName extension with designated format \n\t\tString[] fileNameAndItsExtensionArray = fileName.split(\"\\\\.\");\n\t\tfileName = fileNameAndItsExtensionArray[0] + \".\" + designatedFormat;\n\t\t\n\t\t//Save file on Disk\n\t\tlocalFilePath = Utils.saveStreamToFile(responseStream, fileName);\n\t\treturn localFilePath;\n\t}\n\n\t/**\n\t * Convert PowerPoint document stored on device to other file formats\n\t * @param localFilePath Name of the file stored on device\n\t * @param designatedFormat Valid formats are tiff, pdf, xps, odp, ppsx, pptm, ppsm, potx, potm and html\n\t * @throws java.security.InvalidKeyException If initialization fails because the provided key is null.\n\t * @throws java.security.NoSuchAlgorithmException If the specified algorithm (HmacSHA1) is not available by any provider.\n\t * @throws java.io.IOException If there is an IO error\n\t * 
@return A path to converted document\n\t*/ \n\tpublic static String convertLocallyStoredPowerPointDocumentToOtherFileFormats(String localFilePath, ValidFormatsEnum designatedFormat) throws InvalidKeyException, NoSuchAlgorithmException, IOException {\n\t\t\n\t\tString updatedFilePath = null;\n\t\t\n\t\tif(localFilePath == null || localFilePath.length() == 0) {\n\t\t\tthrow new IllegalArgumentException(\"Local file path cannot be null or empty\");\n\t\t}\n\t\t\n\t\tif(designatedFormat == null) {\n\t\t\tthrow new IllegalArgumentException(\"Designated format cannot be null\");\n\t\t}\n\t\t\n\t\t//Build URI \n\t\tString strURL = SLIDES_URI + \"convert?format=\" + designatedFormat;\n\t\t//Sign the request URI\n\t\tString signedURL = Utils.sign(strURL);\t\n\t\t//Convert the local file to InputStream\n\t\tInputStream fileStream = new FileInputStream(localFilePath);\n\t\t//Process the request on server\n\t\tInputStream responseStream = Utils.processCommand(signedURL, \"POST\", fileStream);\n\t\t//Get fileName from localFilePath\n\t\tString fileName;\n\t\tint index = localFilePath.lastIndexOf(\"/\");\n\t\tif(index != -1) {\n\t\t\tfileName = localFilePath.substring(index+1);\n\t\t} else {\n\t\t\tfileName = localFilePath;\n\t\t}\n\t\t//Replace fileName extension with designated format \n\t\tString[] fileNameAndItsExtensionArray = fileName.split(\"\\\\.\");\n\t\tfileName = fileNameAndItsExtensionArray[0] + \".\" + designatedFormat;\n\t\t\t\t\n\t\t//Save the stream in response to the disk\n\t\tupdatedFilePath = Utils.saveStreamToFile(responseStream, fileName);\n\t\t\n\t\treturn updatedFilePath;\n\t}\n\t\n\t/**\n\t * You can merge multiple PowerPoint presentation files\n\t * @param fileName Name of the file stored on cloud\n\t * @param mergePresentationsRequest Contains an array of PowerPoint presentations to be merged with\n\t * @throws java.security.InvalidKeyException If initialization fails because the provided key is null.\n\t * @throws java.security.NoSuchAlgorithmException 
If the specified algorithm (HmacSHA1) is not available by any provider.\n\t * @throws java.io.IOException If there is an IO error\n\t * @return An object that contains URLs to document properties, slides and images and alternative links to download document in other formats\n\t*/ \n\tpublic static DocumentModel mergePowerPointPresentations(String fileName, MergePresentationsRequest mergePresentationsRequest) throws InvalidKeyException, NoSuchAlgorithmException, IOException {\n\t\t\n\t\tDocumentModel document = null;\n\t\t\n\t\tif(fileName == null || fileName.length() == 0) {\n\t\t\tthrow new IllegalArgumentException(\"File name cannot be null or empty\");\n\t\t}\n\t\t\n\t\tif(mergePresentationsRequest == null) {\n\t\t\tthrow new IllegalArgumentException(\"Merge presentations request cannot be null\");\n\t\t}\n\t\t\n\t\tGsonBuilder builder = new GsonBuilder();\n Gson gson = builder.create();\n String requestJSONString = gson.toJson(mergePresentationsRequest, MergePresentationsRequest.class);\n \n //Build URI \n \tString strURL = SLIDES_URI + Uri.encode(fileName) + \"/merge\";\n \t//Sign the request URI\n \tString signedURL = Utils.sign(strURL);\t\n \t\t\n InputStream responseStream = Utils.processCommand(signedURL, \"POST\", requestJSONString);\n String responseJSONString = Utils.streamToString(responseStream);\n\t\t\n //Parsing JSON\n\t\tDocumentResponse documentResponse = gson.fromJson(responseJSONString, DocumentResponse.class);\n\t\tif(documentResponse.getCode().equals(\"200\") && documentResponse.getStatus().equals(\"OK\")) {\n\t\t\tdocument = documentResponse.document;\n\t\t}\n\t\t\n\t\treturn document;\n\t}\n\t\n\t/**\n\t * Take selected slides from multiple presentation files and merge into another presentation\n\t * @param fileName Name of the file stored on cloud\n\t * @param mergePresentationsRequest Contains an array of PowerPoint presentations to be merged with\n\t * @throws java.security.InvalidKeyException If initialization fails because the provided 
key is null.\n\t * @throws java.security.NoSuchAlgorithmException If the specified algorithm (HmacSHA1) is not available by any provider.\n\t * @throws java.io.IOException If there is an IO error\n\t * @return An object that contains URLs to document properties, slides and images and alternative links to download document in other formats\n\t*/ \n\tpublic static DocumentModel mergeSelectedSlidesOfPowerPointPresentations(String fileName, MergeSelectedSlidesOfPowerPointPresentationsRequest mergePresentationsRequest) throws InvalidKeyException, NoSuchAlgorithmException, IOException {\n\t\t\n\t\tDocumentModel document = null;\n\t\t\n\t\tif(fileName == null || fileName.length() == 0) {\n\t\t\tthrow new IllegalArgumentException(\"File name cannot be null or empty\");\n\t\t}\n\t\t\n\t\tif(mergePresentationsRequest == null) {\n\t\t\tthrow new IllegalArgumentException(\"Merge presentations request cannot be null\");\n\t\t}\n\t\t\n\t\tGsonBuilder builder = new GsonBuilder();\n Gson gson = builder.create();\n String requestJSONString = gson.toJson(mergePresentationsRequest, MergeSelectedSlidesOfPowerPointPresentationsRequest.class);\n \n //Build URI \n \tString strURL = SLIDES_URI + Uri.encode(fileName) + \"/merge\";\n \t//Sign the request URI\n \tString signedURL = Utils.sign(strURL);\t\n \t\t\n InputStream responseStream = Utils.processCommand(signedURL, \"PUT\", requestJSONString);\n String responseJSONString = Utils.streamToString(responseStream);\n\t\t\n //Parsing JSON\n\t\tDocumentResponse documentResponse = gson.fromJson(responseJSONString, DocumentResponse.class);\n\t\tif(documentResponse.getCode().equals(\"200\") && documentResponse.getStatus().equals(\"OK\")) {\n\t\t\tdocument = documentResponse.document;\n\t\t}\n\t\t\n\t\treturn document;\n\t}\n\t\n\t/**\n\t * Split all slides of a presentation file and save each slide as a new HTML or any supported image format\n\t * @param fileName Name of the file stored on cloud\n\t * @throws java.security.InvalidKeyException 
If initialization fails because the provided key is null.\n\t * @throws java.security.NoSuchAlgorithmException If the specified algorithm (HmacSHA1) is not available by any provider.\n\t * @throws java.io.IOException If there is an IO error\n\t * @return An object that contains URLs to slides\n\t*/ \n\tpublic static SplitResult splitPowerPointPresentations(String fileName) throws InvalidKeyException, NoSuchAlgorithmException, IOException {\n\t\t\n\t\tSplitResult splitResult = null;\n\t\t\n\t\tif(fileName == null || fileName.length() <= 3) {\n\t\t\tthrow new IllegalArgumentException(\"File name cannot be null or empty\");\n\t\t}\n\t\t\n\t\t//build URL\n\t\tString strURL = SLIDES_URI + Uri.encode(fileName) + \"/split\";\n\t\t//sign URL\n\t\tString signedURL = Utils.sign(strURL);\n\t\t\n\t\tInputStream responseStream = Utils.processCommand(signedURL, \"POST\");\n\t\tString responseJSONString = Utils.streamToString(responseStream);\n\t\t\n\t\t//Parsing JSON\n\t\tGson gson = new Gson();\n\t\tSplitPowerPointPresentationsResponse splitPowerPointPresentationResponse = gson.fromJson(responseJSONString, SplitPowerPointPresentationsResponse.class);\n\t\tif(splitPowerPointPresentationResponse.getCode().equals(\"200\") && splitPowerPointPresentationResponse.getStatus().equals(\"OK\")) {\n\t\t\tsplitResult = splitPowerPointPresentationResponse.splitResult;\n\t\t}\n\t\t\n\t\treturn splitResult;\n\t}\n\t\n\t/**\n\t * Split specific slides of a presentation file and save each slide as a new HTML or any supported image format\n\t * @param fileName Name of the file stored on cloud\n\t * @param from The start slide number for splitting\n\t * @param to The last slide number for splitting\n\t * @param designatedFormat Valid formats are tiff, jpeg, png, bmp and gif\n\t * @throws java.security.InvalidKeyException If initialization fails because the provided key is null.\n\t * @throws java.security.NoSuchAlgorithmException If the specified algorithm (HmacSHA1) is not available by any 
provider.\n\t * @throws java.io.IOException If there is an IO error\n\t * @return An object that contains URLs to slides\n\t*/\n\tpublic static SplitResult splitPowerPointPresentations(String fileName, int from, int to, ValidSlidesFormats designatedFormat) throws InvalidKeyException, NoSuchAlgorithmException, IOException {\n\t\t\n\t\tSplitResult splitResult = null;\n\t\t\n\t\tif(fileName == null || fileName.length() <= 3) {\n\t\t\tthrow new IllegalArgumentException(\"File name cannot be null or empty\");\n\t\t}\n\t\t\n\t\tif(designatedFormat == null) {\n\t\t\tthrow new IllegalArgumentException(\"Designated format cannot be null\");\n\t\t}\n\t\t\n\t\t//build URL\n\t\tString strURL = SLIDES_URI + Uri.encode(fileName) + \"/split?from=\" + from + \"&to=\" + to + \"&format=\" + designatedFormat;\n\t\t//sign URL\n\t\tString signedURL = Utils.sign(strURL);\n\t\t\n\t\tInputStream responseStream = Utils.processCommand(signedURL, \"POST\");\n\t\tString responseJSONString = Utils.streamToString(responseStream);\n\t\t\n\t\t//Parsing JSON\n\t\tGson gson = new Gson();\n\t\tSplitPowerPointPresentationsResponse splitPowerPointPresentationResponse = gson.fromJson(responseJSONString, SplitPowerPointPresentationsResponse.class);\n\t\tif(splitPowerPointPresentationResponse.getCode().equals(\"200\") && splitPowerPointPresentationResponse.getStatus().equals(\"OK\")) {\n\t\t\tsplitResult = splitPowerPointPresentationResponse.splitResult;\n\t\t}\n\t\t\n\t\treturn splitResult;\n\t}\n}", "meta": {"content_hash": "4e9a9c40037494141cc182d3d22182b4", "timestamp": "", "source": "github", "line_count": 389, "max_line_length": 246, "avg_line_length": 45.52956298200514, "alnum_prop": 0.7584551973349896, "repo_name": "asposeforcloud/Aspose_Cloud_SDK_For_Android", "id": "910d43031fde05302ba3e225638fc69affb2ed36", "size": "17711", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "asposecloudsdk/src/main/java/com/aspose/cloud/sdk/slides/api/Document.java", "mode": "33188", 
"license": "mit", "language": [{"name": "Java", "bytes": "1039150"}]}} {"text": "import React from 'react'\r\nimport ReactDOM from 'react-dom'\r\nimport {Provider} from 'react-redux'\r\nimport store from './store'\r\nimport AppContainer from './components/containers/AppContainer'\r\nimport './css/index.css'\r\n\r\nReactDOM.render(\r\n \r\n \r\n , \r\n document.getElementById('root')\r\n);", "meta": {"content_hash": "a1e7a363296d902730d59fc1d1902d6a", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 63, "avg_line_length": 26.46153846153846, "alnum_prop": 0.7063953488372093, "repo_name": "TheHappyKoala/Gravity-Playground", "id": "0b57d94f377e15bf5a05bd5c9b2d64d07cd55ca0", "size": "344", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/index.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "4821"}, {"name": "HTML", "bytes": "1018"}, {"name": "JavaScript", "bytes": "73640"}]}} {"text": "package org.jeo.feature;\n\nimport java.util.List;\nimport java.util.Map;\n\nimport org.osgeo.proj4j.CoordinateReferenceSystem;\n\nimport com.vividsolutions.jts.geom.Geometry;\n\n/**\n * An object consisting of a set of named attributes, any of which may be a vector geometry. \n * \n * @see ListFeature\n * @see MapFeature\n * \n * @author Justin Deoliveira, OpenGeo\n */\npublic interface Feature {\n\n /**\n * Feature identifier.\n */\n String getId();\n\n /**\n * Coordinate reference system for the feature.\n *

\n * Generally the method {@link #crs()} should be used.\n *

\n * @return The crs, or null if none been set.\n */\n CoordinateReferenceSystem getCRS();\n\n /**\n * Sets the coordinate reference system for the feature.\n */\n void setCRS(CoordinateReferenceSystem crs);\n\n /**\n * The derived coordinate reference system for the feature.\n *

\n * If {@link #getCRS()} returns a value it is returned, otherwise if the feature has a \n * schema object then {@link Schema#crs()} is returned. Otherwise this method returns \n * null.\n *

\n * @return The derived crs.\n */\n CoordinateReferenceSystem crs();\n\n /**\n * Gets a named attribute of the feature.\n *

\n * This method should return null if no such attribute named key exists.\n *

\n * @param key The key or name of the attribute.\n * \n * @return The attribute value or null.\n */\n Object get(String key);\n\n /**\n * Geometry of the feature.\n *\n * @return a {@link Geometry} object, or null if the feature has no geometry.\n */\n Geometry geometry();\n\n /**\n * Sets a named attribute of the feature.\n *\n * @param key The key or name of the attribute. \n * @param val The new value of the attribute. \n */\n void put(String key, Object val);\n\n /**\n * Sets the geometry of the feature.\n */\n void put(Geometry g);\n\n /**\n * The created schema for the feature. \n */\n Schema schema();\n\n /**\n * Returns an immutable list view of the feature\n */\n abstract List list();\n\n /**\n * Returns an immutable map view of the feature.\n */\n abstract Map map();\n}\n", "meta": {"content_hash": "25b85ea7cf0616e0c9a13c9dc7e429af", "timestamp": "", "source": "github", "line_count": 95, "max_line_length": 98, "avg_line_length": 24.305263157894736, "alnum_prop": 0.6071892594196622, "repo_name": "geosolutions-it/jeo", "id": "74c496f62ee7efccdc5715f2f38fcd9106a3008b", "size": "2309", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/src/main/java/org/jeo/feature/Feature.java", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "\npackage org.hl7.hibernate;\n\nimport java.lang.reflect.Method;\nimport java.util.List;\nimport org.hl7.types.BAG;\nimport org.hl7.types.impl.BAGjuListAdapter;\nimport org.hl7.types.impl.BAGnull;\n\n/**\n * Access a BAG property and wrap/unwrap the underlying bag.\n * \n * @author Gunther Schadow\n */\npublic class WrappingBagAccessor extends StrategicPropertyAccessor {\n\n private static class BAGWrapper implements\n\t\t\t\t StrategicPropertyAccessor.FieldWrappingStrategy {\n\n public Object bean2DBValue(Object beanValue) {\n if (beanValue == null) {\n\treturn null;\n } else if (beanValue instanceof BAG) {\n\tfinal BAG bagValue = (BAG) beanValue;\n\tif (bagValue.isNull().isTrue()) {\n\t 
return null;\n\t} else if (bagValue instanceof BAGjuListAdapter) {\n\t return ((BAGjuListAdapter) bagValue)._hibernateUnwrap();\n\t}\n }\n\n throw new RuntimeException(\"class not supported here: \" + beanValue.getClass());\n }\n\n public Object db2BeanValue(Object dbValue) {\n return dbValue == null ? BAGnull.NI : BAGjuListAdapter._hibernateWrap((List) dbValue);\n }\n\n }\n \n @Override\n public StrategicPropertyAccessor.FieldWrappingStrategy getWrappingStrategy() {\n return new BAGWrapper();\n }\n}\n", "meta": {"content_hash": "9518e9df3676f438cab9914e3904dada", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 92, "avg_line_length": 26.666666666666668, "alnum_prop": 0.7208333333333333, "repo_name": "markusgumbel/dshl7", "id": "d61c50bee9ffa8a9167a68a55c859b9be95ddbd7", "size": "1942", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "hl7-javasig/src/org/hl7/hibernate/WrappingBagAccessor.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Groovy", "bytes": "25055"}, {"name": "Java", "bytes": "3897443"}, {"name": "Perl", "bytes": "9821"}, {"name": "Python", "bytes": "3285"}, {"name": "Scala", "bytes": "100505"}, {"name": "Shell", "bytes": "974"}]}} {"text": "/*\n * To change this template, choose Tools | Templates\n * and open the template in the editor.\n */\npackage MyProcess;\n\nimport java.io.BufferedInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.File;\nimport java.io.FileOutputStream;\nimport java.io.InputStream;\nimport java.net.URL;\nimport java.sql.Connection;\nimport java.sql.DriverManager;\nimport java.sql.ResultSet;\nimport java.sql.Statement;\n\n/**\n *\n * @author amungen\n */\npublic class GetImagesT extends Thread {\n\n String generalpath = \"/Volumes/Levent-Taha/Flicker/\";\n\n public void run() {\n allprocess();\n }\n\n public static void main(String[] args) {\n GetImagesT g = new GetImagesT();\n g.allprocess();\n // g.deleteimage(\"adadada\");\n }\n\n public void 
allprocess() {\n // for (int i = 400; i < 700; i++) { createnewfolder(i+\"\"); }\n // for (int i = 0; i < 1000; i++) {\n\n downloadimages();\n // }\n }\n\n public void downloadimages() {\n Connection conn = null;\n Statement st = null;\n ResultSet rs = null;\n try {\n Class.forName(\"com.mysql.jdbc.Driver\").newInstance();\n String dbUrl = \"jdbc:mysql://omitechnology.com/omitechn_flick?useUnicode=true&characterEncode=UTF-8\";\n conn = DriverManager.getConnection(dbUrl, \"omite_flick\", \"flick\");\n conn.createStatement();\n st = conn.createStatement();\n int sayac = 0;\n rs = st.executeQuery(\"Select url_o,ID,PHOTOID FROM photos WHERE downimage=0\");\n int id;\n String folderpath = \"1\";\n String photoid;\n String link;\n\n while (rs.next()) {\n id = rs.getInt(\"id\");\n photoid = rs.getString(\"photoid\");\n folderpath = ((id / 500) + 1) + \"/\";\n link = rs.getString(\"url_o\");\n if (link.length() > 10) {\n singledownloadimage(id, link, folderpath, photoid);\n sayac++;\n if (sayac % 10 == 0) {\n System.out.println(\"Toplam Basarili Download = \" + sayac);\n }\n if (sayac % 4 == 0) {\n System.out.println(\"Break\");\n break;\n\n }\n\n\n\n }\n }\n conn.close();\n rs.close();\n st.close();\n } catch (Exception e) {\n e.printStackTrace();\n }\n\n\n\n\n\n }\n\n public void singledownloadimage(int ID, String link, String folderpath, String photoid) {\n try {\n // System.out.println(\"link = \" + link);\n // System.out.println(\"1 = \" + link.lastIndexOf(\".\"));\n // System.out.println(\"2 = \" + link.length());\n String extention = link.substring(link.lastIndexOf(\".\"), link.length());\n // System.out.println(\"extention = \" + extention);\n URL url = new URL(link);\n InputStream in = new BufferedInputStream(url.openStream());\n ByteArrayOutputStream out = new ByteArrayOutputStream();\n byte[] buf = new byte[1024];\n int n = 0;\n while (-1 != (n = in.read(buf))) {\n out.write(buf, 0, n);\n }\n out.close();\n in.close();\n byte[] response = out.toByteArray();\n if 
(response.length > 10000) {\n FileOutputStream fos = new FileOutputStream(generalpath + folderpath + photoid + extention);\n fos.write(response);\n fos.close();\n updateimagestatus(ID);\n } else {\n deleteimage(photoid);\n }\n\n } catch (Exception e) {\n e.printStackTrace();\n }\n\n }\n\n public void createnewfolder(String foldername) {\n try {\n // File newDir = new File(\"/Users/amungen/Desktop/flicker/\" + foldername);\n File newDir = new File(generalpath + foldername);\n boolean success = newDir.mkdir();\n\n } catch (Exception e) {\n e.printStackTrace();\n }\n\n }\n\n public void updateimagestatus(int ID) {\n Connection conn = null;\n Statement st = null;\n ResultSet rs = null;\n try {\n // omitechn_youtubecraw\t\tomite_youtubecra\t ahmet\n\n Class.forName(\"com.mysql.jdbc.Driver\").newInstance();\n String dbUrl = \"jdbc:mysql://omitechnology.com/omitechn_flick?useUnicode=true&characterEncode=UTF-8\";\n conn = DriverManager.getConnection(dbUrl, \"omite_flick\", \"flick\");\n st = conn.createStatement();\n st.executeUpdate(\"UPDATE photos set downimage=1 WHERE id='\" + ID + \"'\");\n\n\n if (rs != null) {\n rs.close();\n }\n if (st != null) {\n st.close();\n }\n if (conn != null) {\n conn.close();\n }\n\n\n } catch (Exception e) {\n e.printStackTrace();\n }\n\n\n }\n\n public void deleteimage(String imageID) {\n //DELETE FROM photos WHERE photoid='+imageID;\n Connection conn = null;\n Statement st = null;\n ResultSet rs = null;\n try {\n\n Class.forName(\"com.mysql.jdbc.Driver\").newInstance();\n String dbUrl = \"jdbc:mysql://omitechnology.com/omitechn_flick?useUnicode=true&characterEncode=UTF-8\";\n conn = DriverManager.getConnection(dbUrl, \"omite_flick\", \"flick\");\n st = conn.createStatement();\n String sql = \"DELETE FROM photos WHERE photoid='\" + imageID + \"'\";\n // System.out.println(\"sql = \" + sql);\n st.executeUpdate(sql);\n statisticdeletefile();\n\n if (rs != null) {\n rs.close();\n }\n if (st != null) {\n st.close();\n }\n if (conn != null) 
{\n conn.close();\n }\n\n\n } catch (Exception e) {\n e.printStackTrace();\n }\n\n }\n\n public void statisticdeletefile() {\n //UPDATE WRONG SET WrongSize = WrongSize + 1 \n\n Connection conn = null;\n Statement st = null;\n\n try {\n\n\n Class.forName(\"com.mysql.jdbc.Driver\").newInstance();\n String dbUrl = \"jdbc:mysql://omitechnology.com/omitechn_flick?useUnicode=true&characterEncode=UTF-8\";\n conn = DriverManager.getConnection(dbUrl, \"omite_flick\", \"flick\");\n st = conn.createStatement();\n String sqlquery = \"UPDATE statistic SET deletedimagesize = deletedimagesize + 1\";\n\n st.execute(sqlquery);\n conn.close();\n st.close();\n // System.out.println(\"link yok\");\n } catch (Exception e) {\n e.printStackTrace();\n }\n\n\n\n\n\n }\n}\n", "meta": {"content_hash": "c8c9d5e01a9027a5daa5f6df40ce6862", "timestamp": "", "source": "github", "line_count": 233, "max_line_length": 113, "avg_line_length": 29.44206008583691, "alnum_prop": 0.5112244897959184, "repo_name": "pcvlab/Flicker-Data-Integration", "id": "78e9093fd6abe915bd718f22546ed634f29cf49c", "size": "6860", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "FlickerWeb3/src/java/MyProcess/GetImagesT.java", "mode": "33188", "license": "mit", "language": [{"name": "Java", "bytes": "172413"}]}} {"text": "import rethinkdb as r\nconn = r.connect('localhost', 28015)\n\nr.db('rethinkdb').table('cluster_config').get('auth').update(\n {'auth_key': 'cloudroutes'}\n).run(conn)\n", "meta": {"content_hash": "a5210b21ba9d9f65439026c2688fd29d", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 61, "avg_line_length": 27.666666666666668, "alnum_prop": 0.6746987951807228, "repo_name": "Runbook/runbook", "id": "f823089a6e019a470d8c7217fba30608b30d2096", "size": "166", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "src/bridge/mgmtscripts/set_rethink_auth_key.py", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "17816"}, {"name": 
"HTML", "bytes": "227999"}, {"name": "JavaScript", "bytes": "4250"}, {"name": "Python", "bytes": "754910"}, {"name": "Shell", "bytes": "5859"}]}} {"text": "using namespace v8;\n\nPersistent PriorityQ::constructor;\n\n\nPriorityQ::PriorityQ() {\n\n auto compare = [](const std::shared_ptr &lhs, const std::shared_ptr &rhs) -> bool {\n\n return lhs->priority < rhs->priority;\n };\n\n hq_ = std::make_shared(compare);\n}\n\n\nPriorityQ::PriorityQ(Isolate *isolate, Local cmp) {\n\n HandleScope handle_scope(isolate);\n LOGD(\"Setting up comparotor\")\n if (cmp.IsEmpty()) {\n LOGD(\"cmp is EMPTY!\");\n } else {\n LOGD(\"CMP WAS NOT EMPTY\")\n }\n\n hasComparator_ = true;\n\n CopyablePersistentObject pcmp;\n pcmp.Reset(isolate, cmp);\n\n auto ptrCmp = std::make_shared(std::move(pcmp));\n\n auto compare = [ptrCmp, isolate](const std::shared_ptr &lhs,\n const std::shared_ptr &rhs) -> bool {\n //return lhs->priority < rhs->priority;\n LOGD(\"CALLING JS COMPARATOR !\")\n Local g = ptrCmp->Get(isolate);\n if (g.IsEmpty()) {\n LOGD(\"comparotor was empty inside persistent\")\n } else {\n LOGD(\"cmp was NOT empty in persistent\")\n }\n Local fn = Local::Cast(g);\n LOGD(\"CP AFTER CAST\")\n Handle argv[2];\n\n Local o1 = lhs->cpo.Get(isolate);\n argv[0] = o1;\n LOGD(\"AFTER ARG0\")\n\n Local o2 = rhs->cpo.Get(isolate);\n argv[1] = o2;\n LOGD(\"ADTER ARG2\")\n\n MaybeLocal res = fn->Call(Null(isolate), 2, argv);\n if (res.IsEmpty()) {\n LOGD(\"!!!!!!!!!!!!!! BAD FUNCTION CALL. 
No RESULT !!!!!!!!\")\n return false;\n } else {\n return res.ToLocalChecked()->BooleanValue();\n }\n\n };\n\n hq_ = std::make_shared(compare);\n}\n\n\nvoid PriorityQ::Init(v8::Local exports) {\n\n Isolate *isolate = exports->GetIsolate();\n\n // Prepare constructor template\n Local tpl = FunctionTemplate::New(isolate, New);\n tpl->SetClassName(String::NewFromUtf8(isolate, \"priority_queue_native\"));\n tpl->InstanceTemplate()->SetInternalFieldCount(1);\n\n NODE_SET_PROTOTYPE_METHOD(tpl, \"push\", Push);\n NODE_SET_PROTOTYPE_METHOD(tpl, \"top\", Top);\n NODE_SET_PROTOTYPE_METHOD(tpl, \"pop\", Pop);\n NODE_SET_PROTOTYPE_METHOD(tpl, \"size\", Size);\n NODE_SET_PROTOTYPE_METHOD(tpl, \"next\", Next);\n\n NODE_SET_ITERATOR_METHOD(tpl, GetIterator);\n\n constructor.Reset(isolate, tpl->GetFunction());\n exports->Set(String::NewFromUtf8(isolate, \"PriorityQueue\"),\n tpl->GetFunction());\n\n exports->Set(String::NewFromUtf8(isolate, \"PriorityQueueCompare\"),\n tpl->GetFunction());\n\n}\n\n\nvoid PriorityQ::New(const v8::FunctionCallbackInfo &args) {\n\n Isolate *isolate = args.GetIsolate();\n\n if (args.IsConstructCall()) {\n PriorityQ *obj;\n // Invoked as constructor\n if (args[0]->IsFunction()) {\n Local cmp = Local::Cast(args[0]);\n obj = new PriorityQ(isolate, cmp);\n } else {\n obj = new PriorityQ();\n }\n\n obj->Wrap(args.This());\n args.GetReturnValue().Set(args.This());\n } else {\n\n // Invoked as plain function, turn into construct call.\n const int argc = 1;\n Local argv[argc] = {args[0]};\n Local cons = Local::New(isolate, constructor);\n\n MaybeLocal o = cons->NewInstance(isolate->GetCurrentContext(), argc, argv);\n args.GetReturnValue().Set(o.ToLocalChecked());\n }\n\n}\n\n\nvoid PriorityQ::Push(const v8::FunctionCallbackInfo &args) {\n Isolate *isolate = args.GetIsolate();\n PriorityQ *obj = Unwrap(args.Holder());\n double d = 0;\n LocalType t;\n Local lo;\n if (args[0]->IsNumber()) {\n t = LocalType::NUMBER;\n lo = args[0]->ToNumber(isolate);\n } else if 
(args[0]->IsString()) {\n t = LocalType::STRING;\n lo = args[0]->ToString(isolate);\n } else if (args[0]->IsBoolean()) {\n t = LocalType::BOOLEAN;\n lo = args[0]->ToBoolean(isolate);\n } else {\n t = LocalType::OBJECT;\n lo = args[0]->ToObject(isolate);\n }\n\n\n // Check for second arg requirements\n // its required ONLY if there is no comparator\n if (!obj->hasComparator_) {\n Local ln = args[1]->ToNumber(isolate);\n d = ln->NumberValue();\n LOGD2(\"~ Adding to hq_ with priority=\", d);\n }\n\n obj->hq_->push(std::make_shared(d, isolate, lo, t));\n LOGD(\"Item pushed to queue\");\n}\n\n\nLocal PriorityQ::Top_(const v8::FunctionCallbackInfo &args) {\n\n Isolate *isolate = args.GetIsolate();\n EscapableHandleScope my_handle_scope(isolate);\n PriorityQ *obj = Unwrap(args.Holder());\n\n\n // Very important to check size first\n // if queue is empty then calling top and pop\n // will result in segmentation fault\n // wrapping this inside native try/catch will not help\n if (obj->hq_->size() > 0) {\n LOGD(\"Inside Top :: Have items in queue\")\n\n auto top = obj->hq_->top();\n Local lo = top->cpo.Get(isolate);\n LocalType t = top->T_;\n\n switch (t) {\n case LocalType::NUMBER:\n LOGD(\"RETURNING AS NUMBER\")\n return my_handle_scope.Escape(lo->ToNumber(isolate));\n\n case LocalType::STRING:\n LOGD(\"RETURNING AS STRING\")\n return my_handle_scope.Escape(lo->ToString(isolate));\n\n case LocalType::BOOLEAN:\n LOGD(\"RETURNING AS BOOLEAN\")\n return my_handle_scope.Escape(lo->ToBoolean(isolate));\n\n default:\n LOGD(\"RETURNING AS OBJECT\")\n return my_handle_scope.Escape(lo->ToObject(isolate));\n\n }\n\n } else {\n LOGD(\"NO ITEMS IN QUEUE. 
WILL RETURN UNDEFINED TO TOP\")\n return my_handle_scope.Escape(Undefined(isolate));\n }\n\n}\n\n\nvoid PriorityQ::Top(const v8::FunctionCallbackInfo &args) {\n\n args.GetReturnValue().Set(Top_(args));\n\n}\n\n\nvoid PriorityQ::Pop(const v8::FunctionCallbackInfo &args) {\n LOGD(\"ENTERED POP\");\n PriorityQ *obj = Unwrap(args.Holder());\n\n args.GetReturnValue().Set(Top_(args));\n if (obj->hq_->size() > 0) {\n\n obj->hq_->pop();\n }\n\n}\n\n\nvoid PriorityQ::Size(const v8::FunctionCallbackInfo &args) {\n Isolate *isolate = args.GetIsolate();\n PriorityQ *obj = Unwrap(args.Holder());\n\n\n LOGD(\"Looking for size\")\n size_t sz = obj->hq_->size();\n\n args.GetReturnValue().Set(Number::New(isolate, sz));\n}\n\n\nvoid PriorityQ::GetIterator(const v8::FunctionCallbackInfo &args) {\n\n args.GetReturnValue().Set(args.This());\n}\n\n\nvoid PriorityQ::Next(const FunctionCallbackInfo &args) {\n\n Isolate *isolate = args.GetIsolate();\n PriorityQ *obj = Unwrap(args.Holder());\n\n Local retObj = Object::New(isolate);\n\n Local lv;\n\n if (obj->hq_->size() > 0) {\n LOGD(\"Inside Pop :: Have items in queue\")\n lv = Top_(args);\n obj->hq_->pop();\n // now we must create local object with properties that iterator\n // is expected to return\n //\n retObj->Set(String::NewFromUtf8(isolate, ITER_VALUE), lv);\n retObj->Set(String::NewFromUtf8(isolate, ITER_DONE), Boolean::New(isolate, false));\n\n\n } else {\n LOGD(\"ITERATOR::NEXT NO ITEMS IN QUEUE\")\n retObj->Set(String::NewFromUtf8(isolate, ITER_VALUE), Undefined(isolate));\n retObj->Set(String::NewFromUtf8(isolate, ITER_DONE), Boolean::New(isolate, true));\n }\n\n args.GetReturnValue().Set(retObj);\n}\n\n\n// If implementing optional return method of iterator\n// its very important that it returns IteratorResult from it, otherwise\n// some v8 versions will crash.\nvoid PriorityQ::Return(const v8::FunctionCallbackInfo &args) {\n LOGD(\"^^^^^^^^^^^^^ ITER::Return called ^^^^^^^^^^^^^\");\n Isolate *isolate = args.GetIsolate();\n 
Local retObj = Object::New(isolate);\n retObj->Set(String::NewFromUtf8(isolate, ITER_DONE), Boolean::New(isolate, true));\n args.GetReturnValue().Set(retObj);\n}\n\n\nvoid InitAll(Local exports) {\n PriorityQ::Init(exports);\n}\n\nNODE_MODULE(mypq, InitAll)\n\n\n\n", "meta": {"content_hash": "8e1f35c79a2a3bd0980c13350f82478b", "timestamp": "", "source": "github", "line_count": 293, "max_line_length": 117, "avg_line_length": 28.75085324232082, "alnum_prop": 0.6043447293447294, "repo_name": "snytkine/node_priority_queue_native", "id": "7c5034ccfd29d0c9defe8926612c7f11e66f5d01", "size": "8576", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/priorityqueue_native.cpp", "mode": "33188", "license": "mit", "language": [{"name": "C++", "bytes": "14573"}, {"name": "CMake", "bytes": "511"}, {"name": "Python", "bytes": "631"}]}} {"text": "/*\n This file is part of the WebKit open source project.\n This file has been generated by generate-bindings.pl. DO NOT MODIFY!\n\n This library is free software; you can redistribute it and/or\n modify it under the terms of the GNU Library General Public\n License as published by the Free Software Foundation; either\n version 2 of the License, or (at your option) any later version.\n\n This library is distributed in the hope that it will be useful,\n but WITHOUT ANY WARRANTY; without even the implied warranty of\n MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n Library General Public License for more details.\n\n You should have received a copy of the GNU Library General Public License\n along with this library; see the file COPYING.LIB. 
If not, write to\n the Free Software Foundation, Inc., 59 Temple Place - Suite 330,\n Boston, MA 02111-1307, USA.\n*/\n\n#include \"config.h\"\n#include \"V8SVGFETurbulenceElement.h\"\n\n#if ENABLE(SVG) && ENABLE(FILTERS)\n\n#include \"CSSMutableStyleDeclaration.h\"\n#include \"ExceptionCode.h\"\n#include \"RuntimeEnabledFeatures.h\"\n#include \"V8Binding.h\"\n#include \"V8BindingMacros.h\"\n#include \"V8BindingState.h\"\n#include \"V8CSSStyleDeclaration.h\"\n#include \"V8CSSValue.h\"\n#include \"V8DOMWrapper.h\"\n#include \"V8IsolatedContext.h\"\n#include \"V8Proxy.h\"\n#include \"V8SVGAnimatedEnumeration.h\"\n#include \"V8SVGAnimatedInteger.h\"\n#include \"V8SVGAnimatedLength.h\"\n#include \"V8SVGAnimatedNumber.h\"\n#include \"V8SVGAnimatedString.h\"\n#include \"V8SVGElement.h\"\n#include \n#include \n#include \n\nnamespace WebCore {\n\nWrapperTypeInfo V8SVGFETurbulenceElement::info = { V8SVGFETurbulenceElement::GetTemplate, V8SVGFETurbulenceElement::derefObject, 0, &V8SVGElement::info };\n\nnamespace SVGFETurbulenceElementInternal {\n\ntemplate void V8_USE(T) { }\n\nstatic v8::Handle baseFrequencyXAttrGetter(v8::Local name, const v8::AccessorInfo& info)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.baseFrequencyX._get\");\n SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(info.Holder());\n return toV8(imp->baseFrequencyXAnimated());\n}\n\nstatic v8::Handle baseFrequencyYAttrGetter(v8::Local name, const v8::AccessorInfo& info)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.baseFrequencyY._get\");\n SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(info.Holder());\n return toV8(imp->baseFrequencyYAnimated());\n}\n\nstatic v8::Handle numOctavesAttrGetter(v8::Local name, const v8::AccessorInfo& info)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.numOctaves._get\");\n SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(info.Holder());\n return toV8(imp->numOctavesAnimated());\n}\n\nstatic v8::Handle seedAttrGetter(v8::Local name, 
const v8::AccessorInfo& info)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.seed._get\");\n SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(info.Holder());\n return toV8(imp->seedAnimated());\n}\n\nstatic v8::Handle stitchTilesAttrGetter(v8::Local name, const v8::AccessorInfo& info)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.stitchTiles._get\");\n SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(info.Holder());\n return toV8(static_pointer_cast(imp->stitchTilesAnimated()));\n}\n\nstatic v8::Handle typeAttrGetter(v8::Local name, const v8::AccessorInfo& info)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.type._get\");\n SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(info.Holder());\n return toV8(static_pointer_cast(imp->typeAnimated()));\n}\n\nstatic v8::Handle xAttrGetter(v8::Local name, const v8::AccessorInfo& info)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.x._get\");\n SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(info.Holder());\n return toV8(imp->xAnimated());\n}\n\nstatic v8::Handle yAttrGetter(v8::Local name, const v8::AccessorInfo& info)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.y._get\");\n SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(info.Holder());\n return toV8(imp->yAnimated());\n}\n\nstatic v8::Handle widthAttrGetter(v8::Local name, const v8::AccessorInfo& info)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.width._get\");\n SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(info.Holder());\n return toV8(imp->widthAnimated());\n}\n\nstatic v8::Handle heightAttrGetter(v8::Local name, const v8::AccessorInfo& info)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.height._get\");\n SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(info.Holder());\n return toV8(imp->heightAnimated());\n}\n\nstatic v8::Handle resultAttrGetter(v8::Local name, const v8::AccessorInfo& info)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.result._get\");\n 
SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(info.Holder());\n return toV8(imp->resultAnimated());\n}\n\nstatic v8::Handle classNameAttrGetter(v8::Local name, const v8::AccessorInfo& info)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.className._get\");\n SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(info.Holder());\n return toV8(imp->classNameAnimated());\n}\n\nstatic v8::Handle styleAttrGetter(v8::Local name, const v8::AccessorInfo& info)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.style._get\");\n SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(info.Holder());\n return toV8(imp->style());\n}\n\nstatic v8::Handle getPresentationAttributeCallback(const v8::Arguments& args)\n{\n INC_STATS(\"DOM.SVGFETurbulenceElement.getPresentationAttribute\");\n SVGFETurbulenceElement* imp = V8SVGFETurbulenceElement::toNative(args.Holder());\n STRING_TO_V8PARAMETER_EXCEPTION_BLOCK(V8Parameter<>, name, MAYBE_MISSING_PARAMETER(args, 0, MissingIsUndefined));\n return toV8(imp->getPresentationAttribute(name));\n}\n\n} // namespace SVGFETurbulenceElementInternal\n\nstatic const BatchedAttribute SVGFETurbulenceElementAttrs[] = {\n // Attribute 'baseFrequencyX' (Type: 'readonly attribute' ExtAttr: '')\n {\"baseFrequencyX\", SVGFETurbulenceElementInternal::baseFrequencyXAttrGetter, 0, 0 /* no data */, static_cast(v8::DEFAULT), static_cast(v8::None), 0 /* on instance */},\n // Attribute 'baseFrequencyY' (Type: 'readonly attribute' ExtAttr: '')\n {\"baseFrequencyY\", SVGFETurbulenceElementInternal::baseFrequencyYAttrGetter, 0, 0 /* no data */, static_cast(v8::DEFAULT), static_cast(v8::None), 0 /* on instance */},\n // Attribute 'numOctaves' (Type: 'readonly attribute' ExtAttr: '')\n {\"numOctaves\", SVGFETurbulenceElementInternal::numOctavesAttrGetter, 0, 0 /* no data */, static_cast(v8::DEFAULT), static_cast(v8::None), 0 /* on instance */},\n // Attribute 'seed' (Type: 'readonly attribute' ExtAttr: '')\n {\"seed\", 
SVGFETurbulenceElementInternal::seedAttrGetter, 0, 0 /* no data */, static_cast(v8::DEFAULT), static_cast(v8::None), 0 /* on instance */},\n // Attribute 'stitchTiles' (Type: 'readonly attribute' ExtAttr: '')\n {\"stitchTiles\", SVGFETurbulenceElementInternal::stitchTilesAttrGetter, 0, 0 /* no data */, static_cast(v8::DEFAULT), static_cast(v8::None), 0 /* on instance */},\n // Attribute 'type' (Type: 'readonly attribute' ExtAttr: '')\n {\"type\", SVGFETurbulenceElementInternal::typeAttrGetter, 0, 0 /* no data */, static_cast(v8::DEFAULT), static_cast(v8::None), 0 /* on instance */},\n // Attribute 'x' (Type: 'readonly attribute' ExtAttr: '')\n {\"x\", SVGFETurbulenceElementInternal::xAttrGetter, 0, 0 /* no data */, static_cast(v8::DEFAULT), static_cast(v8::None), 0 /* on instance */},\n // Attribute 'y' (Type: 'readonly attribute' ExtAttr: '')\n {\"y\", SVGFETurbulenceElementInternal::yAttrGetter, 0, 0 /* no data */, static_cast(v8::DEFAULT), static_cast(v8::None), 0 /* on instance */},\n // Attribute 'width' (Type: 'readonly attribute' ExtAttr: '')\n {\"width\", SVGFETurbulenceElementInternal::widthAttrGetter, 0, 0 /* no data */, static_cast(v8::DEFAULT), static_cast(v8::None), 0 /* on instance */},\n // Attribute 'height' (Type: 'readonly attribute' ExtAttr: '')\n {\"height\", SVGFETurbulenceElementInternal::heightAttrGetter, 0, 0 /* no data */, static_cast(v8::DEFAULT), static_cast(v8::None), 0 /* on instance */},\n // Attribute 'result' (Type: 'readonly attribute' ExtAttr: '')\n {\"result\", SVGFETurbulenceElementInternal::resultAttrGetter, 0, 0 /* no data */, static_cast(v8::DEFAULT), static_cast(v8::None), 0 /* on instance */},\n // Attribute 'className' (Type: 'readonly attribute' ExtAttr: '')\n {\"className\", SVGFETurbulenceElementInternal::classNameAttrGetter, 0, 0 /* no data */, static_cast(v8::DEFAULT), static_cast(v8::None), 0 /* on instance */},\n // Attribute 'style' (Type: 'readonly attribute' ExtAttr: '')\n {\"style\", 
SVGFETurbulenceElementInternal::styleAttrGetter, 0, 0 /* no data */, static_cast(v8::DEFAULT), static_cast(v8::None), 0 /* on instance */},\n};\n\nstatic const BatchedCallback SVGFETurbulenceElementCallbacks[] = {\n {\"getPresentationAttribute\", SVGFETurbulenceElementInternal::getPresentationAttributeCallback},\n};\n\nstatic const BatchedConstant SVGFETurbulenceElementConsts[] = {\n {\"SVG_TURBULENCE_TYPE_UNKNOWN\", static_cast(0)},\n {\"SVG_TURBULENCE_TYPE_FRACTALNOISE\", static_cast(1)},\n {\"SVG_TURBULENCE_TYPE_TURBULENCE\", static_cast(2)},\n {\"SVG_STITCHTYPE_UNKNOWN\", static_cast(0)},\n {\"SVG_STITCHTYPE_STITCH\", static_cast(1)},\n {\"SVG_STITCHTYPE_NOSTITCH\", static_cast(2)},\n};\n\nstatic v8::Persistent ConfigureV8SVGFETurbulenceElementTemplate(v8::Persistent desc)\n{\n desc->ReadOnlyPrototype();\n\n v8::Local defaultSignature = configureTemplate(desc, \"SVGFETurbulenceElement\", V8SVGElement::GetTemplate(), V8SVGFETurbulenceElement::internalFieldCount,\n SVGFETurbulenceElementAttrs, WTF_ARRAY_LENGTH(SVGFETurbulenceElementAttrs),\n SVGFETurbulenceElementCallbacks, WTF_ARRAY_LENGTH(SVGFETurbulenceElementCallbacks));\n v8::Local instance = desc->InstanceTemplate();\n v8::Local proto = desc->PrototypeTemplate();\n \n batchConfigureConstants(desc, proto, SVGFETurbulenceElementConsts, WTF_ARRAY_LENGTH(SVGFETurbulenceElementConsts));\n\n // Custom toString template\n desc->Set(getToStringName(), getToStringTemplate());\n return desc;\n}\n\nv8::Persistent V8SVGFETurbulenceElement::GetRawTemplate()\n{\n V8BindingPerIsolateData* data = V8BindingPerIsolateData::current();\n V8BindingPerIsolateData::TemplateMap::iterator result = data->rawTemplateMap().find(&info);\n if (result != data->rawTemplateMap().end())\n return result->second;\n\n v8::HandleScope handleScope;\n v8::Persistent templ = createRawTemplate();\n data->rawTemplateMap().add(&info, templ);\n return templ;\n}\n\nv8::Persistent V8SVGFETurbulenceElement::GetTemplate()\n{\n V8BindingPerIsolateData* 
data = V8BindingPerIsolateData::current();\n V8BindingPerIsolateData::TemplateMap::iterator result = data->templateMap().find(&info);\n if (result != data->templateMap().end())\n return result->second;\n\n v8::HandleScope handleScope;\n v8::Persistent templ =\n ConfigureV8SVGFETurbulenceElementTemplate(GetRawTemplate());\n data->templateMap().add(&info, templ);\n return templ;\n}\n\nbool V8SVGFETurbulenceElement::HasInstance(v8::Handle value)\n{\n return GetRawTemplate()->HasInstance(value);\n}\n\n\nv8::Handle V8SVGFETurbulenceElement::wrapSlow(SVGFETurbulenceElement* impl)\n{\n v8::Handle wrapper;\n V8Proxy* proxy = 0;\n if (impl->document()) {\n proxy = V8Proxy::retrieve(impl->document()->frame());\n if (proxy && static_cast(impl->document()) == static_cast(impl)) {\n if (proxy->windowShell()->initContextIfNeeded()) {\n // initContextIfNeeded may have created a wrapper for the object, retry from the start.\n return V8SVGFETurbulenceElement::wrap(impl);\n }\n }\n }\n\n\n v8::Handle context;\n if (proxy)\n context = proxy->context();\n\n // Enter the node's context and create the wrapper in that context.\n if (!context.IsEmpty())\n context->Enter();\n wrapper = V8DOMWrapper::instantiateV8Object(proxy, &info, impl);\n // Exit the node's context if it was entered.\n if (!context.IsEmpty())\n context->Exit();\n if (wrapper.IsEmpty())\n return wrapper;\n\n impl->ref();\n v8::Persistent wrapperHandle = v8::Persistent::New(wrapper);\n\n if (!hasDependentLifetime)\n wrapperHandle.MarkIndependent();\n wrapperHandle.SetWrapperClassId(v8DOMSubtreeClassId);\n getDOMNodeMap().set(impl, wrapperHandle);\n return wrapper;\n}\n\nvoid V8SVGFETurbulenceElement::derefObject(void* object)\n{\n static_cast(object)->deref();\n}\n\n} // namespace WebCore\n\n#endif // ENABLE(SVG) && ENABLE(FILTERS)\n", "meta": {"content_hash": "c86795024df3a1976bbfa5591ff239ed", "timestamp": "", "source": "github", "line_count": 294, "max_line_length": 213, "avg_line_length": 47.54081632653061, 
"alnum_prop": 0.7255491164055233, "repo_name": "Treeeater/WebPermission", "id": "2639810f3f0f575252c83425bb27d61b77f2fe2b", "size": "13977", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src_chrome_Release_obj_global_intermediate_webcore/bindings/V8SVGFETurbulenceElement.cpp", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "Assembly", "bytes": "1301"}, {"name": "C", "bytes": "1820540"}, {"name": "C++", "bytes": "38574524"}, {"name": "Java", "bytes": "4882"}, {"name": "JavaScript", "bytes": "2238901"}, {"name": "Objective-C", "bytes": "1768529"}, {"name": "PHP", "bytes": "606"}, {"name": "Perl", "bytes": "699893"}, {"name": "Prolog", "bytes": "142937"}, {"name": "Python", "bytes": "131318"}, {"name": "R", "bytes": "290"}, {"name": "Ruby", "bytes": "3798"}, {"name": "Shell", "bytes": "52312"}]}} {"text": "\"\"\"\nThe `compat` module provides support for backwards compatibility with older\nversions of Django/Python, and compatibility wrappers around optional packages.\n\"\"\"\n\n# flake8: noqa\nfrom __future__ import unicode_literals\n\nimport inspect\n\nimport django\nfrom django.apps import apps\nfrom django.conf import settings\nfrom django.core.exceptions import ImproperlyConfigured\nfrom django.db import connection, models, transaction\nfrom django.template import Context, RequestContext, Template\nfrom django.utils import six\nfrom django.views.generic import View\n\n\ntry:\n from django.urls import (\n NoReverseMatch, RegexURLPattern, RegexURLResolver, ResolverMatch, Resolver404, get_script_prefix, reverse, reverse_lazy, resolve\n )\nexcept ImportError:\n from django.core.urlresolvers import ( # Will be removed in Django 2.0\n NoReverseMatch, RegexURLPattern, RegexURLResolver, ResolverMatch, Resolver404, get_script_prefix, reverse, reverse_lazy, resolve\n )\n\n\ntry:\n import urlparse # Python 2.x\nexcept ImportError:\n import urllib.parse as urlparse\n\n\ndef unicode_repr(instance):\n # Get the repr of an instance, but 
ensure it is a unicode string\n # on both python 3 (already the case) and 2 (not the case).\n if six.PY2:\n return repr(instance).decode('utf-8')\n return repr(instance)\n\n\ndef unicode_to_repr(value):\n # Coerce a unicode string to the correct repr return type, depending on\n # the Python version. We wrap all our `__repr__` implementations with\n # this and then use unicode throughout internally.\n if six.PY2:\n return value.encode('utf-8')\n return value\n\n\ndef unicode_http_header(value):\n # Coerce HTTP header value to unicode.\n if isinstance(value, six.binary_type):\n return value.decode('iso-8859-1')\n return value\n\n\ndef total_seconds(timedelta):\n # TimeDelta.total_seconds() is only available in Python 2.7\n if hasattr(timedelta, 'total_seconds'):\n return timedelta.total_seconds()\n else:\n return (timedelta.days * 86400.0) + float(timedelta.seconds) + (timedelta.microseconds / 1000000.0)\n\n\ndef distinct(queryset, base):\n if settings.DATABASES[queryset.db][\"ENGINE\"] == \"django.db.backends.oracle\":\n # distinct analogue for Oracle users\n return base.filter(pk__in=set(queryset.values_list('pk', flat=True)))\n return queryset.distinct()\n\n\n# Obtaining manager instances and names from model options differs after 1.10.\ndef get_names_and_managers(options):\n if django.VERSION >= (1, 10):\n # Django 1.10 onwards provides a `.managers` property on the Options.\n return [\n (manager.name, manager)\n for manager\n in options.managers\n ]\n # For Django 1.8 and 1.9, use the three-tuple information provided\n # by .concrete_managers and .abstract_managers\n return [\n (manager_info[1], manager_info[2])\n for manager_info\n in (options.concrete_managers + options.abstract_managers)\n ]\n\n\n# field.rel is deprecated from 1.9 onwards\ndef get_remote_field(field, **kwargs):\n if 'default' in kwargs:\n if django.VERSION < (1, 9):\n return getattr(field, 'rel', kwargs['default'])\n return getattr(field, 'remote_field', kwargs['default'])\n\n if 
django.VERSION < (1, 9):\n return field.rel\n return field.remote_field\n\n\ndef _resolve_model(obj):\n \"\"\"\n Resolve supplied `obj` to a Django model class.\n\n `obj` must be a Django model class itself, or a string\n representation of one. Useful in situations like GH #1225 where\n Django may not have resolved a string-based reference to a model in\n another model's foreign key definition.\n\n String representations should have the format:\n 'appname.ModelName'\n \"\"\"\n if isinstance(obj, six.string_types) and len(obj.split('.')) == 2:\n app_name, model_name = obj.split('.')\n resolved_model = apps.get_model(app_name, model_name)\n if resolved_model is None:\n msg = \"Django did not return a model for {0}.{1}\"\n raise ImproperlyConfigured(msg.format(app_name, model_name))\n return resolved_model\n elif inspect.isclass(obj) and issubclass(obj, models.Model):\n return obj\n raise ValueError(\"{0} is not a Django model\".format(obj))\n\n\ndef is_authenticated(user):\n if django.VERSION < (1, 10):\n return user.is_authenticated()\n return user.is_authenticated\n\n\ndef is_anonymous(user):\n if django.VERSION < (1, 10):\n return user.is_anonymous()\n return user.is_anonymous\n\n\ndef get_related_model(field):\n if django.VERSION < (1, 9):\n return _resolve_model(field.rel.to)\n return field.remote_field.model\n\n\ndef value_from_object(field, obj):\n if django.VERSION < (1, 9):\n return field._get_val_from_obj(obj)\n return field.value_from_object(obj)\n\n\n# contrib.postgres only supported from 1.8 onwards.\ntry:\n from django.contrib.postgres import fields as postgres_fields\nexcept ImportError:\n postgres_fields = None\n\n\n# JSONField is only supported from 1.9 onwards\ntry:\n from django.contrib.postgres.fields import JSONField\nexcept ImportError:\n JSONField = None\n\n\n# coreapi is optional (Note that uritemplate is a dependency of coreapi)\ntry:\n import coreapi\n import uritemplate\nexcept (ImportError, SyntaxError):\n # SyntaxError is possible under 
python 3.2\n coreapi = None\n uritemplate = None\n\n\n# coreschema is optional\ntry:\n import coreschema\nexcept ImportError:\n coreschema = None\n\n\n# django-filter is optional\ntry:\n import django_filters\nexcept ImportError:\n django_filters = None\n\n\n# django-crispy-forms is optional\ntry:\n import crispy_forms\nexcept ImportError:\n crispy_forms = None\n\n\n# requests is optional\ntry:\n import requests\nexcept ImportError:\n requests = None\n\n\n# Django-guardian is optional. Import only if guardian is in INSTALLED_APPS\n# Fixes (#1712). We keep the try/except for the test suite.\nguardian = None\ntry:\n if 'guardian' in settings.INSTALLED_APPS:\n import guardian\nexcept ImportError:\n pass\n\n\n# PATCH method is not implemented by Django\nif 'patch' not in View.http_method_names:\n View.http_method_names = View.http_method_names + ['patch']\n\n\n# Markdown is optional\ntry:\n import markdown\n\n if markdown.version <= '2.2':\n HEADERID_EXT_PATH = 'headerid'\n LEVEL_PARAM = 'level'\n elif markdown.version < '2.6':\n HEADERID_EXT_PATH = 'markdown.extensions.headerid'\n LEVEL_PARAM = 'level'\n else:\n HEADERID_EXT_PATH = 'markdown.extensions.toc'\n LEVEL_PARAM = 'baselevel'\n\n def apply_markdown(text):\n \"\"\"\n Simple wrapper around :func:`markdown.markdown` to set the base level\n of '#' style headers to

.\n \"\"\"\n extensions = [HEADERID_EXT_PATH]\n extension_configs = {\n HEADERID_EXT_PATH: {\n LEVEL_PARAM: '2'\n }\n }\n md = markdown.Markdown(\n extensions=extensions, extension_configs=extension_configs\n )\n return md.convert(text)\nexcept ImportError:\n apply_markdown = None\n markdown = None\n\n\ntry:\n import pygments\n from pygments.lexers import get_lexer_by_name\n from pygments.formatters import HtmlFormatter\n\n def pygments_highlight(text, lang, style):\n lexer = get_lexer_by_name(lang, stripall=False)\n formatter = HtmlFormatter(nowrap=True, style=style)\n return pygments.highlight(text, lexer, formatter)\n\n def pygments_css(style):\n formatter = HtmlFormatter(style=style)\n return formatter.get_style_defs('.highlight')\n\nexcept ImportError:\n pygments = None\n\n def pygments_highlight(text, lang, style):\n return text\n\n def pygments_css(style):\n return None\n\n# `separators` argument to `json.dumps()` differs between 2.x and 3.x\n# See: http://bugs.python.org/issue22767\nif six.PY3:\n SHORT_SEPARATORS = (',', ':')\n LONG_SEPARATORS = (', ', ': ')\n INDENT_SEPARATORS = (',', ': ')\nelse:\n SHORT_SEPARATORS = (b',', b':')\n LONG_SEPARATORS = (b', ', b': ')\n INDENT_SEPARATORS = (b',', b': ')\n\ntry:\n # DecimalValidator is unavailable in Django < 1.9\n from django.core.validators import DecimalValidator\nexcept ImportError:\n DecimalValidator = None\n\n\ndef set_rollback():\n if hasattr(transaction, 'set_rollback'):\n if connection.settings_dict.get('ATOMIC_REQUESTS', False):\n # If running in >=1.6 then mark a rollback as required,\n # and allow it to be handled by Django.\n if connection.in_atomic_block:\n transaction.set_rollback(True)\n elif transaction.is_managed():\n # Otherwise handle it explicitly if in managed mode.\n if transaction.is_dirty():\n transaction.rollback()\n transaction.leave_transaction_management()\n else:\n # transaction not managed\n pass\n\n\ndef template_render(template, context=None, request=None):\n \"\"\"\n Passing 
Context or RequestContext to Template.render is deprecated in 1.9+,\n see https://github.com/django/django/pull/3883 and\n https://github.com/django/django/blob/1.9/django/template/backends/django.py#L82-L84\n\n :param template: Template instance\n :param context: dict\n :param request: Request instance\n :return: rendered template as SafeText instance\n \"\"\"\n if isinstance(template, Template):\n if request:\n context = RequestContext(request, context)\n else:\n context = Context(context)\n return template.render(context)\n # backends template, e.g. django.template.backends.django.Template\n else:\n return template.render(context, request=request)\n\n\ndef set_many(instance, field, value):\n if django.VERSION < (1, 10):\n setattr(instance, field, value)\n else:\n field = getattr(instance, field)\n field.set(value)\n\ndef include(module, namespace=None, app_name=None):\n from django.conf.urls import include\n if django.VERSION < (1,9):\n return include(module, namespace, app_name)\n else:\n return include((module, app_name), namespace)\n", "meta": {"content_hash": "0bdefcfe7929383e99b8e6e17e3d101a", "timestamp": "", "source": "github", "line_count": 347, "max_line_length": 136, "avg_line_length": 29.259365994236312, "alnum_prop": 0.6717226435536294, "repo_name": "BassantMorsi/finderApp", "id": "45ac498417378ba92e6968d4026d473c8dfca7b3", "size": "10153", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "lib/python2.7/site-packages/rest_framework/compat.py", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "220402"}, {"name": "C++", "bytes": "96699"}, {"name": "CSS", "bytes": "84455"}, {"name": "Fortran", "bytes": "7439"}, {"name": "HTML", "bytes": "217197"}, {"name": "JavaScript", "bytes": "365169"}, {"name": "Objective-C", "bytes": "567"}, {"name": "Python", "bytes": "14137616"}, {"name": "Shell", "bytes": "3238"}]}} {"text": "\n\npackage org.drools.core.base.accumulators;\n\nimport java.io.Externalizable;\nimport 
java.io.IOException;\nimport java.io.ObjectInput;\nimport java.io.ObjectOutput;\n\n/**\n * An implementation of an accumulator capable of calculating maximum values\n */\npublic class MaxAccumulateFunction extends AbstractAccumulateFunction {\n\n public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {\n\n }\n\n public void writeExternal(ObjectOutput out) throws IOException {\n\n }\n\n protected static class MaxData implements Externalizable {\n public Comparable max = null;\n\n public MaxData() {}\n\n public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {\n max = (Comparable) in.readObject();\n }\n\n public void writeExternal(ObjectOutput out) throws IOException {\n out.writeObject(max);\n }\n\n @Override\n public String toString() {\n return \"max\";\n }\n }\n\n public MaxData createContext() {\n return new MaxData();\n }\n\n public void init(MaxData data) {\n data.max = null;\n }\n\n public void accumulate(MaxData data,\n Object value) {\n if (value != null) {\n data.max = data.max == null || data.max.compareTo( value ) < 0 ?\n (Comparable) value :\n data.max;\n }\n }\n\n public void reverse(MaxData data,\n Object value) {\n }\n\n public Object getResult(MaxData data) {\n return data.max;\n }\n\n public boolean supportsReverse() {\n return false;\n }\n\n public Class getResultType() {\n return Comparable.class;\n }\n}\n", "meta": {"content_hash": "1562e44275c09a6af042ef699efb7972", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 102, "avg_line_length": 23.783783783783782, "alnum_prop": 0.6102272727272727, "repo_name": "droolsjbpm/drools", "id": "64873471281cc3964259e5d29164d9b26f61aecb", "size": "2380", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "drools-core/src/main/java/org/drools/core/base/accumulators/MaxAccumulateFunction.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "2554"}, {"name": "CSS", "bytes": 
"1412"}, {"name": "GAP", "bytes": "197127"}, {"name": "HTML", "bytes": "9298"}, {"name": "Java", "bytes": "26871006"}, {"name": "Protocol Buffer", "bytes": "13855"}, {"name": "Python", "bytes": "4555"}, {"name": "Ruby", "bytes": "491"}, {"name": "Shell", "bytes": "1120"}, {"name": "Standard ML", "bytes": "82260"}, {"name": "XSLT", "bytes": "24302"}]}} {"text": "\n\n/**\n * @file safety.c\n * Safety button logic.\n */\n\n#include \n\n#include \n\n#include \n\n#include \"px4io.h\"\n\nstatic struct hrt_call arming_call;\nstatic struct hrt_call failsafe_call;\n\n/*\n * Count the number of times in a row that we see the arming button\n * held down.\n */\nstatic unsigned counter = 0;\n\n/*\n * Define the various LED flash sequences for each system state.\n */\n#define LED_PATTERN_FMU_OK_TO_ARM \t\t0x0003\t\t/**< slow blinking\t\t\t*/\n#define LED_PATTERN_FMU_REFUSE_TO_ARM \t\t0x5555\t\t/**< fast blinking\t\t\t*/\n#define LED_PATTERN_IO_ARMED \t\t\t0x5050\t\t/**< long off, then double blink \t*/\n#define LED_PATTERN_FMU_ARMED \t\t\t0x5500\t\t/**< long off, then quad blink \t\t*/\n#define LED_PATTERN_IO_FMU_ARMED \t\t0xffff\t\t/**< constantly on\t\t\t*/\n\nstatic unsigned blink_counter = 0;\n\n/*\n * IMPORTANT: The arming state machine critically\n * \t depends on using the same threshold\n * for arming and disarming. 
Since disarming\n * is quite deadly for the system, a similar\n * length can be justified.\n */\n#define ARM_COUNTER_THRESHOLD\t10\n\nstatic bool safety_button_pressed;\n\nstatic void safety_check_button(void *arg);\nstatic void failsafe_blink(void *arg);\n\nvoid\nsafety_init(void)\n{\n\t/* arrange for the button handler to be called at 10Hz */\n\thrt_call_every(&arming_call, 1000, 100000, safety_check_button, NULL);\n}\n\nvoid\nfailsafe_led_init(void)\n{\n\t/* arrange for the failsafe blinker to be called at 8Hz */\n\thrt_call_every(&failsafe_call, 1000, 125000, failsafe_blink, NULL);\n}\n\nstatic void\nsafety_check_button(void *arg)\n{\n\t/*\n\t * Debounce the safety button, change state if it has been held for long enough.\n\t *\n\t */\n\tsafety_button_pressed = BUTTON_SAFETY;\n\n\t/*\n\t * Keep pressed for a while to arm.\n\t *\n\t * Note that the counting sequence has to be same length\n\t * for arming / disarming in order to end up as proper\n\t * state machine, keep ARM_COUNTER_THRESHOLD the same\n\t * length in all cases of the if/else struct below.\n\t */\n\tif (safety_button_pressed && !(r_status_flags & PX4IO_P_STATUS_FLAGS_SAFETY_OFF) &&\n\t (r_setup_arming & PX4IO_P_SETUP_ARMING_IO_ARM_OK)) {\n\n\t\tif (counter < ARM_COUNTER_THRESHOLD) {\n\t\t\tcounter++;\n\n\t\t} else if (counter == ARM_COUNTER_THRESHOLD) {\n\t\t\t/* switch to armed state */\n\t\t\tr_status_flags |= PX4IO_P_STATUS_FLAGS_SAFETY_OFF;\n\t\t\tcounter++;\n\t\t}\n\n\t} else if (safety_button_pressed && (r_status_flags & PX4IO_P_STATUS_FLAGS_SAFETY_OFF)) {\n\n\t\tif (counter < ARM_COUNTER_THRESHOLD) {\n\t\t\tcounter++;\n\n\t\t} else if (counter == ARM_COUNTER_THRESHOLD) {\n\t\t\t/* change to disarmed state and notify the FMU */\n\t\t\tr_status_flags &= ~PX4IO_P_STATUS_FLAGS_SAFETY_OFF;\n\t\t\tcounter++;\n\t\t}\n\n\t} else {\n\t\tcounter = 0;\n\t}\n\n\t/* Select the appropriate LED flash pattern depending on the current IO/FMU arm state */\n\tuint16_t pattern = 
LED_PATTERN_FMU_REFUSE_TO_ARM;\n\n\tif (r_status_flags & PX4IO_P_STATUS_FLAGS_SAFETY_OFF) {\n\t\tif (r_setup_arming & PX4IO_P_SETUP_ARMING_FMU_ARMED) {\n\t\t\tpattern = LED_PATTERN_IO_FMU_ARMED;\n\n\t\t} else {\n\t\t\tpattern = LED_PATTERN_IO_ARMED;\n\t\t}\n\n\t} else if (r_setup_arming & PX4IO_P_SETUP_ARMING_FMU_ARMED) {\n\t\tpattern = LED_PATTERN_FMU_ARMED;\n\n\t} else if (r_setup_arming & PX4IO_P_SETUP_ARMING_IO_ARM_OK) {\n\t\tpattern = LED_PATTERN_FMU_OK_TO_ARM;\n\n\t}\n\n\t/* Turn the LED on if we have a 1 at the current bit position */\n\tLED_SAFETY(pattern & (1 << blink_counter++));\n\n\tif (blink_counter > 15) {\n\t\tblink_counter = 0;\n\t}\n}\n\nstatic void\nfailsafe_blink(void *arg)\n{\n\t/* indicate that a serious initialisation error occured */\n\tif (!(r_status_flags & PX4IO_P_STATUS_FLAGS_INIT_OK)) {\n\t\tLED_AMBER(true);\n\t\treturn;\n\t}\n\n\tstatic bool failsafe = false;\n\n\t/* blink the failsafe LED if we don't have FMU input */\n\tif (!(r_status_flags & PX4IO_P_STATUS_FLAGS_FMU_OK)) {\n\t\tfailsafe = !failsafe;\n\n\t} else {\n\t\tfailsafe = false;\n\t}\n\n\tLED_AMBER(failsafe);\n}\n", "meta": {"content_hash": "4a2ea0d2b2a9a3a5d3fccc2d5f4342ba", "timestamp": "", "source": "github", "line_count": 155, "max_line_length": 90, "avg_line_length": 24.787096774193547, "alnum_prop": 0.6717855283706403, "repo_name": "darknight-007/Firmware", "id": "f4ab74a45a6c347140b937064aa1ef4876ea7789", "size": "5579", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "src/modules/px4iofirmware/safety.c", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "2610222"}, {"name": "C++", "bytes": "5712694"}, {"name": "CMake", "bytes": "548423"}, {"name": "GDB", "bytes": "785"}, {"name": "Io", "bytes": "241"}, {"name": "Makefile", "bytes": "45018"}, {"name": "Matlab", "bytes": "43628"}, {"name": "Python", "bytes": "646848"}, {"name": "Scilab", "bytes": "1502"}, {"name": "Shell", "bytes": "70591"}]}} {"text": "\n\n\n\n\n\n\nUses of Class 
org.apache.wicket.util.tester.DummyPanelPage (Wicket Parent 1.5.7 API)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
\n\n\n \n \n \n \n \n \n \n \n \n \n
Overview  Package  Class   Use  Tree  Deprecated  Index  Help 
\n
\n\n
\n PREV \n NEXT\n FRAMES  \n NO FRAMES  \n \n\n\n\n
\n\n\n\n
\n
\n

\nUses of Class
org.apache.wicket.util.tester.DummyPanelPage

\n
\nNo usage of org.apache.wicket.util.tester.DummyPanelPage\n

\n


\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n
\n\n\n \n \n \n \n \n \n \n \n \n \n
Overview  Package  Class   Use  Tree  Deprecated  Index  Help 
\n
\n\n
\n PREV \n NEXT\n FRAMES  \n NO FRAMES  \n \n\n\n\n
\n\n\n\n
\nCopyright © 2006-2012 Apache Software Foundation. All Rights Reserved.\n\n\n", "meta": {"content_hash": "c8641c978838a780d9b05db9dc38a548", "timestamp": "", "source": "github", "line_count": 141, "max_line_length": 233, "avg_line_length": 43.46808510638298, "alnum_prop": 0.6165769293522597, "repo_name": "afiantara/apache-wicket-1.5.7", "id": "656a7e16f22cd07b917dcbc0126511bd187d4541", "size": "6129", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "apidocs/org/apache/wicket/util/tester/class-use/DummyPanelPage.html", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Groovy", "bytes": "17122"}, {"name": "Java", "bytes": "10812577"}, {"name": "JavaScript", "bytes": "232484"}]}} {"text": "layout: post\ndate: 2015-11-27\ntitle: \"Camille La Vie Beaded Mesh Two-Tone Dress Sleeveless Floor-Length Mermaid/Trumpet\"\ncategory: Camille La Vie\ntags: [Camille La Vie,Mermaid/Trumpet,Spaghetti Straps,Floor-Length,Sleeveless]\n---\n### Camille La Vie Beaded Mesh Two-Tone Dress\nJust **$279.99**\n### Sleeveless Floor-Length Mermaid/Trumpet \n
BRANDSCamille La Vie
SilhouetteMermaid/Trumpet
NecklineSpaghetti Straps
Hemline/TrainFloor-Length
SleeveSleeveless
\n\"Camille\n\n\"Camille\nBuy it: [https://www.readybrides.com/en/camille-la-vie/10191-camille-la-vie-beaded-mesh-two-tone-dress.html](https://www.readybrides.com/en/camille-la-vie/10191-camille-la-vie-beaded-mesh-two-tone-dress.html)\n", "meta": {"content_hash": "32cfb1d4c19b5facf3a4c6174d0137bc", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 282, "avg_line_length": 97.35714285714286, "alnum_prop": 0.7351430667644901, "repo_name": "HOLEIN/HOLEIN.github.io", "id": "7a714faf1774f6d00daa209332c1f8094ed3ddf2", "size": "1367", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_posts/2015-11-27-Camille-La-Vie-Beaded-Mesh-TwoTone-Dress-Sleeveless-FloorLength-MermaidTrumpet.md", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "83876"}, {"name": "HTML", "bytes": "14547"}, {"name": "Ruby", "bytes": "897"}]}} {"text": "import React from 'react'\nimport { useLocation } from 'react-router'\n\nimport Modal from 'v2/components/UI/Modal/Portal'\nimport ModalFullscreenDialog from 'v2/components/UI/ModalFullscreenDialog'\nimport { ModalFullBlock } from 'v2/components/ModalFullBlock'\nimport { useParams } from 'react-router'\n\nexport const ModalBlockWrapper: React.FC = () => {\n const location = useLocation()\n const params = useParams()\n\n const state = location.state as any\n const context = state.context\n\n const set = new Set(\n context\n ?.filter(\n k =>\n k?.__typename !== 'Channel' &&\n k?.__typename !== 'Group' &&\n k?.__typename !== 'User' &&\n !!k?.id\n )\n .map(k => k.id.toString())\n )\n\n const ids = Array.from(set)\n\n return (\n \n \n \n )\n}\n", "meta": {"content_hash": "67d9517c01ffc14b4752ab4dca458196", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 74, "avg_line_length": 26.34285714285714, "alnum_prop": 0.6290672451193059, "repo_name": "aredotna/ervell", "id": "448d862a2d81e6d6e04e61f1d40c6db302ec835f", "size": "922", "binary": false, "copies": "1", "ref": 
"refs/heads/master", "path": "src/v2/pages/block/ModalBlockWrapper.tsx", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "1214"}, {"name": "CoffeeScript", "bytes": "115860"}, {"name": "HTML", "bytes": "808"}, {"name": "JavaScript", "bytes": "589816"}, {"name": "Procfile", "bytes": "70"}, {"name": "Pug", "bytes": "89001"}, {"name": "Shell", "bytes": "1210"}, {"name": "Stylus", "bytes": "49151"}, {"name": "Swift", "bytes": "15963"}, {"name": "TypeScript", "bytes": "1251781"}]}} {"text": "from led import LED, MAX_BRIGHTNESS\n\n\ndef set_all(num, red, green, blue, brightness=MAX_BRIGHTNESS):\n states = []\n for i in range(num):\n states.append(LED(red, green, blue, brightness))\n return states\n\n\ndef all_off(num):\n return set_all(num, 0, 0, 0, 0)\n", "meta": {"content_hash": "24ce121efb93fb4ba9f2a661efd24f8f", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 62, "avg_line_length": 22.75, "alnum_prop": 0.6446886446886447, "repo_name": "DavidAntliff/apa102", "id": "d1828a32dd236378d5b609fee0f852dfe036fa7f", "size": "273", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "led_list.py", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Python", "bytes": "18505"}]}} {"text": "================================\nFor this assignment, write an http server that will act as a simple data store. It should respond to GET/POST/PUT/PATCH/DELETE requests for a single resource of your choosing. The data coming in from a post request should be saved to a json file in a data folder in your repository, do not commit your data folder to git. For example if a request is sent to /notes with a body of {noteBody: 'hello world'} the json data in the body should be stored in it's own json file. You can pick a naming scheme for the file but I would recommend using the number of files that you have received so far. 
Submit as a pull request to your own repository.\n\nRubric:\n\nHandles REST requests: 3pts\n\nJSON storage: 3pts\n\nTests: 2pts\n\nProject Organization and Development Files: 2pts\n", "meta": {"content_hash": "1fd017f40a1a5fc32cd9de307b4774dd", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 641, "avg_line_length": 66.33333333333333, "alnum_prop": 0.75, "repo_name": "kasimsiddiqui/HTTP_server_simple_persistence", "id": "2d3097181ad0c0ae6599518e8a7ee678ced8db98", "size": "829", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "3399"}]}} {"text": "\ufeff' Licensed to the .NET Foundation under one or more agreements.\n' The .NET Foundation licenses this file to you under the MIT license.\n' See the LICENSE file in the project root for more information.\n\nImports System.Composition\nImports System.Threading\nImports Microsoft.CodeAnalysis.SignatureHelp\nImports Microsoft.CodeAnalysis.VisualBasic.Syntax\nImports Microsoft.CodeAnalysis.VisualBasic.Utilities.IntrinsicOperators\n\nNamespace Microsoft.CodeAnalysis.VisualBasic.SignatureHelp\n \n Partial Friend Class PredefinedCastExpressionSignatureHelpProvider\n Inherits AbstractIntrinsicOperatorSignatureHelpProvider(Of PredefinedCastExpressionSyntax)\n\n \n Public Sub New()\n End Sub\n\n Protected Overrides Function GetIntrinsicOperatorDocumentationAsync(node As PredefinedCastExpressionSyntax, document As Document, cancellationToken As CancellationToken) As ValueTask(Of IEnumerable(Of AbstractIntrinsicOperatorDocumentation))\n Return New ValueTask(Of IEnumerable(Of AbstractIntrinsicOperatorDocumentation))(GetIntrinsicOperatorDocumentationImplAsync(node, document, cancellationToken))\n End Function\n\n Private Async Function GetIntrinsicOperatorDocumentationImplAsync(node As PredefinedCastExpressionSyntax, document As Document, cancellationToken As CancellationToken) As Task(Of 
IEnumerable(Of AbstractIntrinsicOperatorDocumentation))\n Return SpecializedCollections.SingletonEnumerable(New PredefinedCastExpressionDocumentation(node.Keyword.Kind, Await document.Project.GetCompilationAsync(cancellationToken).ConfigureAwait(False)))\n End Function\n\n Protected Overrides Function IsTriggerToken(token As SyntaxToken) As Boolean\n Return token.IsChildToken(Of PredefinedCastExpressionSyntax)(Function(ce) ce.OpenParenToken)\n End Function\n\n Public Overrides Function IsTriggerCharacter(ch As Char) As Boolean\n Return ch = \"(\"c\n End Function\n\n Public Overrides Function IsRetriggerCharacter(ch As Char) As Boolean\n Return ch = \")\"c\n End Function\n\n Protected Overrides Function IsArgumentListToken(node As PredefinedCastExpressionSyntax, token As SyntaxToken) As Boolean\n Return node.Keyword <> token AndAlso\n node.CloseParenToken <> token\n End Function\n End Class\nEnd Namespace\n", "meta": {"content_hash": "7cda3dd24f3eba5bfe688239efcef120", "timestamp": "", "source": "github", "line_count": 45, "max_line_length": 249, "avg_line_length": 55.644444444444446, "alnum_prop": 0.7891373801916933, "repo_name": "abock/roslyn", "id": "1a48d05e3573fd6aa77431a7e2f3e064e7c66868", "size": "2506", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/Features/VisualBasic/Portable/SignatureHelp/PredefinedCastExpressionSignatureHelpProvider.vb", "mode": "33188", "license": "mit", "language": [{"name": "1C Enterprise", "bytes": "289100"}, {"name": "Batchfile", "bytes": "9059"}, {"name": "C#", "bytes": "126276814"}, {"name": "C++", "bytes": "5602"}, {"name": "CMake", "bytes": "8276"}, {"name": "Dockerfile", "bytes": "2450"}, {"name": "F#", "bytes": "549"}, {"name": "PowerShell", "bytes": "236203"}, {"name": "Shell", "bytes": "94929"}, {"name": "Visual Basic .NET", "bytes": "70520200"}]}} {"text": "\n\n\t\n\t\n\t\t\n\t\t\n\t\t\n\t\t\n\t\t\n\t\n\t\n\t\n\t\n\t\tdefault(compile)\">\n\t\t\t\n\t\t\n\t\n\n", "meta": {"content_hash": 
"650d078e4607e24c11d53184eac16278", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 107, "avg_line_length": 41.82608695652174, "alnum_prop": 0.7110187110187111, "repo_name": "Bigsby/PoC", "id": "c9218f98e57ff80e3ff80f42283f5e138ff1ec9e", "size": "962", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "java/sbting/first-play/target/scala-2.12/resolution-cache/org.scala-sbt.temp/temp-module-98ec89c315ed408f615fff2c4774286d17323b01/1.1.7/resolved.xml.xml", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ASP.NET", "bytes": "1063"}, {"name": "Assembly", "bytes": "1596"}, {"name": "Batchfile", "bytes": "239"}, {"name": "C", "bytes": "3627"}, {"name": "C#", "bytes": "1224655"}, {"name": "C++", "bytes": "3472"}, {"name": "CSS", "bytes": "463261"}, {"name": "Dockerfile", "bytes": "604"}, {"name": "EJS", "bytes": "551"}, {"name": "F#", "bytes": "32014"}, {"name": "Gherkin", "bytes": "306"}, {"name": "HTML", "bytes": "1768740"}, {"name": "Java", "bytes": "6809"}, {"name": "JavaScript", "bytes": "19893309"}, {"name": "Less", "bytes": "82171"}, {"name": "Makefile", "bytes": "1185"}, {"name": "Perl", "bytes": "414"}, {"name": "PowerShell", "bytes": "18992"}, {"name": "Python", "bytes": "9118"}, {"name": "Ruby", "bytes": "953"}, {"name": "Rust", "bytes": "45"}, {"name": "SCSS", "bytes": "80"}, {"name": "Scala", "bytes": "14006"}, {"name": "Shell", "bytes": "469"}, {"name": "TypeScript", "bytes": "89628"}, {"name": "Visual Basic .NET", "bytes": "13131"}, {"name": "Vue", "bytes": "11685"}, {"name": "XSLT", "bytes": "62979"}]}} {"text": "http_path = \"/\"\ncss_dir = \"css\"\nsass_dir = \"scss\"\nimages_dir = \"images\"\njavascripts_dir = \"js\"\n\n# You can select your preferred output style here (can be overridden via the command line):\noutput_style = :expanded\n\t\t\t #:compressed\n\t\t\t #:compact \n\n# To enable relative paths to assets via compass helper functions. 
Uncomment:\nrelative_assets = true\n\n# To disable debugging comments that display the original location of your selectors. Uncomment:\n line_comments = false\n\n\n# If you prefer the indented syntax, you might want to regenerate this\n# project again passing --syntax sass, or you can uncomment this:\n# preferred_syntax = :sass\n# and then run:\n# sass-convert -R --from scss --to sass sass scss && rm -rf sass && mv scss sass\n", "meta": {"content_hash": "067f69a088f73733a1d295232c16214e", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 96, "avg_line_length": 31.956521739130434, "alnum_prop": 0.7238095238095238, "repo_name": "jo32/Retina-Sprites-for-Compass", "id": "4e19bf989371379f4f5d7e82f23e9bfcab524f0e", "size": "839", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "demo/config.rb", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "11505"}, {"name": "HTML", "bytes": "563"}, {"name": "Ruby", "bytes": "839"}]}} {"text": "\ufeffusing System.Collections.Generic;\nusing System.Text;\nusing SchoolSystemLogic.Core;\n\nnamespace SchoolSystemLogic.Commands\n{\n public class StudentListMarksCommand : ICommand\n {\n public string Execute(IList parameters)\n {\n var indexOfTheStudent = int.Parse(parameters[0]);\n var marks = Engine.Students[indexOfTheStudent].ListMarks();\n\n var result = new StringBuilder();\n\n if (marks.Length == 0)\n {\n result.Append(\"This student has no marks.\");\n }\n else\n {\n result.AppendLine(\"The student has these marks:\");\n result.AppendLine(marks);\n }\n\n return result.ToString();\n }\n }\n}\n", "meta": {"content_hash": "9836c370df33b1dc6003136cd33783b3", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 71, "avg_line_length": 26.379310344827587, "alnum_prop": 0.5607843137254902, "repo_name": "RuzmanovDev/Telerik-Academy-Season-2016-2017", "id": "b43a72cd516f2cc472e0798473e24c90fcd9a66b", "size": "767", "binary": false, "copies": "1", "ref": 
"refs/heads/master", "path": "Modul-II/01.High-Quality-Code/03.HQC-Part-Two/Exam/SchoolSystem/Exam/SchoolSystemLogic/Commands/StudentListMarksCommand.cs", "mode": "33188", "license": "mit", "language": [{"name": "ASP", "bytes": "156062"}, {"name": "C#", "bytes": "6854317"}, {"name": "CSS", "bytes": "151173"}, {"name": "CoffeeScript", "bytes": "3700"}, {"name": "HTML", "bytes": "3848453"}, {"name": "JavaScript", "bytes": "2098645"}, {"name": "PowerShell", "bytes": "287"}, {"name": "SQLPL", "bytes": "4671"}, {"name": "XSLT", "bytes": "3306"}]}} {"text": "\n * @package framework\n * @subpackage security\n */\nabstract class Authenticator extends Object\n{\n\n\t/**\n\t * This variable holds all full namespaced authenticators that should be used\n\t *\n\t * @var array\n\t */\n\tprivate static $authenticators = array('SilverStripe\\Security\\MemberAuthenticator');\n\n\t/**\n\t * Used to influence the order of authenticators on the login-screen\n\t * (default shows first).\n\t *\n\t * @var string\n\t */\n\tprivate static $default_authenticator = 'SilverStripe\\Security\\MemberAuthenticator';\n\n\n\t/**\n\t * Method to authenticate an user\n\t *\n\t * @param array $RAW_data Raw data to authenticate the user\n\t * @param Form $form Optional: If passed, better error messages can be\n\t * produced by using\n\t * {@link Form::sessionMessage()}\n\t *\n\t * @return bool|Member Returns FALSE if authentication fails, otherwise\n\t * the member object\n\t */\n\tpublic static function authenticate($RAW_data, Form $form = null)\n\t{\n\t}\n\n\t/**\n\t * Method that creates the login form for this authentication method\n\t *\n\t * @param Controller $controller The parent controller, necessary to create the\n\t * appropriate form action tag\n\t *\n\t * @return Form Returns the login form to use with this authentication\n\t * method\n\t */\n\tpublic static function get_login_form(Controller $controller)\n\t{\n\t}\n\n\t/**\n\t * Method that creates the re-authentication form for the 
in-CMS view\n\t *\n\t * @param Controller $controller\n\t */\n\tpublic static function get_cms_login_form(Controller $controller)\n\t{\n\t}\n\n\t/**\n\t * Determine if this authenticator supports in-cms reauthentication\n\t *\n\t * @return bool\n\t */\n\tpublic static function supports_cms()\n\t{\n\t\treturn false;\n\t}\n\n\n\t/**\n\t * Get the name of the authentication method\n\t *\n\t * @return string Returns the name of the authentication method.\n\t */\n\tpublic static function get_name()\n\t{\n\t}\n\n\tpublic static function register($authenticator)\n\t{\n\t\tself::register_authenticator($authenticator);\n\t}\n\n\n\t/**\n\t * Register a new authenticator\n\t *\n\t * The new authenticator has to exist and to be derived from the\n\t * {@link Authenticator}.\n\t * Every authenticator can be registered only once.\n\t *\n\t * @param string $authenticator Name of the authenticator class to\n\t * register\n\t *\n\t * @return bool Returns TRUE on success, FALSE otherwise.\n\t */\n\tpublic static function register_authenticator($authenticator)\n\t{\n\t\t$authenticator = trim($authenticator);\n\n\t\tif (class_exists($authenticator) === false) {\n\t\t\treturn false;\n\t\t}\n\n\t\tif (is_subclass_of($authenticator, 'Authenticator') === false) {\n\t\t\treturn false;\n\t\t}\n\n\t\tif (in_array($authenticator, self::$authenticators, null) === false) {\n\t\t\tif (call_user_func(array($authenticator, 'on_register')) === true) {\n\t\t\t\tself::$authenticators[] = $authenticator;\n\t\t\t} else {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\n\t\treturn true;\n\t}\n\n\tpublic static function unregister($authenticator)\n\t{\n\t\tself::unregister_authenticator($authenticator);\n\t}\n\n\t/**\n\t * Remove a previously registered authenticator\n\t *\n\t * @param string $authenticator Name of the authenticator class to register\n\t *\n\t * @return bool Returns TRUE on success, FALSE otherwise.\n\t */\n\tpublic static function unregister_authenticator($authenticator)\n\t{\n\t\tif 
(call_user_func(array($authenticator, 'on_unregister')) === true\n\t\t\t&& in_array($authenticator, self::$authenticators, null)\n\t\t) {\n\t\t\tunset(self::$authenticators[array_search($authenticator, self::$authenticators, null)]);\n\t\t}\n\n\t}\n\n\n\t/**\n\t * Check if a given authenticator is registered\n\t *\n\t * @param string $authenticator Name of the authenticator class to check\n\t *\n\t * @return bool Returns TRUE if the authenticator is registered, FALSE\n\t * otherwise.\n\t */\n\tpublic static function is_registered($authenticator)\n\t{\n\t\treturn in_array($authenticator, self::$authenticators, null);\n\t}\n\n\n\t/**\n\t * Get all registered authenticators\n\t *\n\t * @return array Returns an array with the class names of all registered\n\t * authenticators.\n\t */\n\tpublic static function get_authenticators()\n\t{\n\t\t// put default authenticator first (mainly for tab-order on loginform)\n\t\tif ($key = array_search(self::$default_authenticator, self::$authenticators, null)) {\n\t\t\tunset(self::$authenticators[$key]);\n\t\t\tarray_unshift(self::$authenticators, self::$default_authenticator);\n\t\t}\n\n\t\treturn self::$authenticators;\n\t}\n\n\t/**\n\t * Set a default authenticator (shows first in tabs)\n\t *\n\t * @param string\n\t */\n\tpublic static function set_default_authenticator($authenticator)\n\t{\n\t\tself::$default_authenticator = $authenticator;\n\n\n\t}\n\n\t/**\n\t * @return string\n\t */\n\tpublic static function get_default_authenticator()\n\t{\n\t\treturn self::$default_authenticator;\n\t}\n\n\n\t/**\n\t * Callback function that is called when the authenticator is registered\n\t *\n\t * Use this method for initialization of a newly registered authenticator.\n\t * Just overload this method and it will be called when the authenticator\n\t * is registered.\n\t * If the method returns FALSE, the authenticator won't be\n\t * registered!\n\t *\n\t * @return bool Returns TRUE on success, FALSE otherwise.\n\t */\n\tprotected static 
function on_register()\n\t{\n\t\treturn true;\n\t}\n\n\t/**\n\t * Callback function that is called when an authenticator is removed.\n\t *\n\t * @return bool\n\t */\n\tprotected static function on_unregister()\n\t{\n\t\treturn true;\n\t}\n}\n\n", "meta": {"content_hash": "5633d6e839d8db5ecfdb8a8c160b881b", "timestamp": "", "source": "github", "line_count": 238, "max_line_length": 91, "avg_line_length": 24.071428571428573, "alnum_prop": 0.6844126374585443, "repo_name": "CasaLaguna/silverstripe-security", "id": "c1169657f9287ceb93143c97c360cdcad04c39f4", "size": "5729", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "code/Authentication/Authenticator.php", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "PHP", "bytes": "386361"}]}} {"text": "package php.runtime;\n\nimport org.junit.Assert;\nimport org.junit.FixMethodOrder;\nimport org.junit.Test;\nimport org.junit.runner.RunWith;\nimport org.junit.runners.JUnit4;\nimport org.junit.runners.MethodSorters;\nimport php.runtime.memory.*;\n\n@RunWith(JUnit4.class)\n@FixMethodOrder(MethodSorters.NAME_ASCENDING)\npublic class MemoryTest {\n\n @Test\n public void testNull(){\n Memory memory = Memory.NULL;\n\n Assert.assertEquals(Memory.Type.NULL, memory.type);\n Assert.assertFalse(memory.toBoolean());\n Assert.assertEquals(\"\", memory.toString());\n Assert.assertEquals(0.0, memory.toDouble(), 0.000001);\n Assert.assertEquals(0, memory.toLong());\n\n Assert.assertNotNull(memory.toNumeric());\n Assert.assertEquals(Memory.Type.INT, memory.toNumeric().type);\n Assert.assertEquals(0, memory.toNumeric().toLong());\n\n Assert.assertEquals(memory, memory.toImmutable());\n Assert.assertTrue(memory.isImmutable());\n }\n\n @Test\n public void testFalse(){\n Memory memory = Memory.FALSE;\n\n Assert.assertEquals(Memory.Type.BOOL, memory.type);\n Assert.assertFalse(memory.toBoolean());\n Assert.assertEquals(\"\", memory.toString());\n Assert.assertEquals(0.0, memory.toDouble(), 0.000001);\n 
Assert.assertEquals(0, memory.toLong());\n\n Assert.assertNotNull(memory.toNumeric());\n Assert.assertEquals(Memory.Type.INT, memory.toNumeric().type);\n Assert.assertEquals(0, memory.toNumeric().toLong());\n\n Assert.assertEquals(memory, memory.toImmutable());\n Assert.assertTrue(memory.isImmutable());\n }\n\n @Test\n public void testTrue(){\n Memory memory = Memory.TRUE;\n\n Assert.assertEquals(Memory.Type.BOOL, memory.type);\n Assert.assertTrue(memory.toBoolean());\n Assert.assertEquals(\"1\", memory.toString());\n Assert.assertEquals(1.0, memory.toDouble(), 0.000001);\n Assert.assertEquals(1, memory.toLong());\n\n Assert.assertNotNull(memory.toNumeric());\n Assert.assertEquals(Memory.Type.INT, memory.toNumeric().type);\n Assert.assertEquals(1, memory.toNumeric().toLong());\n\n Assert.assertEquals(memory, memory.toImmutable());\n Assert.assertTrue(memory.isImmutable());\n }\n\n @Test\n public void testLong(){\n LongMemory memory = new LongMemory(100);\n\n Assert.assertEquals(Memory.Type.INT, memory.type);\n Assert.assertTrue(memory.toBoolean());\n Assert.assertEquals(\"100\", memory.toString());\n Assert.assertEquals(100, memory.toDouble(), 0.000001);\n Assert.assertEquals(100, memory.toLong());\n\n Assert.assertNotNull(memory.toNumeric());\n Assert.assertEquals(Memory.Type.INT, memory.toNumeric().type);\n Assert.assertEquals(100, memory.toNumeric().toLong());\n\n Assert.assertEquals(memory, memory.toImmutable());\n Assert.assertTrue(memory.isImmutable());\n }\n\n @Test\n public void testDouble(){\n DoubleMemory memory = new DoubleMemory(50);\n\n Assert.assertEquals(Memory.Type.DOUBLE, memory.type);\n Assert.assertTrue(memory.toBoolean());\n Assert.assertEquals(\"50\", memory.toString());\n Assert.assertEquals(50, memory.toDouble(), 0.000001);\n Assert.assertEquals(50, memory.toLong());\n\n Assert.assertNotNull(memory.toNumeric());\n Assert.assertEquals(Memory.Type.DOUBLE, memory.toNumeric().type);\n Assert.assertEquals(50, memory.toNumeric().toLong());\n\n 
Assert.assertEquals(memory, memory.toImmutable());\n Assert.assertTrue(memory.isImmutable());\n }\n\n @Test\n public void testString(){\n StringMemory memory = new StringMemory(\"foobar\");\n\n Assert.assertEquals(Memory.Type.STRING, memory.type);\n Assert.assertTrue(memory.toBoolean());\n Assert.assertEquals(\"foobar\", memory.toString());\n Assert.assertEquals(0.0, memory.toDouble(), 0.000001);\n Assert.assertEquals(0, memory.toLong());\n\n Assert.assertNotNull(memory.toNumeric());\n Assert.assertEquals(Memory.Type.INT, memory.toNumeric().type);\n Assert.assertEquals(0, memory.toNumeric().toLong());\n\n Assert.assertEquals(memory, memory.toImmutable());\n Assert.assertTrue(memory.isImmutable());\n\n Assert.assertNull(StringMemory.toLong(\"-\"));\n Assert.assertEquals(-1, StringMemory.toLong(\"-1\").toLong());\n Assert.assertEquals(32, StringMemory.toLong(\"32\").toLong());\n\n Assert.assertTrue(new BinaryMemory(new byte[]{1,2}).toBoolean());\n Assert.assertFalse(new BinaryMemory(new byte[]{}).toBoolean());\n Assert.assertFalse(new BinaryMemory().toBoolean());\n Assert.assertFalse(new BinaryMemory(new byte[]{'0'}).toBoolean());\n\n Assert.assertEquals(StringMemory.valueOf(\"-\").toNumeric().toLong(), 0);\n Assert.assertEquals(StringMemory.valueOf(\"-\").toLong(), 0);\n }\n\n @Test\n public void testReference(){\n ReferenceMemory memory = new ReferenceMemory(Memory.TRUE);\n\n Assert.assertEquals(Memory.Type.REFERENCE, memory.type);\n Assert.assertTrue(memory.toBoolean());\n Assert.assertEquals(\"1\", memory.toString());\n Assert.assertEquals(1, memory.toDouble(), 0.000001);\n Assert.assertEquals(1, memory.toLong());\n\n Assert.assertNotNull(memory.toNumeric());\n Assert.assertEquals(Memory.Type.INT, memory.toNumeric().type);\n Assert.assertEquals(1, memory.toNumeric().toLong());\n\n Assert.assertNotEquals(memory, memory.toImmutable());\n Assert.assertEquals(Memory.TRUE, memory.toImmutable());\n Assert.assertFalse(memory.isImmutable());\n }\n}\n", "meta": 
{"content_hash": "cc2290b486b161933e9b12a4c53e4943", "timestamp": "", "source": "github", "line_count": 153, "max_line_length": 79, "avg_line_length": 37.03921568627451, "alnum_prop": 0.6656079054173284, "repo_name": "jphp-compiler/jphp", "id": "8fd727785024cbdb6904cef7e30d4edb33edeff8", "size": "5667", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "jphp-core/tests/php/runtime/MemoryTest.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "2124"}, {"name": "HTML", "bytes": "4259"}, {"name": "Inno Setup", "bytes": "2041"}, {"name": "Java", "bytes": "4396883"}, {"name": "PHP", "bytes": "1570052"}, {"name": "Shell", "bytes": "5234"}]}} {"text": "package controllers\n\nimport (\n\t\"github.com/revel/examples/chat/app/chatroom\"\n\t\"github.com/revel/revel\"\n)\n\ntype LongPolling struct {\n\t*revel.Controller\n}\n\nfunc (c LongPolling) Room(user string) revel.Result {\n\tchatroom.Join(user)\n\treturn c.Render(user)\n}\n\nfunc (c LongPolling) Say(user, message string) revel.Result {\n\tchatroom.Say(user, message)\n\treturn nil\n}\n\nfunc (c LongPolling) WaitMessages(lastReceived int) revel.Result {\n\tsubscription := chatroom.Subscribe()\n\tdefer subscription.Cancel()\n\n\t// See if anything is new in the archive.\n\tvar events []chatroom.Event\n\tfor _, event := range subscription.Archive {\n\t\tif event.Timestamp > lastReceived {\n\t\t\tevents = append(events, event)\n\t\t}\n\t}\n\n\t// If we found one, grand.\n\tif len(events) > 0 {\n\t\treturn c.RenderJSON(events)\n\t}\n\n\t// Else, wait for something new.\n\tevent := <-subscription.New\n\treturn c.RenderJSON([]chatroom.Event{event})\n}\n\nfunc (c LongPolling) Leave(user string) revel.Result {\n\tchatroom.Leave(user)\n\treturn c.Redirect(Application.Index)\n}\n", "meta": {"content_hash": "4e9e12d93a9c60b9b959157cdbb905cc", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 66, "avg_line_length": 21.425531914893618, "alnum_prop": 
0.7239324726911619, "repo_name": "zhyhang/gofirst", "id": "49a238d3f1041c63a492152cdbc23955ad0a0c38", "size": "1007", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "example/revel/chat/app/controllers/longpolling.go", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "2056"}, {"name": "Go", "bytes": "95000"}, {"name": "HTML", "bytes": "10530"}, {"name": "JavaScript", "bytes": "1192"}]}} {"text": "ACCEPTED\n\n#### According to\nIndex Fungorum\n\n#### Published in\nNuovo Giorn. Bot. Ital. 8: 161 (1892)\n\n#### Original name\nCytospora oleina Berl.\n\n### Remarks\nnull", "meta": {"content_hash": "a79765625d8b2727b7002af9c65a696c", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 37, "avg_line_length": 12.307692307692308, "alnum_prop": 0.6875, "repo_name": "mdoering/backbone", "id": "f4a467fe14952d23a5ff507a1d6ca73d02ca5221", "size": "206", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Fungi/Ascomycota/Sordariomycetes/Diaporthales/Valsaceae/Cytospora/Cytospora oleina/README.md", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "CREATE TABLE `users` (\n\t`id` int unsigned NOT NULL AUTO_INCREMENT PRIMARY KEY,\n\t`access` int NOT NULL DEFAULT 1,\n\t`created` int(10) unsigned NOT NULL,\n\t`last_seen` int(10) unsigned NOT NULL\n) ENGINE='InnoDB';", "meta": {"content_hash": "5823964ee6e3a982725d5ecd1c16311e", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 55, "avg_line_length": 34.666666666666664, "alnum_prop": 0.7211538461538461, "repo_name": "daGrevis/daGrevis.lv-PHP", "id": "9226aa667ca66671bd2d474251617a8fb54d085b", "size": "208", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "application/sql/users.sql", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "3145"}, {"name": "PHP", "bytes": "50433"}, {"name": "Perl", "bytes": "529"}]}} {"text": "#ifndef _flint_h_\r\n#define 
_flint_h_\r\n\r\n\r\n#include \r\n\r\n/*\r\n\tFor real-time applications you need big integers: the number\r\n\tof recorded samples will exceed the maximum 32-bit signed integer\r\n\tafter only 13.5 hours. So having int64_t would be best,\r\n\tand that is indeed what Praat uses for these same purposes,\r\n\tbut the Vokaturi library has to be able to run with old compilers and linkers.\r\n\tNot all old compilers support int64_t, and not all old linkers\r\n\tsupport 64-bit division without calling external functions.\r\n\tSo we simply employ the type \"double\", which can contain signed integers\r\n\tup to 54 bits.\r\n*/\r\ntypedef double flint;\r\n/*\r\n\tOn these high-precision \"integers\",\r\n\tthe integer division (with its rounding down) and the modulo function\r\n\tcannot be performed with the usual operator symbols for integer types\r\n\t(which are \"/\" and \"%\", respectively),\r\n\tso for those operations we use inline functions instead.\r\n*/\r\ninline static flint flint_div (flint a, flint b) { return floor (a / b); }\r\ninline static flint flint_mod (flint a, flint b) { return a - floor (a / b) * b; }\r\n\r\n/* End of file flint.h */\r\n#endif\r\n", "meta": {"content_hash": "68d3df25c82141c73c7e3d6fb0ac1dab", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 82, "avg_line_length": 37.733333333333334, "alnum_prop": 0.7217314487632509, "repo_name": "equilibrium-catalyst/yadayada-rest-api", "id": "978dfc7f3308bc7a3097e124e2be4b57d0e529a9", "size": "1940", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "restapi/speech/src/flint.h", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Python", "bytes": "21030"}]}} {"text": "\ufeffnamespace PetFinder.Data.Migrations\n{\n using System.Data.Entity.Migrations;\n using System.Linq;\n\n public sealed class Configuration : DbMigrationsConfiguration\n {\n public Configuration()\n {\n this.AutomaticMigrationsEnabled = true;\n 
this.AutomaticMigrationDataLossAllowed = false;\n }\n\n protected override void Seed(AppDbContext context)\n {\n if (context.Users.Any())\n {\n return;\n }\n\n var dataSeed = new DataSeed(context); \n dataSeed.SeedRoles();\n dataSeed.SeedAdmin();\n dataSeed.SeedUsers();\n dataSeed.SeedRegions();\n dataSeed.SeedPostCategories();\n dataSeed.SeedPets();\n dataSeed.SeedPostsWithComments(\n context.Users.ToList(), \n context.PostCategories.ToList(), \n context.Pets.ToList(),\n context.Regions.ToList());\n dataSeed.SeedImages(context.Posts.ToList());\n }\n }\n}\n", "meta": {"content_hash": "991f9cb39b01a28858e3f71c18617a1f", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 79, "avg_line_length": 30.02777777777778, "alnum_prop": 0.5504162812210915, "repo_name": "DennyGD/PetFinder", "id": "ce7745cbd097fdea31bc2926bab80a7c0a1ea2ec", "size": "1083", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Source/Data/PetFinder.Data/Migrations/Configuration.cs", "mode": "33188", "license": "mit", "language": [{"name": "ASP", "bytes": "104"}, {"name": "C#", "bytes": "232406"}, {"name": "CSS", "bytes": "513"}, {"name": "JavaScript", "bytes": "1116"}]}} {"text": "using SurgSim::Graphics::OsgTexture1d;\n\nOsgTexture1d::OsgTexture1d() : OsgTexture(new osg::Texture1D())\n{\n}\n\nvoid OsgTexture1d::setSize(int width)\n{\n\tgetOsgTexture1d()->setTextureWidth(width);\n}\n\nvoid OsgTexture1d::getSize(int* width) const\n{\n\t*width = getOsgTexture1d()->getTextureWidth();\n\tif (*width == 0 && getOsgTexture()->getNumImages() > 0)\n\t{\n\t\t*width = getOsgTexture()->getImage(0)->s();\n\t}\n}\n", "meta": {"content_hash": "272594718947a868e6f9dcfae58792f1", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 63, "avg_line_length": 21.157894736842106, "alnum_prop": 0.6965174129353234, "repo_name": "simquest/opensurgsim", "id": "e0263cd888c5839f4b7f04b2b05afa9a4ade5175", "size": "1099", "binary": false, "copies": "1", "ref": 
"refs/heads/master", "path": "SurgSim/Graphics/OsgTexture1d.cpp", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "75121"}, {"name": "C++", "bytes": "7422720"}, {"name": "CMake", "bytes": "198446"}, {"name": "GLSL", "bytes": "72925"}, {"name": "JavaScript", "bytes": "2659"}, {"name": "Python", "bytes": "56744"}]}} {"text": "\npackage com.asakusafw.testdriver.html;\n\nimport static org.hamcrest.Matchers.*;\nimport static org.junit.Assert.*;\n\nimport java.io.File;\nimport java.io.IOException;\n\nimport org.junit.Rule;\nimport org.junit.Test;\nimport org.junit.rules.TemporaryFolder;\n\nimport com.asakusafw.testdriver.core.DataModelDefinition;\nimport com.asakusafw.testdriver.core.Difference;\nimport com.asakusafw.testdriver.core.DifferenceSink;\nimport com.asakusafw.testdriver.core.DifferenceSinkFactory;\nimport com.asakusafw.testdriver.core.TestContext;\nimport com.asakusafw.testdriver.core.TestToolRepository;\nimport com.asakusafw.testdriver.excel.Simple;\nimport com.asakusafw.testdriver.model.SimpleDataModelDefinition;\n\n/**\n * Test for {@link HtmlDifferenceSinkProvider}.\n */\npublic class HtmlDifferenceSinkProviderTest {\n\n static final DataModelDefinition SIMPLE = new SimpleDataModelDefinition<>(Simple.class);\n\n /**\n * temporary folder.\n */\n @Rule\n public final TemporaryFolder temp = new TemporaryFolder();\n\n /**\n * Load the provider via SPI.\n * @throws Exception if failed\n */\n @Test\n public void spi() throws Exception {\n TestToolRepository repo = new TestToolRepository(getClass().getClassLoader());\n\n File file = temp.newFile(\"example.html\");\n file.delete();\n\n DifferenceSinkFactory factory = repo.getDifferenceSinkFactory(file.toURI());\n try (DifferenceSink sink = factory.createSink(SIMPLE, new TestContext.Empty())) {\n Simple expected = new Simple();\n expected.text = \"expected\";\n Simple actual = new Simple();\n actual.text = \"actual\";\n sink.put(new Difference(\n SIMPLE.toReflection(expected),\n 
SIMPLE.toReflection(actual),\n \"testing\"));\n }\n\n assertThat(file.exists(), is(true));\n }\n\n /**\n * Attempt to load the provider via SPI, but its extension is wrong.\n * @throws Exception if failed\n */\n @Test\n public void spi_wrong_extension() throws Exception {\n TestToolRepository repo = new TestToolRepository(getClass().getClassLoader());\n\n File file = temp.newFile(\"example.invalid\");\n file.delete();\n\n DifferenceSinkFactory factory = repo.getDifferenceSinkFactory(file.toURI());\n try (DifferenceSink sink = factory.createSink(SIMPLE, new TestContext.Empty())) {\n sink.close();\n fail();\n } catch (IOException e) {\n // ok.\n }\n }\n}\n", "meta": {"content_hash": "06ba7d52dc3272f5a8656026c07a2459", "timestamp": "", "source": "github", "line_count": 81, "max_line_length": 100, "avg_line_length": 31.19753086419753, "alnum_prop": 0.66798575385833, "repo_name": "akirakw/asakusafw", "id": "f7eb1cebdf6f0b727632f7d81ab06632039a0382", "size": "3139", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "testing-project/asakusa-test-data-provider/src/test/java/com/asakusafw/testdriver/html/HtmlDifferenceSinkProviderTest.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "31"}, {"name": "CSS", "bytes": "650"}, {"name": "Groovy", "bytes": "275943"}, {"name": "Java", "bytes": "12250766"}, {"name": "Lex", "bytes": "12506"}, {"name": "Shell", "bytes": "10149"}]}} {"text": "//Love on the web\nvar Love;\nLove = Love || defineLove();\n\n//TODO: Look into Web Audio API for more advanced compatibility\nLove.Audio = (function() {\n function Audio() {\n define(this);\n }\n \n function define(self) {\n var ctx = new AudioContext() || new webkitAudioContext();\n var volNode = ctx.createGain();\n volNode.connect(ctx.destination);\n \n var panNode = ctx.createPanner();\n panNode.connect(volNode);\n \n var orientation = [ 0, 0, -1, 0, 1, 0 ];\n var position = [ 0, 0, 0 ];\n var velocity = [ 0, 0, 0 ];\n \n 
self.getDistanceModel = function() {\n return panNode.distanceModel;\n };\n \n self.getDopplerScale = function() {\n return 0;\n };\n \n self.getOrientation = function() {\n return orientation;\n };\n \n self.getPosition = function() {\n return position;\n };\n \n self.getSourceCount = function() {\n return 0;\n };\n \n self.getVelocity = function() {\n return velocity;\n };\n \n self.getVolume = function() {\n return volNode.gain.volume;\n };\n \n self.newSource = function(name) {\n return new Love.Audio.Source(name, ctx);\n };\n \n self.pause = function(source) {\n \n };\n \n self.play = function(source) {\n \n };\n \n self.resume = function(source) {\n \n };\n \n self.rewind = function(source) {\n \n };\n \n self.setDistanceModel = function() {\n unimplemented(\"love.audio.setDistanceModel\");\n };\n \n self.setDopplerScale = function() {\n unimplemented(\"love.audio.setDopplerScale\");\n };\n \n self.setOrientation = function() {\n unimplemented(\"love.audio.setOrientation\");\n };\n \n self.setPosition = function() {\n unimplemented(\"love.audio.setPosition\");\n };\n \n self.setVelocity = function() {\n unimplemented(\"love.audio.setVelocity\");\n };\n \n self.setVolume = function(volume) {\n volNode.gain.volume = volume;\n };\n \n self.stop = function(source) {\n \n };\n }\n\n return Audio;\n})();\n\nLove.Audio.Source = (function() {\n function ASource(path, ctx) {\n this.panner = ctx.createPanner();\n \n var req = new XMLHttpRequest();\n req.open(\"GET\", \"lua/\" + path, true);\n req.responseType = \"arraybuffer\";\n req.onload = wrap(this, function(e) {\n ctx.decodeAudioData(req.response, wrap(this, function(buffer) {\n this.buffer = buffer;\n }));\n });\n req.send();\n }\n \n ASource.prototype.clone = function(self) {\n \n };\n \n ASource.prototype.getAttenuationDistance = function(self) {\n unimplemented(\"Source:getAttenuationDistance\");\n };\n \n ASource.prototype.getChannels = function(self) {\n if(self.buffer == null) return 0;\n return 
self.buffer.numberOfChannels;\n };\n \n ASource.prototype.getCone = function() {\n };\n \n return ASource;\n})();;Love = Love || defineLove();\n\nLove.Color = (function() {\n function Color(r, g, b, a) {\n if (typeof r != \"number\") {\n if(typeof r == \"string\") {\n var d = parseInt(/#([A-Fa-f0-9]*)/.exec(r)[1], 16);\n this.r = d >> 16;\n this.g = (d >> 8) & 255;\n this.b = d & 255;\n this.a = 255;\n } else {\n this.r = r.getMember(1) || 0;\n this.g = r.getMember(2) || 0;\n this.b = r.getMember(3) || 0;\n this.a = r.getMember(4) || 255;\n }\n } else {\n this.r = r || 0;\n this.g = g || 0;\n this.b = b || 0;\n this.a = a || 255;\n }\n //this.as_string = (this.r << 16 | this.g << 8 | this.b).toString(16);\n this.as_string = \"rgb(\" + this.r + \",\" + this.g + \",\" + this.b + \")\";\n }\n\n return Color;\n})();;Love = Love || defineLove();\n\nLove.Event = (function() {\n function Event() {\n define(this);\n this.queue = [];\n }\n\n function define(self) {\n self.clear = function() {\n self.queue = [];\n };\n\n self.push = function(eType, a1, a2, a3, a4) {\n var event = [eType, a1, a2, a3, a4];\n self.queue.push(event);\n };\n\n self.quit = function() {\n self.push(\"quit\");\n };\n\n self.pump = function() { /* Uneeded In JS */ };\n\n self.poll = function() {\n unimplemented(\"love.event.poll\");\n };\n\n self.wait = function() {\n unimplemented(\"love.event.wait\");\n }\n }\n\n return Event;\n})();\n;Love = Love || defineLove();\n\n//Most of FileSystem can't be reinplemented but local storage will help\nLove.FileSystem = (function() {\n function FileSystem() {\n define(this);\n }\n \n function define(self) {\n //Things that will work\n self.append = function(name, data) {\n localStorage[name] += data;\n };\n \n self.areSymlinksEnabled = function() {\n return false;\n };\n \n self.createDirectory = function(name) {\n //Unneeded in JS\n };\n \n self.exists = function(name) {\n return localStorage.getItem(name) != null;\n };\n \n self.getAppdataDirectory = 
function() {\n return \"\";\n };\n \n self.getDirectoryItems = function(dir) {\n return new shine.Table();\n };\n \n self.getIdentity = function() {\n return \"\"; \n };\n \n self.getSaveDirectory = function() {\n return \"\"; \n };\n \n self.getSourceBaseDirectory = function() {\n return \"\"; \n };\n \n self.getUserDirectory = function() {\n return \"\"; \n };\n \n self.getWorkingDirectory = function() {\n return \"\"; \n };\n \n self.init = function() {\n //Unneeded in JS \n };\n \n self.isDirectory = function(name) {\n return false;\n };\n \n self.isFile = function(name) {\n return typeof localStorage[name] == \"string\";\n };\n \n self.isFused = function() {\n return false;\n };\n \n self.isSymlink = function() {\n return false; \n };\n \n self.lines = function(filename) {\n return new shine.Table(localStorage[filename].split(\"\\n\"));\n };\n \n self.read = function(filename) {\n return localStorage[filename];\n };\n \n self.remove = function(name) {\n localStorage.removeItem(name);\n };\n \n self.write = function(name, data) {\n localStorage.setItem(name, data);\n };\n \n //Things that won't work\n self.getLastModified = function() {\n neverimplemented(\"love.filesystem.getLastModified\"); \n };\n \n self.getRealDirectory = function() {\n neverimplemented(\"love.filesystem.getRealDirectory\");\n };\n \n self.getSize = function() {\n neverimplemented(\"love.filesystem.getSize\"); \n };\n \n self.load = function() {\n neverimplemented(\"love.filesystem.load\"); \n };\n \n self.mount = function() {\n neverimplemented(\"love.filesystem.mount\"); \n };\n \n self.newFile = function() {\n neverimplemented(\"love.filesystem.newFile\");\n };\n \n self.newFileData = function() {\n neverimplemented(\"love.filesystem.newFileData\"); \n };\n \n self.setIdentity = function() {\n neverimplemented(\"love.filsystem.setIdentity\");\n };\n \n self.setSource = function() {\n neverimplemented(\"love.filesystem.setSource\"); \n };\n \n self.setSymlinkEnabled = function() {\n 
neverimplemented(\"love.filesystem.setSymlinkEnabled\"); \n };\n \n self.unmount = function() {\n neverimplemented(\"love.filesystem.unmount\"); \n };\n }\n\n return FileSystem;\n})();\n;Love = Love || defineLove();\n\nLove.Font = (function() {\n function Font() {\n define(this);\n }\n \n function define(self) {\n self.newGlyphData = function() {\n neverimplemented(\"love.font.newGlyphData\"); \n };\n \n self.newRasterizer = function() {\n neverimplemented(\"love.font.newRasterizer\"); \n };\n }\n\n return Font;\n})();\n;Love = Love || defineLove();\n\nLove.Graphics = (function() {\n function Graphics(width, height) {\n define(this);\n\n if(Love.element == null) {\n this.canvas = new Love.Graphics.Canvas2D(width, height, null, this);\n document.body.appendChild(this.canvas.elem);\n Love.element = this.canvas.elem;\n }\n else {\n this.canvas = new Love.Graphics.Canvas2D(width, height, Love.element, this);\n }\n //Show the canvas that will be on screen\n this.canvas.elem.style.display = \"block\";\n\n this.__mainCanvas = this.canvas;\n this.ctx = this.canvas.ctx;\n this.__matrix = this.canvas.matrix;\n\n this.setColor(255, 255, 255);\n this.setBackgroundColor(0, 0, 0);\n }\n \n function define(self) {\n self.arc = function(mode, x, y, rad, a1, a2, segments) {\n segments = segments || 10;\n var ctx = self.ctx, interval, i, cx, cy;\n ctx.beginPath();\n if(mode == \"fill\") {\n ctx.moveTo(x, y);\n } else { \n ctx.moveTo(x + Math.cos(a1) * rad, y + Math.sin(a1) * rad);\n } \n interval = (a2 - a1) / segments;\n for(i = a1; i <= a2; i += interval) {\n cx = Math.cos(i) * rad + x;\n cy = Math.sin(i) * rad + y;\n ctx.lineTo(cx, cy);\n }\n if(mode == \"fill\") {\n ctx.closePath();\n ctx.fill();\n } else {\n ctx.stroke();\n }\n };\n\n self.circle = function(mode, x, y, rad, segments) {\n if(rad < 0) return;\n self.arc(mode, x, y, rad, 0, Math.PI * 2, segments);\n };\n\n self.clear = function(r, g, b, a) {\n var c, ctx = self.ctx;\n if(r == null) {\n c = 
self.canvas.backgroundColor;\n } else {\n if(typeof r == \"number\") {\n c = new Love.Color(r, g, b, a);\n } else {\n c = new Love.Color(r);\n }\n }\n if(c.a == 0) { return; }\n ctx.save();\n ctx.setTransform(1, 0, 0, 1, 0, 0);\n ctx.fillStyle = c.as_string;\n ctx.globalAlpha = c.a / 255;\n ctx.fillRect(0, 0, self.canvas.width, self.canvas.height);\n ctx.restore();\n };\n\n self.draw = function(drawable, quad, x, y, r, sx, sy, ox, oy, kx, ky) {\n if(typeof quad == \"number\") {\n __drawWhole(drawable, quad || 0, x || 0, y || 0, r || 1, sx || 1, sy || 0, ox || 0, oy || 0, kx || 0);\n } else {\n __drawWithQuad(drawable, quad, x || 0, y || 0, r || 0, sx || 1, sy || 1, ox || 0, oy || 0, kx || 0, ky || 0);\n }\n };\n \n var __drawWhole = function(drawable, x, y, r, sx, sy, ox, oy, kx, ky) {\n var ctx = self.ctx;\n var c = r == 0 ? 1 : Math.cos(r);\n var s = r == 0 ? 0 : Math.sin(r);\n var matrix = self.__matrix.x($M([\n [sx * c - kx * sy * s, ky * sx * c - sy * s, x - ox],\n [sx * s + kx * sy * c, ky * sx * s + sy * c, y - oy],\n [0, 0, 1 ]\n ]));\n \n ctx.save();\n self.__updateTransform(matrix);\n ctx.drawImage(drawable.elem, 0, 0);\n ctx.restore();\n };\n \n var __drawWithQuad = function(drawable, quad, x, y, r, sx, sy, ox, oy, kx, ky) {\n var ctx = self.ctx, w = drawable.getWidth(), h = drawable.getHeight();\n var c = r == 0 ? 1 : Math.cos(r);\n var s = r == 0 ? 
0 : Math.sin(r);\n var matrix = self.__matrix.x($M([\n [sx * c - kx * sy * s, ky * sx * c - sy * s, x - ox],\n [sx * s + kx * sy * c, ky * sx * s + sy * c, y - oy],\n [0, 0, 1 ]\n ]));\n \n ctx.save();\n self.__updateTransform(matrix);\n ctx.drawImage(drawable.elem, quad.x, quad.y, quad.w, quad.h, 0, 0, w, h);\n ctx.restore();\n };\n \n self.line = function(x1, y1, x2, y2) {\n var ctx = self.ctx;\n ctx.beginPath();\n if(typeof x1 == \"number\") {\n ctx.moveTo(x1, y1);\n ctx.lineTo(x2, y2);\n ctx.stroke();\n } else {\n ctx.moveTo(x1.getMember(1), x1.getMember(2));\n ctx.lineTo(x1.getMember(3), x1.getMember(4));\n ctx.stroke();\n }\n ctx.closePath();\n };\n \n self.point = function(x, y) {\n self.ctx.fillRect(x, y, 1, 1);\n };\n \n self.polygon = function(mode, verts) {\n var ctx = self.ctx, i, x, y;\n ctx.beginPath();\n ctx.moveTo(verts.getMember(1), verts.getMember(2));\n for(i = 3; i <= verts.__shine.numValues.length; i += 2) {\n x = verts.getMember(i);\n y = verts.getMember(i + 1);\n ctx.lineTo(x, y);\n }\n ctx.closePath();\n if(mode == \"fill\") {\n ctx.fill();\n } else {\n ctx.stroke();\n }\n };\n \n self.present = function() { /*Uneeded in JS*/ };\n \n self.print = function(text, x, y, r, sx, sy, ox, oy, kx, ky) {\n };\n \n self.printf = function(text, x, y, limit, align, r, sx, sy, ox, oy, kx, ky) {\n };\n \n self.rectangle = function(mode, x, y, w, h) {\n if(mode == \"fill\") {\n self.ctx.fillRect(x, y, w, h);\n } else {\n self.ctx.strokeRect(x, y, w, h);\n }\n };\n \n //Transformations\n self.__updateTransform = function(m) {\n var matrix = m || self.__matrix;\n self.ctx.setTransform(matrix.e(1, 1), matrix.e(2, 1), matrix.e(1, 2), matrix.e(2, 2), matrix.e(1, 3), matrix.e(2, 3)); \n };\n \n self.origin = function() {\n self.__matrix = Matrix.I(3);\n self.__updateTransform();\n };\n \n self.pop = function() {\n self.ctx.restore();\n };\n \n self.push = function() {\n self.ctx.save();\n };\n \n self.scale = function(x, y) {\n self.__matrix = 
self.__matrix.x($M([\n [x, 0, 0],\n [0, y, 0],\n [0, 0, 1]\n ]));\n self.__updateTransform();\n };\n \n self.translate = function(x, y) {\n self.__matrix = self.__matrix.x($M([\n [1, 0, x],\n [0, 1, y],\n [0, 0, 1]\n ]));\n self.__updateTransform();\n };\n \n self.rotate = function(rad) {\n var c = Math.cos(rad);\n var s = Math.sin(rad);\n self.__matrix = self.__matrix.x($M([\n [c, -s, 0],\n [s, c, 0],\n [0, 0, 1]\n ]));\n self.__updateTransform();\n };\n \n self.shear = function(x, y) {\n self.__matrix = self.__matrix.x($M([\n [1, y, 0],\n [x, 1, 0],\n [0, 0, 1]\n ])); \n self.__updateTransform();\n };\n \n //Constructors\n self.newCanvas = function(width, height) {\n return new Love.Graphics.Canvas2D(width, height, this);\n };\n \n self.newImage = function(path) {\n return new Love.Graphics.Image(path); \n };\n \n self.newQuad = function(x, y, w, h, sw, sh) {\n return new Love.Graphics.Quad(x, y, w, h); \n };\n \n self.newFont = function(name, size) {\n return new Love.Graphics.Font(name, size); \n };\n \n self.newImageFont = function(name, glyphs) {\n return new Love.Graphics.ImageFont(name, glyphs); \n };\n \n //Window type things\n self.getWidth = function() {\n return self.canvas.width;\n };\n \n self.getHeight = function() {\n return self.canvas.height;\n };\n \n self.getDimensions = function() {\n return self.canvas.getDimensions();\n };\n\n //State\n //TODO: Implement all state functions\n self.getBackgroundColor = function() {\n var c = self.canvas.backgroundColor;\n return [ c.r, c.g, c.b, c.a ]; \n };\n \n self.getBlendMode = function() {\n var c = self.ctx;\n if(c.globalCompositeOperation == \"source-over\") {\n return \"alpha\";\n } else if(c.globalCompositeOperation == \"multiply\") {\n return \"multiplicative\";\n } else if(c.globalCompositeOperation == \"lighten\") {\n return \"additive\";\n } else {\n return \"normal\";\n }\n };\n \n self.getCanvas = function() {\n return self.canvas; \n };\n \n self.getColor = function() {\n var c = new 
Love.Color(self.ctx.fillStyle);\n return [c.r, c.g, c.b, self.ctx.globalAlpha * 255];\n };\n \n self.setCanvas = function(canvas) {\n self.canvas = canvas || self.__mainCanvas;\n self.ctx = self.canvas.ctx;\n self.__matrix = self.canvas.matrix;\n self.__updateTransform();\n };\n \n self.setColor = function(r, g, b, a) {\n var c = new Love.Color(r, g, b, a), ctx = self.ctx;\n ctx.fillStyle = c.as_string;\n ctx.strokeStyle = c.as_string;\n ctx.globalAlpha = c.a / 255;\n };\n\n self.setBackgroundColor = function(r, g, b, a) {\n self.canvas.setBackgroundColor(r, g, b, a);\n };\n }\n \n return Graphics;\n})();\n\n\n//TODO: Look at the pull request on punchdrunk for ideas to make this proper\nLove.Graphics.Font = (function() {\n function Font(name, size) {\n define(this);\n \n this.name = name;\n this.size = size;\n \n this.code = size + \"px \" + name;\n }\n \n function define(self) {\n //Most of these functions will not be properly implemented\n self.getAscent = function() {\n return 0;\n };\n \n self.getBaseline = function() {\n return 0;\n };\n \n self.getDescent = function() {\n return 0;\n };\n \n self.getFilter = function() {\n return [\"nearest\", \"nearest\", 1];\n };\n \n self.getHeight = function() {\n return self.size;\n };\n \n self.getLineHeight = function() {\n return self.size;\n };\n \n self.getWidth = function(_, line) {\n unimplemented(\"Font:getWidth\");\n };\n \n self.getWrap = function(_, lines, width) {\n unimplemented(\"Font:getWrap\");\n };\n \n self.hasGlyphs = function() {\n return false; \n };\n \n self.setFilter = function() {\n unimplemented(\"Font:setFilter\");\n };\n \n self.setLineHeight = function() {\n unimplemented(\"Font:setLineHeight\"); \n };\n }\n \n return Font;\n})();\n\nLove.Graphics.ImageFont = (function() {\n function ImageFont(name, glyphs) {\n this.name = name;\n this.glyphs = glyphs;\n this.chars = {};\n \n define(this);\n }\n \n function define(self) {\n new Love.Graphics.Image(self.name, function(img) {\n self.__img = 
img;\n \n var charwidth = img.getWidth() / self.glyphs.length,\n i;\n for(i = 0; i < self.glyphs.length; i++) {\n self.chars[self.glyphs.charAt(i)] = new Love.Graphics.Quad(i * charwidth, 0, charwidth, img.getHeight());\n }\n });\n \n //Most of these functions will not be properly implemented\n self.getAscent = function() {\n return 0;\n };\n \n self.getBaseline = function() {\n return 0;\n };\n \n self.getDescent = function() {\n return 0; \n };\n \n self.getFilter = function() {\n return [\"nearest\", \"nearest\", 1];\n };\n \n self.getHeight = function() {\n return self.__img.getHeight();\n };\n \n self.getLineHeight = function() {\n return self.__img.getHeight();\n };\n \n self.getWidth = function(_, line) {\n return self.__img.getWidth() / self.glyphs.length;\n };\n \n self.getWrap = function(_, lines, width) {\n unimplemented(\"ImageFont:getWrap\");\n };\n \n self.hasGlyphs = function() {\n return false;\n };\n \n self.setFilter = function() {\n unimplemented(\"ImageFont:setFilter\");\n };\n \n self.setLineHeight = function() {\n unimplemented(\"ImageFont:setLineHeight\");\n };\n }\n \n return ImageFont;\n})();\n\nLove.Graphics.Image = (function() {\n function LImage(path, onload) {\n define(this);\n \n var cFunc = wrap(this, function() {\n if(onload) onload.call(null, this);\n });\n \n if(typeof path == \"string\") {\n this.elem = document.querySelector(\"[src='lua/\"+path+\"']\");\n if(this.elem == null) {\n this.elem = document.createElement(\"img\");\n this.elem.src = \"lua/\" + path;\n this.elem.onload = cFunc;\n } else {\n cFunc.call();\n }\n } else {\n this.elem = document.createElement(\"img\");\n this.elem.src = \"data:image/\" + path.getExtension(path) + \";base64,\" + path.getString(path);\n this.elem.onload = cFunc;\n }\n }\n \n function define(self) {\n self.getData = function() {\n return new shine.Table();\n };\n \n self.getDimensions = function() {\n return [self.elem.width, self.elem.height]; \n };\n \n self.getFilter = function() {\n 
neverimplemented(\"Image:getFilter\"); \n };\n \n self.getHeight = function() {\n return self.elem.height; \n };\n \n self.getMipmapFilter = function() {\n neverimplemented(\"Image:getMipmapFilter\"); \n };\n \n self.getWidth = function() {\n return self.elem.width; \n };\n \n self.getWrap = function() {\n return \"none\"; \n };\n \n self.isCompressed = function() {\n return false; \n };\n \n self.refresh = function() {\n unimplemented(\"Image:refresh\"); \n };\n \n self.setFilter = function() {\n neverimplemented(\"Image:setFilter\"); \n };\n \n self.setMipmapFilter = function() {\n neverimplemented(\"Image:setMipmapFilter\"); \n };\n \n self.setWrap = function() {\n neverimplemented(\"Image:setWrap\");\n };\n }\n \n return LImage;\n})();\n\nLove.Graphics.Quad = (function() {\n function Quad(x, y, w, h) {\n this.x = x;\n this.y = y;\n this.w = w;\n this.h = h;\n }\n \n Quad.prototype.getViewport = function(self) {\n return [self.x, self.y, self.w, self.h];\n };\n \n Quad.prototype.setViewport = function(self, x, y, w, h) {\n self.x = x;\n self.y = y;\n self.w = w;\n self.h = h;\n };\n \n return Quad;\n})();\n\nLove.Graphics.Canvas2D = (function() {\n function Canvas2D(width, height, elem, graphics) {\n define(this, graphics);\n\n this.elem = elem || document.createElement(\"canvas\");\n //Hide canvas by default for off-screen rendering\n this.elem.style.display = \"none\";\n this.setDimensions(width, height);\n \n this.matrix = $M([\n [1, 0, 0],\n [0, 1, 0],\n [0, 0, 1]\n ]);\n\n this.ctx = elem.getContext(\"2d\");\n this.setBackgroundColor(0, 0, 0, 255);\n }\n \n function define(self, graphics) {\n self.getDimensions = function() {\n return [self.width, self.height];\n };\n \n self.getFilter = function() {\n if(self.ctx.imageSmoothingEnabled) {\n return \"linear\";\n } else {\n return \"nearest\";\n }\n };\n \n self.getFormat = function() {\n return \"normal\"; \n };\n \n self.getHeight = function() {\n return self.height;\n };\n \n self.getImageData = function() 
{\n var data = self.ctx.getImageData(0, 0, self.width, self.height);\n return new ImageData(data);\n };\n \n self.getMSAA = function() {\n return 0; \n };\n \n self.getPixel = function(_, x, y) {\n var data = self.ctx.getImageData(x, y, 1, 1);\n return [data[0], data[1], data[2], data[3]];\n };\n \n self.getWidth = function() {\n return self.width;\n };\n \n self.getWrap = function() {\n return \"none\";\n };\n \n self.renderTo = function(_, func) {\n graphics.setCanvas(self);\n func.call();\n graphics.setCanvas();\n };\n \n self.setFilter = function(_, filter) {\n var smoothing = filter == \"linear\", ctx = self.ctx;\n ctx.imageSmoothingEnabled = smoothing;\n ctx.mozImageSmoothingEnabled = smoothing;\n ctx.webkitImageSmoothingEnabled = smoothing;\n ctx.msImageSmoothingEnabled = smoothing;\n };\n \n self.setWrap = function() {\n unimplemented(\"Canvas:setWrap\"); \n };\n\n //These are non-standard but are used thoughout the engine\n self.setDimensions = function(width, height) {\n self.setWidth(width);\n self.setHeight(height);\n };\n\n self.setWidth = function(width) {\n self.width = width;\n self.elem.setAttribute('width', width);\n };\n\n self.setHeight = function(height) {\n self.height = height;\n self.elem.setAttribute('height', height);\n };\n \n self.setBackgroundColor = function(r, g, b, a) {\n var c = new Love.Color(r, g, b, a);\n self.backgroundColor = c;\n };\n }\n \n return Canvas2D;\n})();\n;Love = Love || defineLove();\n\nLove.Joystick = (function() {\n function Joystick() {\n\n }\n\n return Joystick;\n})();;Love = Love || defineLove();\n\n//TODO: Add key repeating and text-input\nLove.Keyboard = (function() {\n function Keyboard(event) {\n define(this, event);\n }\n \n function define(self, event) {\n var keysDown = {};\n var repeat = false;\n \n document.addEventListener(\"keydown\", function(e) {\n e.preventDefault();\n e.stopPropagation();\n \n key = getKey(e);\n if(keysDown[key] && repeat) {\n event.push(\"keypressed\", key, true);\n }\n 
if(!keysDown[key] && !repeat) {\n event.push(\"keypressed\", key, false);\n }\n keysDown[key] = true;\n }, true);\n \n document.addEventListener(\"keyup\", function(e) {\n e.preventDefault();\n e.stopPropagation();\n \n key = getKey(e);\n keysDown[key] = false;\n \n event.push(\"keyreleased\", key);\n }, true);\n \n var keys = {\n 8: \"backspace\",\n 9: \"tab\",\n 13: \"return\",\n 16: \"shift\",\n 17: \"ctrl\",\n 18: \"alt\",\n 19: \"pause\", 20: \"capslock\", 27: \"escape\",\n 33: \"pageup\", 34: \"pagedown\", 35: \"end\", 36: \"home\", 45: \"insert\", 46: \"delete\",\n 37: \"left\", 38: \"up\", 39: \"right\", 40: \"down\",\n 91: \"lmeta\", 92: \"rmeta\", 93: \"mode\",\n 96: \"kp0\", 97: \"kp1\", 98: \"kp2\", 99: \"kp3\", 100: \"kp4\", 101: \"kp5\",\n 102: \"kp6\", 103: \"kp7\", 104: \"kp8\", 105: \"kp9\",\n 106: \"kp*\", 107: \"kp+\", 109: \"kp-\", 110: \"kp.\", 111: \"kp/\",\n 112: \"f1\", 113: \"f2\", 114: \"f3\", 115: \"f4\", 116: \"f5\", 117: \"f6\", 118: \"f7\",\n 119: \"f8\", 120: \"f9\", 121: \"f10\", 122: \"f11\", 123: \"f12\",\n 144: \"numlock\", 145: \"scrolllock\",\n 186: \",\", 187: \"=\", 188: \",\", 189: \"-\", 190: \".\", 191: \"/\", 192: \"`\",\n 219: \"[\", 220: \"\\\\\",221: \"]\", 222: \"'\"\n };\n \n var shiftKeys = {\n 192:\"~\", 48:\")\", 49:\"!\", 50:\"@\", 51:\"#\", 52:\"$\", 53:\"%\", 54:\"^\", 55:\"&\", 56:\"*\", 57:\"(\", 109:\"_\", 61:\"+\",\n 219:\"{\", 221:\"}\", 220:\"|\", 59:\":\", 222:\"\\\"\", 188:\"<\", 189:\">\", 191:\"?\",\n 96:\"insert\", 97:\"end\", 98:\"down\", 99:\"pagedown\", 100:\"left\", 102:\"right\", 103:\"home\", 104:\"up\", 105:\"pageup\"\n };\n \n var rightKeys = {\n 16: \"rshift\", 17: \"rctrl\", 18: \"ralt\"\n };\n \n function getKey(e) {\n var code, key;\n code = e.which;\n if(event.location && event.location > 1) {\n key = rightKeys[code];\n } else if(event.shiftKey) {\n key = shiftKeys[code] || keys[code];\n } else {\n key = keys[code];\n }\n \n if (typeof key == \"undefined\") {\n key = 
String.fromCharCode(code);\n if(!e.shiftKey) {\n key = key.toLowerCase();\n }\n }\n return key;\n }\n \n self.hasKeyRepeat = function() {\n return repeat;\n };\n \n self.hasTextInput = function() {\n return false;\n };\n \n self.isDown = function(key) {\n if(!keysDown[key]) {\n return false;\n } else {\n return keysDown[key];\n }\n };\n \n self.setKeyRepeat = function(r) {\n repeat = r;\n };\n \n self.setTextInput = function() {\n unimplemented(\"love.keyboard.setTextInput\"); \n };\n \n self.getScancodeFromKey = function() {\n neverimplemented(\"love.keyboard.getScancodeFromKey\"); \n };\n \n self.getKeyFromScancode = function() {\n neverimplemented(\"love.keyboard.getKeyFromScancode\"); \n };\n }\n\n return Keyboard;\n})();;function defineLove() {\n return (function() {\n function Love(elem, conf) {\n wrap = function(t, f) {\n return function() {\n f.apply(t, arguments);\n }\n };\n \n unimplemented = function(name) {\n console.warn(\"[\", name, \"] hasn't been implemented yet\"); \n };\n \n neverimplemented = function(name) {\n console.warn(\"[\", name, \"] can not be implemented in JS\");\n };\n \n elem = elem || null;\n Love.element = elem;\n \n this.graphics = new Love.Graphics(conf.width, conf.height);\n this.event = new Love.Event();\n this.window = new Love.Window(this.graphics, this.event);\n this.audio = new Love.Audio();\n this.filesystem = new Love.FileSystem();\n this.font = new Love.Font();\n this.joystick = new Love.Joystick();\n this.keyboard = new Love.Keyboard(this.event);\n this.math = new Love.Math();\n this.mouse = new Love.Mouse(this.event, this.window);\n this.sound = new Love.Sound();\n this.system = new Love.System();\n this.timer = new Love.Timer();\n \n this.run = wrap(this, this.run);\n }\n \n Love.prototype.load = function() { };\n Love.prototype.update = function() { };\n Love.prototype.draw = function() { };\n Love.prototype.quit = function() { };\n Love.prototype.keypressed = function() { };\n Love.prototype.keyreleased = function() 
{ };\n Love.prototype.mousefocus = function() { };\n Love.prototype.mousemoved = function() { };\n Love.prototype.mousepressed = function() { };\n Love.prototype.mousereleased = function() { };\n Love.prototype.resize = function() { };\n Love.prototype.run = function() {\n this.load.call();\n this.timer.step(); // Step the timer so it doesn't count load time\n \n var i = 0, e;\n var gameloop = (function(self) {\n return function() {\n for(i = 0; i < self.event.queue.length; i++) {\n e = self.event.queue[i];\n self[e[0]].apply(null, e.slice(1, e.length));\n }\n self.event.clear();\n \n self.timer.step();\n self.update.call(null, self.timer.getDelta());\n \n self.graphics.origin()\n self.graphics.clear();\n self.draw.call();\n \n self.timer.nextFrame(gameloop);\n };\n })(this);\n \n this.timer.nextFrame(gameloop);\n };\n Love.prototype.visible = function() { };\n \n return Love;\n })();\n \n};Love = Love || defineLove();\n\nLove.Math = (function() {\n function LMath() {\n define(this);\n }\n \n function define(self) {\n \n }\n\n return LMath;\n})();;Love = Love || defineLove();\n\n//TODO: Implement Pointer-lock api for setGrabbed\nLove.Mouse = (function() {\n function Mouse(event, win) {\n define(this, event, win);\n } \n \n function define(self, event, win) {\n var buttons = {\n \"l\" : false,\n \"m\" : false,\n \"r\" : false,\n \"wd\": false,\n \"wu\": false,\n \"x1\": false,\n \"x2\": false\n };\n \n var love_buttons = [\"l\", \"m\", \"r\", \"x1\", \"x2\"];\n \n var __x = 0;\n var __y = 0;\n var __cursor = new Love.Mouse.Cursor();\n \n Love.element.addEventListener(\"mousedown\", function(e) {\n var x, y, dims, rect = Love.element.getBoundingClientRect();\n e.preventDefault();\n e.stopPropagation();\n x = e.clientX - rect.left;\n y = e.clientY - rect.top;\n if(win.getFullscreen()) {\n dims = win.getDimensions();\n x *= (dims[0] / window.innerWidth);\n y *= (dims[1] / window.innerHeight);\n }\n \n buttons[e.which] = true;\n \n __x = x;\n __y = y;\n 
event.push(\"mousepressed\", x, y, love_buttons[e.which - 1]);\n }, true);\n \n Love.element.addEventListener(\"mouseup\", function(e) {\n var x, y, dims, rect = Love.element.getBoundingClientRect();\n e.preventDefault();\n e.stopPropagation();\n x = e.clientX - rect.left;\n y = e.clientY - rect.top;\n if(win.getFullscreen()) {\n dims = win.getDimensions();\n x *= (dims[0] / window.innerWidth);\n y *= (dims[1] / window.innerHeight);\n }\n \n buttons[e.which] = false;\n \n __x = x;\n __y = y;\n event.push(\"mousereleased\", x, y, love_buttons[e.which - 1]);\n }, true);\n \n Love.element.addEventListener(\"mousemove\", function(e) {\n var x, y, dims, dx, dy, rect = Love.element.getBoundingClientRect();\n e.preventDefault();\n e.stopPropagation();\n x = e.clientX - rect.left;\n y = e.clientY - rect.top;\n if(win.getFullscreen()) {\n dims = win.getDimensions();\n x *= (dims[0] / window.innerWidth);\n y *= (dims[1] / window.innerHeight);\n }\n dx = x - __x;\n dy = y - __y;\n \n __x = x;\n __y = y;\n event.push(\"mousemoved\", x, y, dx, dy);\n }, true);\n \n Love.element.addEventListener(\"wheel\", function(e) {\n var x, y, dims, rect = Love.element.getBoundingClientRect(), up;\n e.preventDefault();\n e.stopPropagation();\n x = e.clientX - rect.left;\n y = e.clientY - rect.top;\n if(win.getFullscreen()) {\n dims = win.getDimensions();\n x *= (dims[0] / window.innerWidth);\n y *= (dims[1] / window.innerHeight);\n }\n up = e.deltaY < 0;\n event.push(\"mousepressed\", x, y, up ? 
\"wu\" : \"wd\");\n }, true);\n \n self.getCursor = function() {\n return __cursor;\n };\n \n self.getPosition = function() {\n return [__x, __y]; \n };\n \n self.getRelativeMode = function() {\n return false; \n };\n \n self.getSystemCursor = function(type) {\n return new Love.Mouse.Cursor(type); \n };\n \n self.getX = function() {\n return __x;\n };\n \n self.getY = function() {\n return __y;\n };\n \n self.isDown = function(button) {\n return buttons[button];\n };\n \n self.isGrabbed = function() {\n return false; \n };\n \n self.isVisible = function() {\n return __cursor.__visible;\n };\n \n self.newCursor = function(data) {\n unimplemented(\"love.mouse.newCursor\"); \n };\n \n self.setCursor = function(cursor) {\n __cursor = cursor;\n Love.element.style.cursor = __cursor.__getHtmlType();\n };\n \n self.setGrabbed = function() {\n neverimplemented(\"love.mouse.setGrabbed\"); \n };\n \n self.setPosition = function() {\n neverimplemented(\"love.mouse.setPosition\"); \n };\n \n self.setRelativeMode = function() {\n neverimplemented(\"love.mouse.setRelativeMode\"); \n };\n \n self.setVisible = function(visible) {\n __cursor.__visible = visible;\n Love.element.style.cursor = __cursor.__getHtmlType();\n };\n \n self.setX = function() {\n neverimplemented(\"love.mouse.setX\");\n };\n \n self.setY = function() {\n neverimplemented(\"love.mouse.setY\"); \n };\n }\n\n return Mouse;\n})();\n\nLove.Mouse.Cursor = (function() {\n function Cursor(type, visible) {\n this.type = type || \"arrow\";\n this.__visible = visible != null ? 
visible : true;\n }\n \n var htmlcursor = {\n \"arrow\" : \"default\",\n \"ibeam\" : \"text\",\n \"wait\" : \"wait\",\n \"waitarrow\" : \"progress\",\n \"crosshair\" : \"crosshair\",\n \"sizenwse\" : \"nwse-resize\",\n \"sizenesw\" : \"nesw-resize\",\n \"sizewe\" : \"ew-resize\",\n \"sizens\" : \"ns-resize\",\n \"sizeall\" : \"move\",\n \"no\" : \"not-allowed\",\n \"hand\" : \"grab\"\n }\n \n Cursor.prototype.__getHtmlType = function() {\n return !this.__visible ? \"none\" : htmlcursor[this.type];\n };\n \n Cursor.prototype.getType = function(self) {\n return self.type;\n };\n \n return Cursor;\n})();;//Ha ha ha ha ha no....;Love = Love || defineLove();\n\nLove.Sound = (function() {\n function Sound() {\n define(this);\n }\n \n function define(self) {\n self.newDecoder = function() {\n neverimplemented(\"love.sound.newDecoder\"); \n };\n \n self.newSoundData = function() {\n neverimplemented(\"love.sound.newSoundData\"); \n };\n }\n\n return Sound;\n})();;Love = Love || defineLove();\n\nLove.System = (function() {\n function System() {\n define(this);\n \n navigator.battery = navigator.battery || navigator.webkitBattery || navigator.mozBattery || navigator.msBattery;\n if(!navigator.battery) {\n //NOTE: This will not update as the program continues\n navigator.getBattery().then(function(battery) {\n navigator.battery = battery; \n });\n }\n }\n \n function define(self) {\n var clipboardText = \"\";\n \n self.getClipboardText = function() {\n return clipboardText;\n };\n \n self.setClipboardText = function(text) {\n clipboardText = text; \n };\n \n self.getOS = function() {\n return \"Web \" + navigator.appVersion;\n };\n \n self.getPowerInfo = function() {\n if(navigator.battery) {\n var state = \"\",\n percent = Math.floor(navigator.battery.level * 100),\n discharge = navigator.battery.dischargingTime;\n if(navigator.battery.charging) {\n if(percent >= 99) {\n state = \"charged\";\n } else {\n state = \"charging\";\n }\n } else {\n state = \"battery\";\n }\n 
return [state, percent, discharge];\n } else {\n return [\"nobattery\", null, null];\n }\n };\n \n self.getProcessorCount = function() {\n return navigator.hardwareConcurrency || 1;\n };\n \n self.openURL = function(url) {\n window.open(url);\n };\n }\n\n return System;\n})();;Love = Love || defineLove();\n\nLove.Timer = (function() {\n function Timer() {\n define(this);\n \n window.requestAnimationFrame = window.requestAnimationFrame || function(c) {\n setTimeout(c, 60/1000);\n };\n }\n \n function define(self) {\n var dtLimit = 0.25;\n \n var dt = 0;\n var tp = Date.now();\n \n self.getDelta = function() {\n return dt;\n };\n \n self.getTime = function() {\n return tp;\n };\n \n self.getFPS = function() {\n if(dt == 0) { return 0; }\n return 1 / dt;\n };\n \n self.sleep = function() {\n unimplemented(\"love.timer.sleep\"); \n };\n \n self.step = function() {\n var delta = (Date.now() - tp) / 1000;\n dt = Math.max(0, Math.min(dtLimit, delta));\n tp += dt * 1000;\n };\n \n self.nextFrame = function(callback) {\n window.requestAnimationFrame(callback);\n };\n }\n\n return Timer;\n})();;Love = Love || defineLove();\n\nLove.Window = (function() {\n function Window(graphics, event) {\n define(this, graphics);\n \n window.onbeforeunload = function() {\n event.quit();\n };\n \n window.onblur = function() {\n event.push(\"visible\", false);\n event.push(\"mousefocus\", false);\n };\n \n window.onfocus = function() {\n event.push(\"visible\", true);\n event.push(\"mousefocus\", true);\n };\n \n document.oncontextmenu = function(e) {\n e.preventDefault();\n };\n }\n \n function define(self, graphics) {\n var ts = 0;\n var handler = function() {\n var elem = document.fullscreenElement\n || document.mozFullScreenElement\n || document.webkitFullscreenElement\n || document.msFullScreenElement;\n if(elem != Love.element) {\n fullscreen = false;\n self.setFullscreen(false, true);\n } else {\n fullscreen = true;\n }\n };\n document.addEventListener(\"webkitfullscreenchange\", 
handler, true);\n document.addEventListener(\"mozfullscreenchange\", handler, true);\n document.addEventListener(\"fullscreenchange\", handler, true);\n document.addEventListener(\"MSFullscreenchange\", handler, true);\n \n var fullscreen = false;\n \n self.fromPixels = function() {\n unimplemented(\"love.window.fromPixels\"); \n };\n \n self.getDesktopDimensions = function() {\n return [window.screen.width, window.screen.height];\n };\n \n self.getDimensions = function() {\n return graphics.getDimensions(); \n };\n \n self.getDisplayCount = function() {\n return 1;\n };\n \n self.getDisplayNames = function() {\n return window.document.title; \n };\n \n self.getFullscreen = function() {\n return fullscreen;\n };\n \n self.getFullscreenModes = function() {\n return [ new shine.Table({\n width: window.screen.width,\n height: window.screen.height \n }) ];\n };\n \n self.getHeight = function() {\n return graphics.getHeight(); \n };\n \n self.getIcon = function() {\n unimplemented(\"love.window.getIcon\"); \n };\n \n self.getMode = function() {\n return [self.getWidth(), self.getHeight(), null]; \n };\n \n self.getPixelScale = function() {\n return window.devicePixelRatio; \n };\n \n self.getPosition = function() {\n return [0, 0, 1]; \n };\n \n self.getTitle = function() {\n return window.document.title; \n };\n \n self.getWidth = function() {\n return graphics.getWidth(); \n };\n \n self.hasFocus = function() {\n return document.activeElement == Love.element; \n };\n \n self.hasMouseFocus = function() {\n return document.activeElement == Love.element; \n };\n \n self.isCreated = function() {\n return true; \n };\n \n self.isVisible = function() {\n return true; \n };\n \n self.setFullscreen = function(fs, fromCallback) {\n fromCallback = fromCallback == null ? 
false : fromCallback;\n if(fs) {\n Love.element.requestFullscreen = Love.element.mozRequestFullScreen\n || Love.element.webkitRequestFullscreen\n || Love.element.msRequestFullscreen\n || Love.element.requestFullscreen;\n document.getElementById(\"fs-text\").setAttribute(\"style\", \"display: block;\");\n document.getElementById(\"fs-btn\").addEventListener(\"click\", function() {\n ts = Date.now();\n Love.element.requestFullscreen();\n var dims = self.getDesktopDimensions();\n Love.element.setAttribute(\"style\", \"width: \" + dims[0] + \"px; height: \" + dims[1] + \"px;\");\n });\n } else {\n document.exitFullscreen = document.exitFullscreen\n || document.mozCancelFullScreen\n || document.webkitExitFullscreen\n || document.msExitFullscreen;\n document.exitFullscreen();\n var dims = self.getDimensions();\n Love.element.setAttribute(\"style\", \"width: \" + dims[0] + \"px; height: \" + dims[1] + \"px;\");\n //Dont remove the message unless the love2d program says to\n if(!fromCallback) {\n document.getElementById(\"fs-text\").setAttribute(\"style\", \"display: none;\");\n document.getElementById(\"fs-btn\").removeEventListener(\"click\", null);\n }\n }\n };\n \n self.setIcon = function() {\n unimplemented(\"love.window.setIcon\"); \n };\n \n self.setMode = function(width, height, flags) {\n graphics.__mainCanvas.setDimensions(width, height);\n if(flags.getMember(\"fullscreen\")) {\n self.setFullscreen(flags.getMember(\"fullscreen\"));\n }\n };\n \n self.setPosition = function(x, y) {\n //Unneeded in JS \n };\n \n self.setTitle = function(title) {\n window.document.title = title; \n };\n \n self.showMessageBox = function(title, message, type, attachtowindow) {\n window.alert(title + \"\\n \" + message); \n };\n \n self.toPixels = function() {\n unimplemented(\"love.window.toPixels\"); \n };\n }\n\n return Window;\n})();\n", "meta": {"content_hash": "51738947df609d3ca9e82f1f2349fc74", "timestamp": "", "source": "github", "line_count": 1722, "max_line_length": 131, 
"avg_line_length": 29.42624854819977, "alnum_prop": 0.4500710451531418, "repo_name": "brendanfh/love-web", "id": "e7198c6d82261a1f14ccbf82b4ca7c033d76a5a0", "size": "50672", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "js/love.js", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "824"}, {"name": "JavaScript", "bytes": "294458"}, {"name": "Lua", "bytes": "1510"}]}} {"text": "\"\"\"\n Kubernetes\n\n No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)\n\n OpenAPI spec version: v1.6.1\n \n Generated by: https://github.com/swagger-api/swagger-codegen.git\n\"\"\"\n\n\nfrom __future__ import absolute_import\n\nimport os\nimport sys\nimport unittest\n\nimport kubernetes.client\nfrom kubernetes.client.rest import ApiException\nfrom kubernetes.client.models.extensions_v1beta1_deployment_rollback import ExtensionsV1beta1DeploymentRollback\n\n\nclass TestExtensionsV1beta1DeploymentRollback(unittest.TestCase):\n \"\"\" ExtensionsV1beta1DeploymentRollback unit test stubs \"\"\"\n\n def setUp(self):\n pass\n\n def tearDown(self):\n pass\n\n def testExtensionsV1beta1DeploymentRollback(self):\n \"\"\"\n Test ExtensionsV1beta1DeploymentRollback\n \"\"\"\n model = kubernetes.client.models.extensions_v1beta1_deployment_rollback.ExtensionsV1beta1DeploymentRollback()\n\n\nif __name__ == '__main__':\n unittest.main()\n", "meta": {"content_hash": "706052f5d4357e068cdc389fdc624aa2", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 117, "avg_line_length": 25.1, "alnum_prop": 0.7390438247011952, "repo_name": "skuda/client-python", "id": "0a54b2cb12592a727de268f5093fcae0e9a866b1", "size": "1021", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "kubernetes/test/test_extensions_v1beta1_deployment_rollback.py", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Python", "bytes": "5907789"}, {"name": "Shell", "bytes": "8195"}]}} {"text": 
"\n UserGuideLibrary\n\n", "meta": {"content_hash": "585a3d12bd733597d6a0547746be7cd1", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 53, "avg_line_length": 26.333333333333332, "alnum_prop": 0.7215189873417721, "repo_name": "binarylife/Ydkd", "id": "07be03c67f6be7890f8e8c32517df94f16108a3d", "size": "79", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "library/UserGuide/src/main/res/values/strings.xml", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "902080"}]}} {"text": "package govaluate\n\nimport (\n\t\"bytes\"\n\t\"fmt\"\n\t\"reflect\"\n\t\"testing\"\n\t\"time\"\n\t\"unicode\"\n)\n\n/*\n\tRepresents a test of parsing all tokens correctly from a string\n*/\ntype TokenParsingTest struct {\n\tName string\n\tInput string\n\tFunctions map[string]ExpressionFunction\n\tExpected []ExpressionToken\n}\n\nfunc TestConstantParsing(test *testing.T) {\n\n\ttokenParsingTests := []TokenParsingTest{\n\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single numeric\",\n\t\t\tInput: \"1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single two-digit numeric\",\n\t\t\tInput: \"50\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 50.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Zero\",\n\t\t\tInput: \"0\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 0.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"One digit hex\",\n\t\t\tInput: \"0x1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Two digit hex\",\n\t\t\tInput: \"0x10\",\n\t\t\tExpected: 
[]ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 16.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Hex with lowercase\",\n\t\t\tInput: \"0xabcdef\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 11259375.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Hex with uppercase\",\n\t\t\tInput: \"0xABCDEF\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 11259375.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single string\",\n\t\t\tInput: \"'foo'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single time, RFC3339, only date\",\n\t\t\tInput: \"'2014-01-02'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: TIME,\n\t\t\t\t\tValue: time.Date(2014, time.January, 2, 0, 0, 0, 0, time.Local),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single time, RFC3339, with hh:mm\",\n\t\t\tInput: \"'2014-01-02 14:12'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: TIME,\n\t\t\t\t\tValue: time.Date(2014, time.January, 2, 14, 12, 0, 0, time.Local),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single time, RFC3339, with hh:mm:ss\",\n\t\t\tInput: \"'2014-01-02 14:12:22'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: TIME,\n\t\t\t\t\tValue: time.Date(2014, time.January, 2, 14, 12, 22, 0, time.Local),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single boolean\",\n\t\t\tInput: \"true\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: BOOLEAN,\n\t\t\t\t\tValue: 
true,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single large numeric\",\n\t\t\tInput: \"1234567890\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1234567890.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single floating-point\",\n\t\t\tInput: \"0.5\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 0.5,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single large floating point\",\n\t\t\tInput: \"3.14567471\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 3.14567471,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single false boolean\",\n\t\t\tInput: \"false\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: BOOLEAN,\n\t\t\t\t\tValue: false,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Single internationalized string\",\n\t\t\tInput: \"'\u00c6\u0166\u01fd\u0d08\u16a5\u0b87\u0e04\u0678'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"\u00c6\u0166\u01fd\u0d08\u16a5\u0b87\u0e04\u0678\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Single internationalized parameter\",\n\t\t\tInput: \"\u00c6\u0166\u01fd\u0d08\u16a5\u0b87\u0e04\u0678\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"\u00c6\u0166\u01fd\u0d08\u16a5\u0b87\u0e04\u0678\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Parameterless function\",\n\t\t\tInput: \"foo()\",\n\t\t\tFunctions: map[string]ExpressionFunction{\"foo\": noop},\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: FUNCTION,\n\t\t\t\t\tValue: noop,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: 
CLAUSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE_CLOSE,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Single parameter function\",\n\t\t\tInput: \"foo('bar')\",\n\t\t\tFunctions: map[string]ExpressionFunction{\"foo\": noop},\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: FUNCTION,\n\t\t\t\t\tValue: noop,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"bar\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE_CLOSE,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Multiple parameter function\",\n\t\t\tInput: \"foo('bar', 1.0)\",\n\t\t\tFunctions: map[string]ExpressionFunction{\"foo\": noop},\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: FUNCTION,\n\t\t\t\t\tValue: noop,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"bar\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: SEPARATOR,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE_CLOSE,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Nested function\",\n\t\t\tInput: \"foo(foo('bar'), 1.0, foo(2.0))\",\n\t\t\tFunctions: map[string]ExpressionFunction{\"foo\": noop},\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: FUNCTION,\n\t\t\t\t\tValue: noop,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE,\n\t\t\t\t},\n\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: FUNCTION,\n\t\t\t\t\tValue: noop,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"bar\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: 
CLAUSE_CLOSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: SEPARATOR,\n\t\t\t\t},\n\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: SEPARATOR,\n\t\t\t\t},\n\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: FUNCTION,\n\t\t\t\t\tValue: noop,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 2.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE_CLOSE,\n\t\t\t\t},\n\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE_CLOSE,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Function with modifier afterwards (#28)\",\n\t\t\tInput: \"foo() + 1\",\n\t\t\tFunctions: map[string]ExpressionFunction{\"foo\": noop},\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: FUNCTION,\n\t\t\t\t\tValue: noop,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE_CLOSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: MODIFIER,\n\t\t\t\t\tValue: \"+\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Function with modifier afterwards and comparator\",\n\t\t\tInput: \"(foo()-1) > 3\",\n\t\t\tFunctions: map[string]ExpressionFunction{\"foo\": noop},\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: FUNCTION,\n\t\t\t\t\tValue: noop,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE_CLOSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: MODIFIER,\n\t\t\t\t\tValue: \"-\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 
1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE_CLOSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \">\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 3.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Double-quoted string added to square-brackted param (#59)\",\n\t\t\tInput: \"\\\"a\\\" + [foo]\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"a\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: MODIFIER,\n\t\t\t\t\tValue: \"+\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Accessor variable\",\n\t\t\tInput: \"foo.Var\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: ACCESSOR,\n\t\t\t\t\tValue: []string{\"foo\", \"Var\"},\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\t\t\tName: \"Accessor function\",\n\t\t\tInput: \"foo.Operation()\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: ACCESSOR,\n\t\t\t\t\tValue: []string{\"foo\", \"Operation\"},\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE_CLOSE,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\ttokenParsingTests = combineWhitespaceExpressions(tokenParsingTests)\n\trunTokenParsingTest(tokenParsingTests, test)\n}\n\nfunc TestLogicalOperatorParsing(test *testing.T) {\n\n\ttokenParsingTests := []TokenParsingTest{\n\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Boolean AND\",\n\t\t\tInput: \"true && false\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: BOOLEAN,\n\t\t\t\t\tValue: true,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: LOGICALOP,\n\t\t\t\t\tValue: \"&&\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: BOOLEAN,\n\t\t\t\t\tValue: 
false,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Boolean OR\",\n\t\t\tInput: \"true || false\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: BOOLEAN,\n\t\t\t\t\tValue: true,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: LOGICALOP,\n\t\t\t\t\tValue: \"||\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: BOOLEAN,\n\t\t\t\t\tValue: false,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Multiple logical operators\",\n\t\t\tInput: \"true || false && true\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: BOOLEAN,\n\t\t\t\t\tValue: true,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: LOGICALOP,\n\t\t\t\t\tValue: \"||\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: BOOLEAN,\n\t\t\t\t\tValue: false,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: LOGICALOP,\n\t\t\t\t\tValue: \"&&\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: BOOLEAN,\n\t\t\t\t\tValue: true,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\ttokenParsingTests = combineWhitespaceExpressions(tokenParsingTests)\n\trunTokenParsingTest(tokenParsingTests, test)\n}\n\nfunc TestComparatorParsing(test *testing.T) {\n\n\ttokenParsingTests := []TokenParsingTest{\n\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric EQ\",\n\t\t\tInput: \"1 == 2\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"==\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 2.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric NEQ\",\n\t\t\tInput: \"1 != 2\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: 
\"!=\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 2.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric GT\",\n\t\t\tInput: \"1 > 0\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \">\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 0.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric LT\",\n\t\t\tInput: \"1 < 2\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"<\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 2.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric GTE\",\n\t\t\tInput: \"1 >= 2\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \">=\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 2.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric LTE\",\n\t\t\tInput: \"1 <= 2\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"<=\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 2.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"String LT\",\n\t\t\tInput: \"'ab.cd' < 'abc.def'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"ab.cd\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: 
COMPARATOR,\n\t\t\t\t\tValue: \"<\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"abc.def\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"String LTE\",\n\t\t\tInput: \"'ab.cd' <= 'abc.def'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"ab.cd\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"<=\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"abc.def\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"String GT\",\n\t\t\tInput: \"'ab.cd' > 'abc.def'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"ab.cd\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \">\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"abc.def\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"String GTE\",\n\t\t\tInput: \"'ab.cd' >= 'abc.def'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"ab.cd\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \">=\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"abc.def\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"String REQ\",\n\t\t\tInput: \"'foobar' =~ 'bar'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"foobar\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"=~\",\n\t\t\t\t},\n\n\t\t\t\t// it's not particularly clean to test for the contents of a pattern, (since it means modifying the harness below)\n\t\t\t\t// so pattern contents are left untested.\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: 
PATTERN,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"String NREQ\",\n\t\t\tInput: \"'foobar' !~ 'bar'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"foobar\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"!~\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: PATTERN,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Comparator against modifier string additive (#22)\",\n\t\t\tInput: \"'foo' == '+'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"==\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"+\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Comparator against modifier string multiplicative (#22)\",\n\t\t\tInput: \"'foo' == '/'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"==\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"/\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Comparator against modifier string exponential (#22)\",\n\t\t\tInput: \"'foo' == '**'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"==\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"**\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Comparator against modifier string bitwise (#22)\",\n\t\t\tInput: \"'foo' == '^'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: 
STRING,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"==\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"^\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Comparator against modifier string shift (#22)\",\n\t\t\tInput: \"'foo' == '>>'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"==\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \">>\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Comparator against modifier string ternary (#22)\",\n\t\t\tInput: \"'foo' == '?'\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"==\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"?\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Array membership lowercase\",\n\t\t\tInput: \"'foo' in ('foo', 'bar')\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"in\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: SEPARATOR,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"bar\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE_CLOSE,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Array membership uppercase\",\n\t\t\tInput: \"'foo' IN ('foo', 
'bar')\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"in\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: SEPARATOR,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"bar\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: CLAUSE_CLOSE,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\ttokenParsingTests = combineWhitespaceExpressions(tokenParsingTests)\n\trunTokenParsingTest(tokenParsingTests, test)\n}\n\nfunc TestModifierParsing(test *testing.T) {\n\n\ttokenParsingTests := []TokenParsingTest{\n\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric PLUS\",\n\t\t\tInput: \"1 + 1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: MODIFIER,\n\t\t\t\t\tValue: \"+\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric MINUS\",\n\t\t\tInput: \"1 - 1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: MODIFIER,\n\t\t\t\t\tValue: \"-\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric MULTIPLY\",\n\t\t\tInput: \"1 * 1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: MODIFIER,\n\t\t\t\t\tValue: \"*\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: 
NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric DIVIDE\",\n\t\t\tInput: \"1 / 1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: MODIFIER,\n\t\t\t\t\tValue: \"/\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric MODULUS\",\n\t\t\tInput: \"1 % 1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: MODIFIER,\n\t\t\t\t\tValue: \"%\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric BITWISE_AND\",\n\t\t\tInput: \"1 & 1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: MODIFIER,\n\t\t\t\t\tValue: \"&\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric BITWISE_OR\",\n\t\t\tInput: \"1 | 1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: MODIFIER,\n\t\t\t\t\tValue: \"|\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric BITWISE_XOR\",\n\t\t\tInput: \"1 ^ 1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: MODIFIER,\n\t\t\t\t\tValue: 
\"^\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric BITWISE_LSHIFT\",\n\t\t\tInput: \"1 << 1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: MODIFIER,\n\t\t\t\t\tValue: \"<<\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Numeric BITWISE_RSHIFT\",\n\t\t\tInput: \"1 >> 1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: MODIFIER,\n\t\t\t\t\tValue: \">>\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\ttokenParsingTests = combineWhitespaceExpressions(tokenParsingTests)\n\trunTokenParsingTest(tokenParsingTests, test)\n}\n\nfunc TestPrefixParsing(test *testing.T) {\n\n\ttestCases := []TokenParsingTest{\n\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Sign prefix\",\n\t\t\tInput: \"-1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: PREFIX,\n\t\t\t\t\tValue: \"-\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Sign prefix on variable\",\n\t\t\tInput: \"-foo\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: PREFIX,\n\t\t\t\t\tValue: \"-\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Boolean prefix\",\n\t\t\tInput: \"!true\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: PREFIX,\n\t\t\t\t\tValue: 
\"!\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: BOOLEAN,\n\t\t\t\t\tValue: true,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Boolean prefix on variable\",\n\t\t\tInput: \"!foo\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: PREFIX,\n\t\t\t\t\tValue: \"!\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Bitwise not prefix\",\n\t\t\tInput: \"~1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: PREFIX,\n\t\t\t\t\tValue: \"~\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Bitwise not prefix on variable\",\n\t\t\tInput: \"~foo\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: PREFIX,\n\t\t\t\t\tValue: \"~\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\ttestCases = combineWhitespaceExpressions(testCases)\n\trunTokenParsingTest(testCases, test)\n}\n\nfunc TestEscapedParameters(test *testing.T) {\n\n\ttestCases := []TokenParsingTest{\n\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single escaped parameter\",\n\t\t\tInput: \"[foo]\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single escaped parameter with whitespace\",\n\t\t\tInput: \"[foo bar]\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"foo bar\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Single escaped parameter with escaped closing bracket\",\n\t\t\tInput: \"[foo[bar\\\\]]\",\n\t\t\tExpected: 
[]ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"foo[bar]\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Escaped parameters and unescaped parameters\",\n\t\t\tInput: \"[foo] > bar\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"foo\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \">\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"bar\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Unescaped parameter with space\",\n\t\t\tInput: \"foo\\\\ bar > bar\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"foo bar\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \">\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"bar\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Unescaped parameter with space\",\n\t\t\tInput: \"response\\\\-time > bar\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"response-time\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \">\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"bar\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Parameters with snake_case\",\n\t\t\tInput: \"foo_bar > baz_quux\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"foo_bar\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \">\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: VARIABLE,\n\t\t\t\t\tValue: \"baz_quux\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"String literal uses 
backslash to escape\",\n\t\t\tInput: \"\\\"foo\\\\'bar\\\"\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: STRING,\n\t\t\t\t\tValue: \"foo'bar\",\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\trunTokenParsingTest(testCases, test)\n}\n\nfunc TestTernaryParsing(test *testing.T) {\n\ttokenParsingTests := []TokenParsingTest{\n\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Ternary after Boolean\",\n\t\t\tInput: \"true ? 1\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: BOOLEAN,\n\t\t\t\t\tValue: true,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: TERNARY,\n\t\t\t\t\tValue: \"?\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Ternary after Comperator\",\n\t\t\tInput: \"1 == 0 ? true\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: COMPARATOR,\n\t\t\t\t\tValue: \"==\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 0.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: TERNARY,\n\t\t\t\t\tValue: \"?\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: BOOLEAN,\n\t\t\t\t\tValue: true,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tTokenParsingTest{\n\n\t\t\tName: \"Null coalesce left\",\n\t\t\tInput: \"1 ?? 
2\",\n\t\t\tExpected: []ExpressionToken{\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 1.0,\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: TERNARY,\n\t\t\t\t\tValue: \"??\",\n\t\t\t\t},\n\t\t\t\tExpressionToken{\n\t\t\t\t\tKind: NUMERIC,\n\t\t\t\t\tValue: 2.0,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t}\n\n\trunTokenParsingTest(tokenParsingTests, test)\n}\n\n/*\n\tTests to make sure that the String() reprsentation of an expression exactly matches what is given to the parse function.\n*/\nfunc TestOriginalString(test *testing.T) {\n\n\t// include all the token types, to be sure there's no shenaniganery going on.\n\texpressionString := \"2 > 1 &&\" +\n\t\t\"'something' != 'nothing' || \" +\n\t\t\"'2014-01-20' < 'Wed Jul 8 23:07:35 MDT 2015' && \" +\n\t\t\"[escapedVariable name with spaces] <= unescaped\\\\-variableName &&\" +\n\t\t\"modifierTest + 1000 / 2 > (80 * 100 % 2) && true ? true : false\"\n\n\texpression, err := NewEvaluableExpression(expressionString)\n\tif err != nil {\n\n\t\ttest.Logf(\"failed to parse original string test: %v\", err)\n\t\ttest.Fail()\n\t\treturn\n\t}\n\n\tif expression.String() != expressionString {\n\t\ttest.Logf(\"String() did not give the same expression as given to parse\")\n\t\ttest.Fail()\n\t}\n}\n\n/*\n\tTests to make sure that the Vars() reprsentation of an expression identifies all variables contained within the expression.\n*/\nfunc TestOriginalVars(test *testing.T) {\n\n\t// include all the token types, to be sure there's no shenaniganery going on.\n\texpressionString := \"2 > 1 &&\" +\n\t\t\"'something' != 'nothing' || \" +\n\t\t\"'2014-01-20' < 'Wed Jul 8 23:07:35 MDT 2015' && \" +\n\t\t\"[escapedVariable name with spaces] <= unescaped\\\\-variableName &&\" +\n\t\t\"modifierTest + 1000 / 2 > (80 * 100 % 2) && true ? 
true : false\"\n\n\texpectedVars := [3]string{\"escapedVariable name with spaces\",\n\t\t\"modifierTest\",\n\t\t\"unescaped-variableName\"}\n\n\texpression, err := NewEvaluableExpression(expressionString)\n\tif err != nil {\n\n\t\ttest.Logf(\"failed to parse original var test: %v\", err)\n\t\ttest.Fail()\n\t\treturn\n\t}\n\n\tif len(expression.Vars()) == len(expectedVars) {\n\t\tvariableMap := make(map[string]string)\n\t\tfor _, v := range expression.Vars() {\n\t\t\tvariableMap[v] = v\n\t\t}\n\t\tfor _, v := range expectedVars {\n\t\t\tif _, ok := variableMap[v]; !ok {\n\t\t\t\ttest.Logf(\"Vars() did not correctly identify all variables contained within the expression\")\n\t\t\t\ttest.Fail()\n\t\t\t}\n\t\t}\n\t} else {\n\t\ttest.Logf(\"Vars() did not correctly identify all variables contained within the expression\")\n\t\ttest.Fail()\n\t}\n}\n\nfunc combineWhitespaceExpressions(testCases []TokenParsingTest) []TokenParsingTest {\n\n\tvar currentCase, strippedCase TokenParsingTest\n\tvar caseLength int\n\n\tcaseLength = len(testCases)\n\n\tfor i := 0; i < caseLength; i++ {\n\n\t\tcurrentCase = testCases[i]\n\n\t\tstrippedCase = TokenParsingTest{\n\n\t\t\tName: (currentCase.Name + \" (without whitespace)\"),\n\t\t\tInput: stripUnquotedWhitespace(currentCase.Input),\n\t\t\tExpected: currentCase.Expected,\n\t\t\tFunctions: currentCase.Functions,\n\t\t}\n\n\t\ttestCases = append(testCases, strippedCase, currentCase)\n\t}\n\n\treturn testCases\n}\n\nfunc stripUnquotedWhitespace(expression string) string {\n\n\tvar expressionBuffer bytes.Buffer\n\tvar quoted bool\n\n\tfor _, character := range expression {\n\n\t\tif !quoted && unicode.IsSpace(character) {\n\t\t\tcontinue\n\t\t}\n\n\t\tif character == '\\'' {\n\t\t\tquoted = !quoted\n\t\t}\n\n\t\texpressionBuffer.WriteString(string(character))\n\t}\n\n\treturn expressionBuffer.String()\n}\n\nfunc runTokenParsingTest(tokenParsingTests []TokenParsingTest, test *testing.T) {\n\n\tvar parsingTest TokenParsingTest\n\tvar 
expression *EvaluableExpression\n\tvar actualTokens []ExpressionToken\n\tvar actualToken ExpressionToken\n\tvar expectedTokenKindString, actualTokenKindString string\n\tvar expectedTokenLength, actualTokenLength int\n\tvar err error\n\n\tfmt.Printf(\"Running %d parsing test cases...\\n\", len(tokenParsingTests))\n\t// defer func() {\n\t// if r := recover(); r != nil {\n\t// test.Logf(\"Panic in test '%s': %v\", parsingTest.Name, r)\n\t// \t\ttest.Fail()\n\t// }\n\t// }()\n\n\t// Run the test cases.\n\tfor _, parsingTest = range tokenParsingTests {\n\n\t\tif parsingTest.Functions != nil {\n\t\t\texpression, err = NewEvaluableExpressionWithFunctions(parsingTest.Input, parsingTest.Functions)\n\t\t} else {\n\t\t\texpression, err = NewEvaluableExpression(parsingTest.Input)\n\t\t}\n\n\t\tif err != nil {\n\n\t\t\ttest.Logf(\"Test '%s' failed to parse: %s\", parsingTest.Name, err)\n\t\t\ttest.Logf(\"Expression: '%s'\", parsingTest.Input)\n\t\t\ttest.Fail()\n\t\t\tcontinue\n\t\t}\n\n\t\tactualTokens = expression.Tokens()\n\n\t\texpectedTokenLength = len(parsingTest.Expected)\n\t\tactualTokenLength = len(actualTokens)\n\n\t\tif actualTokenLength != expectedTokenLength {\n\n\t\t\ttest.Logf(\"Test '%s' failed:\", parsingTest.Name)\n\t\t\ttest.Logf(\"Expected %d tokens, actually found %d\", expectedTokenLength, actualTokenLength)\n\t\t\ttest.Fail()\n\t\t\tcontinue\n\t\t}\n\n\t\tfor i, expectedToken := range parsingTest.Expected {\n\n\t\t\tactualToken = actualTokens[i]\n\t\t\tif actualToken.Kind != expectedToken.Kind {\n\n\t\t\t\tactualTokenKindString = actualToken.Kind.String()\n\t\t\t\texpectedTokenKindString = expectedToken.Kind.String()\n\n\t\t\t\ttest.Logf(\"Test '%s' failed:\", parsingTest.Name)\n\t\t\t\ttest.Logf(\"Expected token kind '%v' does not match '%v'\", expectedTokenKindString, actualTokenKindString)\n\t\t\t\ttest.Fail()\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif expectedToken.Value == nil {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\treflectedKind := 
reflect.TypeOf(expectedToken.Value).Kind()\n\t\t\tif reflectedKind == reflect.Func {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\t// gotta be an accessor\n\t\t\tif reflectedKind == reflect.Slice {\n\n\t\t\t\tif actualToken.Value == nil {\n\t\t\t\t\ttest.Logf(\"Test '%s' failed:\", parsingTest.Name)\n\t\t\t\t\ttest.Logf(\"Expected token value '%v' does not match nil\", expectedToken.Value)\n\t\t\t\t\ttest.Fail()\n\t\t\t\t}\n\n\t\t\t\tfor z, actual := range actualToken.Value.([]string) {\n\n\t\t\t\t\tif actual != expectedToken.Value.([]string)[z] {\n\n\t\t\t\t\t\ttest.Logf(\"Test '%s' failed:\", parsingTest.Name)\n\t\t\t\t\t\ttest.Logf(\"Expected token value '%v' does not match '%v'\", expectedToken.Value, actualToken.Value)\n\t\t\t\t\t\ttest.Fail()\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif actualToken.Value != expectedToken.Value {\n\n\t\t\t\ttest.Logf(\"Test '%s' failed:\", parsingTest.Name)\n\t\t\t\ttest.Logf(\"Expected token value '%v' does not match '%v'\", expectedToken.Value, actualToken.Value)\n\t\t\t\ttest.Fail()\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\t}\n}\n\nfunc noop(arguments ...interface{}) (interface{}, error) {\n\treturn nil, nil\n}\n", "meta": {"content_hash": "2f197242f4e79a647b8c0f4765e351d5", "timestamp": "", "source": "github", "line_count": 1670, "max_line_length": 124, "avg_line_length": 19.104191616766467, "alnum_prop": 0.582246740220662, "repo_name": "Knetic/govaluate", "id": "d57b80967d3223c5feb5a32e4d61b88fe1ea23d8", "size": "31952", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "parsing_test.go", "mode": "33188", "license": "mit", "language": [{"name": "Go", "bytes": "177418"}, {"name": "Shell", "bytes": "760"}]}} {"text": 
"\n\n\n\n\n\n\t\n\t\t\n\t\t\n\t\t\n\t\t\n\t\n\n\t\n\n\t\n\t\t\n\t\t\n\t\t\n\t\t\n\t\t\n\t\n\n\t\n\t\t\n\t\t\n\t\t\n\t\t\n\t\n\n\t\n\t\t\n\t\t\t\n\t\t\t\t\n\t\t\t\n\t\t\n\t\n\n\n\t\n\t\t\n\t\n\n\t\n\n\t\n\n\t\n\t\t\n\t\n\n\t\n\t\t\n\t\n\n\t\n\t\t\n\t\n\n\t\n\t\t\n\t\t\n\t\t\n\t\n\t\n\t\n\n\t\n\t\t\n\t\t\n\t\t\n\t\t\n\t\t\n\t\n\n\t\n\n\t\n\t\t\n\t\t\t\n\t\t\t\t\n\t\t\t\t\n\t\t\t\t\n\t\t\t\n\t\t\n\t\n\n\n", "meta": {"content_hash": "84a595ed63f81e80daa1d2cf1e760b5c", "timestamp": "", "source": "github", "line_count": 121, "max_line_length": 157, "avg_line_length": 45.83471074380165, "alnum_prop": 0.7601875225387666, "repo_name": "levioZ/springSecurityOauth2", "id": "885eaeb627d6ca89f96e72a4ff6510ae8859fc00", "size": "5546", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "appDemo/target/appDemo/WEB-INF/applicationContext-security.xml", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "25367"}]}} {"text": "FROM balenalib/solidrun-imx6-ubuntu:eoan-build\n\nENV NODE_VERSION 14.15.4\nENV YARN_VERSION 1.22.4\n\nRUN for key in \\\n\t6A010C5166006599AA17F08146C2130DFD2497F5 \\\n\t; do \\\n\t\tgpg --keyserver pgp.mit.edu --recv-keys \"$key\" || \\\n\t\tgpg --keyserver keyserver.pgp.com --recv-keys \"$key\" || \\\n\t\tgpg --keyserver ha.pool.sks-keyservers.net --recv-keys \"$key\" ; \\\n\tdone \\\n\t&& curl -SLO \"http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz\" \\\n\t&& echo \"ffce90b07675434491361dfc74eee230f9ffc65c6c08efb88a18781bcb931871 node-v$NODE_VERSION-linux-armv7l.tar.gz\" | sha256sum -c - \\\n\t&& tar -xzf \"node-v$NODE_VERSION-linux-armv7l.tar.gz\" -C /usr/local --strip-components=1 \\\n\t&& rm \"node-v$NODE_VERSION-linux-armv7l.tar.gz\" \\\n\t&& curl -fSLO --compressed \"https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz\" \\\n\t&& curl -fSLO --compressed \"https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc\" \\\n\t&& gpg --batch --verify 
yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \\\n\t&& mkdir -p /opt/yarn \\\n\t&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \\\n\t&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \\\n\t&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \\\n\t&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \\\n\t&& npm config set unsafe-perm true -g --unsafe-perm \\\n\t&& rm -rf /tmp/*\n\nCMD [\"echo\",\"'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs\"]\n\n RUN curl -SLO \"https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/test-stack@node.sh\" \\\n && echo \"Running test-stack@node\" \\\n && chmod +x test-stack@node.sh \\\n && bash test-stack@node.sh \\\n && rm -rf test-stack@node.sh \n\nRUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \\nArchitecture: ARM v7 \\nOS: Ubuntu eoan \\nVariant: build variant \\nDefault variable(s): UDEV=off \\nThe following software stack is preinstalled: \\nNode.js v14.15.4, Yarn v1.22.4 \\nExtra features: \\n- Easy way to install packages with `install_packages ` command \\n- Run anywhere with cross-build feature (for ARM only) \\n- Keep the container idling with `balena-idle` command \\n- Show base image details with `balena-info` command' > /.balena/messages/image-info\n\nRUN echo '#!/bin/sh.real\\nbalena-info\\nrm -f /bin/sh\\ncp /bin/sh.real /bin/sh\\n/bin/sh \"$@\"' > /bin/sh-shim \\\n\t&& chmod +x /bin/sh-shim \\\n\t&& cp /bin/sh /bin/sh.real \\\n\t&& mv /bin/sh-shim /bin/sh", "meta": {"content_hash": "c8a01e4b75bd5de09af8b5745a3f2309", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 692, "avg_line_length": 
66.85365853658537, "alnum_prop": 0.7099598686610726, "repo_name": "nghiant2710/base-images", "id": "ce2d4d127f7bb93b26d59fbf176b02e348c9ccd4", "size": "2762", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "balena-base-images/node/solidrun-imx6/ubuntu/eoan/14.15.4/build/Dockerfile", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Dockerfile", "bytes": "144558581"}, {"name": "JavaScript", "bytes": "16316"}, {"name": "Shell", "bytes": "368690"}]}} {"text": "var Sources = $CL.namespace('SqlConnect.View.Sources');\n\n$CL.require('Cl.Backbone.BlockingView');\n\nSources.Options = function() {};\n\nSources.Options = $CL.extendClass(Sources.Options, Cl.Backbone.BlockingView, {\n render : function() {\n this.parent.prototype.render.apply(this);\n\n if (!$CL.isEmpty(this.data.options)) {\n this.$el.find('select[name=sqlconnect-source-count-column]').val(this.data.options.countColumn);\n \n if (this.data.options.customSql) {\n this.$el.find('textarea[name=sqlconnect-source-custom-sql]').val(this.data.options.customSql);\n }\n }\n }\n});\n", "meta": {"content_hash": "ee72d599010347b3f988d5d873d71c76", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 110, "avg_line_length": 34.21052631578947, "alnum_prop": 0.6276923076923077, "repo_name": "codeliner/ginger-ims", "id": "081d1dc6469d2b881b15d969899987621caacf57", "size": "650", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "module/SqlConnect/src/SqlConnect/Javascript/View/Sources/Options.js", "mode": "33261", "license": "bsd-3-clause", "language": [{"name": "CSS", "bytes": "84729"}, {"name": "JavaScript", "bytes": "265425"}, {"name": "PHP", "bytes": "523207"}, {"name": "Shell", "bytes": "809"}]}} {"text": "include(FindPackageHandleStandardArgs)\n\nfind_path(FFTW_INCLUDE_DIR\n NAMES fftw3.h\n PATH_SUFFIXES include include/fftw\n HINTS ENV MKLROOT\n HINTS ENV FFTWROOT\n HINTS ENV FFTW_INC\n )\n\nfind_library(FFTW_LIBRARIES\n NAMES fftw3\n 
PATH_SUFFIXES lib\n HINTS ENV MKLROOT\n HINTS ENV FFTW_DIR\n HINTS ENV FFTWROOT\n )\n\nset(FFTW_INCLUDE_DIRS ${FFTW_INCLUDE_DIR})\n\nif(FFTW_LIBRARIES MATCHES \"NOTFOUND\")\n # ok, fftw libraries not found.\n # MKL contains fftw, lets assume we use MKL\n # TODO: handle this properly\n set(FFTW_LIBRARIES \"\")\n find_package_handle_standard_args(FFTW\n REQUIRED_VARS FFTW_INCLUDE_DIR )\n mark_as_advanced(FFTW_FOUND FFTW_INCLUDE_DIR)\nelse()\n find_package_handle_standard_args(FFTW\n REQUIRED_VARS FFTW_INCLUDE_DIR FFTW_LIBRARIES)\n mark_as_advanced(FFTW_FOUND FFTW_INCLUDE_DIR FFTW_LIBRARIES)\nendif()\n", "meta": {"content_hash": "48fa9a8857e7df31546b884bdba04a16", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 62, "avg_line_length": 25.151515151515152, "alnum_prop": 0.7506024096385542, "repo_name": "electronic-structure/sirius", "id": "d261546f9ec8feb2a9b4a3d4b1bfb6ff23712246", "size": "830", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "cmake/modules/FindFFTW.cmake", "mode": "33188", "license": "bsd-2-clause", "language": [{"name": "C", "bytes": "80256"}, {"name": "C++", "bytes": "2742135"}, {"name": "CSS", "bytes": "99478"}, {"name": "Cuda", "bytes": "85058"}, {"name": "FORTRAN", "bytes": "904229"}, {"name": "Groff", "bytes": "3046"}, {"name": "HTML", "bytes": "25527"}, {"name": "JavaScript", "bytes": "1451"}, {"name": "Makefile", "bytes": "17141"}, {"name": "PHP", "bytes": "2203"}, {"name": "Python", "bytes": "343602"}, {"name": "Shell", "bytes": "314"}]}} {"text": "\n\n\n\n\n\nUses of Package com.fasterxml.jackson.datatype.jsr310.ser.key (Jackson datatype: JSR310 2.9.0.pr1 API)\n\n\n\n\n\n\n\n\n\n
\n
    \n
  • Prev
  • \n
  • Next
  • \n
\n\n\n
\n\n
\n\n\n
\n\n
\n

Uses of Package
com.fasterxml.jackson.datatype.jsr310.ser.key

\n
\n
\n\n
\n\n\n
\n
    \n
  • Prev
  • \n
  • Next
  • \n
\n\n\n
\n\n
\n\n\n
\n\n

Copyright © 2017 FasterXML. All rights reserved.

\n\n\n", "meta": {"content_hash": "e92b46947571eda7b2ef917761e295fe", "timestamp": "", "source": "github", "line_count": 159, "max_line_length": 380, "avg_line_length": 39.0251572327044, "alnum_prop": 0.6288477034649477, "repo_name": "kevinjom/jackson-modules-java8", "id": "14578e2fabfd180c6f49236394292c4badeba137", "size": "6205", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "docs/javadoc/datetime/2.9.pr1/com/fasterxml/jackson/datatype/jsr310/ser/key/package-use.html", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "699284"}]}} {"text": "#ifndef _TEST_H_\n#define _TEST_H_\n\n/********************* Header Files ***********************/\n/* C Headers */\n//#include \n//#include \n//#include /* atof, rand, malloc... */\n//#include /* size_t, NULL */\n//#include /* Variable argument functions */\n//#include /* Character check functions */\n//#include \n//#include \n//$include \n//$include /* C11, standard u_int16 & such */\n\n/* Project Headers */\n\n/******************* Constants/Macros *********************/\n\n\n/******************* Type Declarations ********************/\n/* For enums: Try to namesapce the common elements.\n * typedef enum {\n *\tVAL_,\n * } name_e;\n */\n\n/* For structs:\n * typedef struct name_s {\n *\tint index;\n * } name_t;\n */\n\n/********************** Prototypes ************************/\n\n#endif /* _TEST_H_ */\n\n", "meta": {"content_hash": "882e76a4f5e8c4327fceaa481c14f98f", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 60, "avg_line_length": 23.18421052631579, "alnum_prop": 0.49148694665153236, "repo_name": "starcraftman/.my_scripts", "id": "476ec6e0fb6258f7ca90bf4d60991d40b24ca43c", "size": "881", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "bin/templates/c_template.h", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "914"}, {"name": "C++", "bytes": "4884"}, {"name": "Common Lisp", "bytes": "3217"}, 
{"name": "Makefile", "bytes": "14820"}, {"name": "Objective-C", "bytes": "881"}, {"name": "Perl", "bytes": "14953"}, {"name": "Python", "bytes": "87094"}, {"name": "Ruby", "bytes": "11415"}, {"name": "Shell", "bytes": "15604"}]}} {"text": "\npackage org.owasp.webgoat.plugin.db_cross_site;\n\nimport org.owasp.webgoat.plugin.GoatHillsFinancial.DefaultLessonAction;\nimport org.owasp.webgoat.plugin.GoatHillsFinancial.Employee;\nimport org.owasp.webgoat.plugin.GoatHillsFinancial.GoatHillsFinancial;\nimport org.owasp.webgoat.plugin.GoatHillsFinancial.LessonAction;\nimport org.owasp.webgoat.session.ParameterNotFoundException;\nimport org.owasp.webgoat.session.UnauthenticatedException;\nimport org.owasp.webgoat.session.UnauthorizedException;\nimport org.owasp.webgoat.session.ValidationException;\nimport org.owasp.webgoat.session.WebSession;\n\nimport javax.servlet.http.HttpServletRequest;\nimport java.sql.CallableStatement;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\nimport java.sql.Statement;\n\n\n\npublic class UpdateProfileDBCrossSiteScripting extends DefaultLessonAction\n{\n\n private LessonAction chainedAction;\n\n public UpdateProfileDBCrossSiteScripting(GoatHillsFinancial lesson, String lessonName, String actionName, LessonAction chainedAction)\n {\n super(lesson, lessonName, actionName);\n this.chainedAction = chainedAction;\n }\n\n public void handleRequest(WebSession s) throws ParameterNotFoundException, UnauthenticatedException,\n UnauthorizedException, ValidationException\n {\n if (isAuthenticated(s))\n {\n int userId = getIntSessionAttribute(s, getLessonName() + \".\" + GoatHillsFinancial.USER_ID);\n\n HttpServletRequest request = s.getRequest();\n int subjectId = Integer.parseInt(request.getParameter(DBCrossSiteScripting.EMPLOYEE_ID));\n String firstName = request.getParameter(DBCrossSiteScripting.FIRST_NAME);\n String lastName = request.getParameter(DBCrossSiteScripting.LAST_NAME);\n String ssn = 
request.getParameter(DBCrossSiteScripting.SSN);\n String title = request.getParameter(DBCrossSiteScripting.TITLE);\n String phone = request.getParameter(DBCrossSiteScripting.PHONE_NUMBER);\n String address1 = request.getParameter(DBCrossSiteScripting.ADDRESS1);\n String address2 = request.getParameter(DBCrossSiteScripting.ADDRESS2);\n int manager = Integer.parseInt(request.getParameter(DBCrossSiteScripting.MANAGER));\n String startDate = request.getParameter(DBCrossSiteScripting.START_DATE);\n int salary = Integer.parseInt(request.getParameter(DBCrossSiteScripting.SALARY));\n String ccn = request.getParameter(DBCrossSiteScripting.CCN);\n int ccnLimit = Integer.parseInt(request.getParameter(DBCrossSiteScripting.CCN_LIMIT));\n String disciplinaryActionDate = request.getParameter(DBCrossSiteScripting.DISCIPLINARY_DATE);\n String disciplinaryActionNotes = request.getParameter(DBCrossSiteScripting.DISCIPLINARY_NOTES);\n String personalDescription = request.getParameter(DBCrossSiteScripting.DESCRIPTION);\n\n Employee employee = new Employee(subjectId, firstName, lastName, ssn, title, phone, address1, address2,\n manager, startDate, salary, ccn, ccnLimit, disciplinaryActionDate, disciplinaryActionNotes,\n personalDescription);\n\n try\n {\n if (subjectId > 0)\n {\n this.changeEmployeeProfile(s, userId, subjectId, employee);\n setRequestAttribute(s, getLessonName() + \".\" + DBCrossSiteScripting.EMPLOYEE_ID, Integer\n .toString(subjectId));\n if (DBCrossSiteScripting.STAGE1.equals(getStage(s)))\n {\n address1 = address1.toLowerCase();\n boolean pass = address1.contains(\"\");\n if (pass)\n {\n setStageComplete(s, DBCrossSiteScripting.STAGE1);\n }\n }\n }\n else\n this.createEmployeeProfile(s, userId, employee);\n } catch (SQLException e)\n {\n s.setMessage(\"Error updating employee profile\");\n e.printStackTrace();\n if (DBCrossSiteScripting.STAGE2.equals(getStage(s))\n && (e.getMessage().contains(\"ORA-06512\") || e.getMessage().contains(\"Illegal characters\"))\n && 
!employee.getAddress1().matches(\"^[a-zA-Z0-9,\\\\. ]{0,80}$\"))\n {\n setStageComplete(s, DBCrossSiteScripting.STAGE2);\n }\n\n }\n\n try\n {\n chainedAction.handleRequest(s);\n } catch (UnauthenticatedException ue1)\n {\n // System.out.println(\"Internal server error\");\n ue1.printStackTrace();\n } catch (UnauthorizedException ue2)\n {\n // System.out.println(\"Internal server error\");\n ue2.printStackTrace();\n }\n }\n else\n throw new UnauthenticatedException();\n }\n\n public String getNextPage(WebSession s)\n {\n return DBCrossSiteScripting.VIEWPROFILE_ACTION;\n }\n\n public void changeEmployeeProfile(WebSession s, int userId, int subjectId, Employee employee) throws SQLException\n {\n String update = \" { CALL UPDATE_EMPLOYEE(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?) }\";\n CallableStatement call = WebSession.getConnection(s).prepareCall(update);\n // Note: The password field is ONLY set by ChangePassword\n call.setInt(1, userId);\n call.setString(2, employee.getFirstName());\n call.setString(3, employee.getLastName());\n call.setString(4, employee.getSsn());\n call.setString(5, employee.getTitle());\n call.setString(6, employee.getPhoneNumber());\n call.setString(7, employee.getAddress1());\n call.setString(8, employee.getAddress2());\n call.setInt(9, employee.getManager());\n call.setString(10, employee.getStartDate());\n call.setInt(11, employee.getSalary());\n call.setString(12, employee.getCcn());\n call.setInt(13, employee.getCcnLimit());\n call.setString(14, employee.getDisciplinaryActionDate());\n call.setString(15, employee.getDisciplinaryActionNotes());\n call.setString(16, employee.getPersonalDescription());\n call.executeUpdate();\n }\n\n public void createEmployeeProfile(WebSession s, int userId, Employee employee) throws UnauthorizedException\n {\n try\n {\n int nextId = getNextUID(s);\n String query = \"INSERT INTO employee VALUES ( \" + nextId + \", ?,?,?,?,?,?,?,?,?,?,?,?,?,?)\";\n\n try\n {\n PreparedStatement ps = 
WebSession.getConnection(s).prepareStatement(query);\n\n ps.setString(1, employee.getFirstName().toLowerCase());\n ps.setString(2, employee.getLastName());\n ps.setString(3, employee.getSsn());\n ps.setString(4, employee.getTitle());\n ps.setString(5, employee.getPhoneNumber());\n ps.setString(6, employee.getAddress1());\n ps.setString(7, employee.getAddress2());\n ps.setInt(8, employee.getManager());\n ps.setString(9, employee.getStartDate());\n ps.setString(10, employee.getCcn());\n ps.setInt(11, employee.getCcnLimit());\n ps.setString(12, employee.getDisciplinaryActionDate());\n ps.setString(13, employee.getDisciplinaryActionNotes());\n ps.setString(14, employee.getPersonalDescription());\n\n ps.execute();\n } catch (SQLException sqle)\n {\n s.setMessage(\"Error updating employee profile\");\n sqle.printStackTrace();\n }\n } catch (Exception e)\n {\n s.setMessage(\"Error updating employee profile\");\n e.printStackTrace();\n }\n }\n\n private int getNextUID(WebSession s)\n {\n int uid = -1;\n try\n {\n Statement statement = WebSession.getConnection(s).createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,\n ResultSet.CONCUR_READ_ONLY);\n ResultSet results = statement.executeQuery(\"select max(userid) as uid from employee\");\n results.first();\n uid = results.getInt(\"uid\");\n } catch (SQLException sqle)\n {\n sqle.printStackTrace();\n s.setMessage(\"Error updating employee profile\");\n }\n return uid + 1;\n }\n}\n", "meta": {"content_hash": "eec0d297b3bc76e40f3130914ef500bb", "timestamp": "", "source": "github", "line_count": 199, "max_line_length": 137, "avg_line_length": 44.90954773869347, "alnum_prop": 0.6055723397113125, "repo_name": "aseemsbapat/test", "id": "a14823155fce0d70c961109dda0149c9c3cafeba", "size": "10166", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "UpdateProfileDBCrossSiteScripting.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C#", "bytes": "522"}, {"name": "CSS", "bytes": "32037"}, {"name": 
"HTML", "bytes": "1085343"}, {"name": "Java", "bytes": "1094660"}, {"name": "JavaScript", "bytes": "372790"}, {"name": "PHP", "bytes": "172910"}, {"name": "Roff", "bytes": "22608"}]}} {"text": " $query,\n ]);\n\n $this->load($params);\n\n if (!$this->validate()) {\n // uncomment the following line if you do not want to return any records when validation fails\n // $query->where('0=1');\n return $dataProvider;\n }\n\n // grid filtering conditions\n $query->andFilterWhere([\n 'idClientes' => $this->idClientes,\n 'prioridad' => $this->prioridad,\n 'telefono' => $this->telefono,\n ]);\n\n $query->andFilterWhere(['like', 'ci_cliente', $this->ci_cliente])\n ->andFilterWhere(['like', 'nombre', $this->nombre])\n ->andFilterWhere(['like', 'horario_atencion', $this->horario_atencion])\n ->andFilterWhere(['like', 'email', $this->email]);\n\n return $dataProvider;\n }\n}\n", "meta": {"content_hash": "b37fd0dd039133410e0d765b85b31774", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 106, "avg_line_length": 25.18918918918919, "alnum_prop": 0.5574034334763949, "repo_name": "Nacho2126/php20162", "id": "c1cd90f235b749d1d52827dc04a0d6b9754115d1", "size": "1864", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "backend/models/ClientesSearch.php", "mode": "33261", "license": "bsd-3-clause", "language": [{"name": "ApacheConf", "bytes": "342"}, {"name": "Batchfile", "bytes": "1546"}, {"name": "CSS", "bytes": "161818"}, {"name": "HTML", "bytes": "25298"}, {"name": "JavaScript", "bytes": "364350"}, {"name": "PHP", "bytes": "240791"}, {"name": "Shell", "bytes": "3256"}]}} {"text": "package pivkey\n\nimport (\n\t\"github.com/go-piv/piv-go/piv\"\n)\n\nfunc SlotForName(slotName string) *piv.Slot {\n\tswitch slotName {\n\tcase \"\":\n\t\treturn &piv.SlotSignature\n\tcase \"authentication\":\n\t\treturn &piv.SlotAuthentication\n\tcase \"signature\":\n\t\treturn &piv.SlotSignature\n\tcase \"card-authentication\":\n\t\treturn 
&piv.SlotCardAuthentication\n\tcase \"key-management\":\n\t\treturn &piv.SlotKeyManagement\n\tdefault:\n\t\treturn nil\n\t}\n}\n\nfunc PINPolicyForName(policyName string, slot piv.Slot) piv.PINPolicy {\n\tswitch policyName {\n\tcase \"\":\n\t\treturn defaultPINPolicyForSlot(slot)\n\tcase \"never\":\n\t\treturn piv.PINPolicyNever\n\tcase \"once\":\n\t\treturn piv.PINPolicyOnce\n\tcase \"always\":\n\t\treturn piv.PINPolicyAlways\n\tdefault:\n\t\treturn -1\n\t}\n}\n\nfunc TouchPolicyForName(policyName string, slot piv.Slot) piv.TouchPolicy {\n\tswitch policyName {\n\tcase \"\":\n\t\treturn defaultTouchPolicyForSlot(slot)\n\tcase \"never\":\n\t\treturn piv.TouchPolicyNever\n\tcase \"cached\":\n\t\treturn piv.TouchPolicyCached\n\tcase \"always\":\n\t\treturn piv.TouchPolicyAlways\n\tdefault:\n\t\treturn -1\n\t}\n}\n\nfunc defaultPINPolicyForSlot(slot piv.Slot) piv.PINPolicy {\n\t//\n\t// Defaults from https://developers.yubico.com/PIV/Introduction/Certificate_slots.html\n\t//\n\n\tswitch slot {\n\tcase piv.SlotAuthentication:\n\t\treturn piv.PINPolicyOnce\n\tcase piv.SlotSignature:\n\t\treturn piv.PINPolicyAlways\n\tcase piv.SlotKeyManagement:\n\t\treturn piv.PINPolicyOnce\n\tcase piv.SlotCardAuthentication:\n\t\treturn piv.PINPolicyNever\n\tdefault:\n\t\t// This should never happen\n\t\tpanic(\"invalid value for slot\")\n\t}\n}\n\nfunc defaultTouchPolicyForSlot(slot piv.Slot) piv.TouchPolicy {\n\t//\n\t// Defaults from https://developers.yubico.com/PIV/Introduction/Certificate_slots.html\n\t//\n\n\tswitch slot {\n\tcase piv.SlotAuthentication:\n\t\treturn piv.TouchPolicyCached\n\tcase piv.SlotSignature:\n\t\treturn piv.TouchPolicyAlways\n\tcase piv.SlotKeyManagement:\n\t\treturn piv.TouchPolicyCached\n\tcase piv.SlotCardAuthentication:\n\t\treturn piv.TouchPolicyNever\n\tdefault:\n\t\t// This should never happen\n\t\tpanic(\"invalid value for slot\")\n\t}\n}\n", "meta": {"content_hash": "776ea3c863945fe997a86eb7014ae009", "timestamp": "", "source": "github", 
"line_count": 92, "max_line_length": 87, "avg_line_length": 21.83695652173913, "alnum_prop": 0.7610751617720258, "repo_name": "tektoncd/operator", "id": "a323716e6b4d31588890f94ad9ab34da28b70d05", "size": "2655", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "vendor/github.com/sigstore/cosign/pkg/cosign/pivkey/util.go", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Dockerfile", "bytes": "1972"}, {"name": "Go", "bytes": "1049401"}, {"name": "Makefile", "bytes": "7852"}, {"name": "Python", "bytes": "14477"}, {"name": "Shell", "bytes": "42938"}, {"name": "Smarty", "bytes": "4243"}]}} {"text": "\n 3.4\n galaxy1\n \n \n topic_0632\n \n \n operation_0231\n \n \n \n \n cutadapt\n \n \n \n \n 10.14806/ej.17.1.200\n \n \n \n \n cutadapt\n \n \n \n\n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n\n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n\n
\n\n \n \n \n \n \n \n \n\n \n \n \n \n\n \n \n \n\n \n \n \n \n \n\n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n\n \n\n
\n\n
\n\n \n\n \n \n\n \n
\n\n \n \n \n \n \n \n \n\n \n \n \n \n\n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n\n \n\n
\n\n
\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n\n
\n", "meta": {"content_hash": "7273be33487bd7118b51d835119d7da2", "timestamp": "", "source": "github", "line_count": 381, "max_line_length": 516, "avg_line_length": 58.55380577427822, "alnum_prop": 0.5371374781478327, "repo_name": "pjbriggs/tools-iuc", "id": "ba2b0fa5f8f53a75a531ba72d37f85cc9aaac6df", "size": "22309", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tools/cutadapt/macros.xml", "mode": "33188", "license": "mit", "language": [{"name": "C++", "bytes": "4220"}, {"name": "HTML", "bytes": "11558401"}, {"name": "Mako", "bytes": "2116"}, {"name": "Max", "bytes": "140358"}, {"name": "OpenEdge ABL", "bytes": "1960016"}, {"name": "Pep8", "bytes": "87474"}, {"name": "Perl", "bytes": "58627"}, {"name": "Python", "bytes": "741604"}, {"name": "R", "bytes": "268177"}, {"name": "Rebol", "bytes": "1225"}, {"name": "Roff", "bytes": "3011"}, {"name": "Shell", "bytes": "84319"}, {"name": "UnrealScript", "bytes": "660637"}, {"name": "eC", "bytes": "24"}]}} {"text": "'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar React = require('react');\nvar tooltip = require('primereact/tooltip');\nvar utils = require('primereact/utils');\n\nfunction _interopNamespace(e) {\n if (e && e.__esModule) return e;\n var n = Object.create(null);\n if (e) {\n Object.keys(e).forEach(function (k) {\n if (k !== 'default') {\n var d = Object.getOwnPropertyDescriptor(e, k);\n Object.defineProperty(n, k, d.get ? 
d : {\n enumerable: true,\n get: function () { return e[k]; }\n });\n }\n });\n }\n n[\"default\"] = e;\n return Object.freeze(n);\n}\n\nvar React__namespace = /*#__PURE__*/_interopNamespace(React);\n\nfunction _extends() {\n _extends = Object.assign || function (target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = arguments[i];\n\n for (var key in source) {\n if (Object.prototype.hasOwnProperty.call(source, key)) {\n target[key] = source[key];\n }\n }\n }\n\n return target;\n };\n\n return _extends.apply(this, arguments);\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\nfunction _arrayWithHoles(arr) {\n if (Array.isArray(arr)) return arr;\n}\n\nfunction _iterableToArrayLimit(arr, i) {\n var _i = arr == null ? null : typeof Symbol !== \"undefined\" && arr[Symbol.iterator] || arr[\"@@iterator\"];\n\n if (_i == null) return;\n var _arr = [];\n var _n = true;\n var _d = false;\n\n var _s, _e;\n\n try {\n for (_i = _i.call(arr); !(_n = (_s = _i.next()).done); _n = true) {\n _arr.push(_s.value);\n\n if (i && _arr.length === i) break;\n }\n } catch (err) {\n _d = true;\n _e = err;\n } finally {\n try {\n if (!_n && _i[\"return\"] != null) _i[\"return\"]();\n } finally {\n if (_d) throw _e;\n }\n }\n\n return _arr;\n}\n\nfunction _arrayLikeToArray(arr, len) {\n if (len == null || len > arr.length) len = arr.length;\n\n for (var i = 0, arr2 = new Array(len); i < len; i++) {\n arr2[i] = arr[i];\n }\n\n return arr2;\n}\n\nfunction _unsupportedIterableToArray(o, minLen) {\n if (!o) return;\n if (typeof o === \"string\") return _arrayLikeToArray(o, minLen);\n var n = Object.prototype.toString.call(o).slice(8, -1);\n if (n === \"Object\" && o.constructor) n = o.constructor.name;\n if (n === \"Map\" || n === \"Set\") return Array.from(o);\n if (n === \"Arguments\" || 
/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen);\n}\n\nfunction _nonIterableRest() {\n throw new TypeError(\"Invalid attempt to destructure non-iterable instance.\\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.\");\n}\n\nfunction _slicedToArray(arr, i) {\n return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest();\n}\n\nfunction ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }\n\nfunction _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }\nvar InputSwitch = /*#__PURE__*/React__namespace.memo( /*#__PURE__*/React__namespace.forwardRef(function (props, ref) {\n var _React$useState = React__namespace.useState(false),\n _React$useState2 = _slicedToArray(_React$useState, 2),\n focusedState = _React$useState2[0],\n setFocusedState = _React$useState2[1];\n\n var elementRef = React__namespace.useRef(null);\n var inputRef = React__namespace.useRef(props.inputRef);\n var checked = props.checked === props.trueValue;\n\n var onClick = function onClick(event) {\n if (props.disabled) {\n return;\n }\n\n toggle(event);\n utils.DomHandler.focus(inputRef.current);\n event.preventDefault();\n };\n\n var toggle = function toggle(event) {\n if (props.onChange) {\n var value = checked ? props.falseValue : props.trueValue;\n props.onChange({\n originalEvent: event,\n value: value,\n stopPropagation: function stopPropagation() {},\n preventDefault: function preventDefault() {},\n target: {\n name: props.name,\n id: props.id,\n value: value\n }\n });\n }\n };\n\n var onFocus = function onFocus(event) {\n setFocusedState(true);\n props.onFocus && props.onFocus(event);\n };\n\n var onBlur = function onBlur(event) {\n setFocusedState(false);\n props.onBlur && props.onBlur(event);\n };\n\n React__namespace.useImperativeHandle(ref, function () {\n return _objectSpread({\n getElement: function getElement() {\n return elementRef.current;\n },\n getInput: function getInput() {\n return elementRef.current;\n }\n }, props);\n });\n React__namespace.useEffect(function () {\n utils.ObjectUtils.combinedRefs(inputRef, props.inputRef);\n }, [inputRef, props.inputRef]);\n var hasTooltip = utils.ObjectUtils.isNotEmpty(props.tooltip);\n var otherProps = utils.ObjectUtils.findDiffKeys(props, 
InputSwitch.defaultProps);\n var className = utils.classNames('p-inputswitch p-component', {\n 'p-inputswitch-checked': checked,\n 'p-disabled': props.disabled,\n 'p-focus': focusedState\n }, props.className);\n return /*#__PURE__*/React__namespace.createElement(React__namespace.Fragment, null, /*#__PURE__*/React__namespace.createElement(\"div\", _extends({\n ref: elementRef,\n id: props.id,\n className: className,\n style: props.style\n }, otherProps, {\n onClick: onClick,\n role: \"checkbox\",\n \"aria-checked\": checked\n }), /*#__PURE__*/React__namespace.createElement(\"div\", {\n className: \"p-hidden-accessible\"\n }, /*#__PURE__*/React__namespace.createElement(\"input\", {\n ref: inputRef,\n type: \"checkbox\",\n id: props.inputId,\n name: props.name,\n checked: checked,\n onChange: toggle,\n onFocus: onFocus,\n onBlur: onBlur,\n disabled: props.disabled,\n role: \"switch\",\n tabIndex: props.tabIndex,\n \"aria-checked\": checked,\n \"aria-labelledby\": props['aria-labelledby'],\n \"aria-label\": props['aria-label']\n })), /*#__PURE__*/React__namespace.createElement(\"span\", {\n className: \"p-inputswitch-slider\"\n })), hasTooltip && /*#__PURE__*/React__namespace.createElement(tooltip.Tooltip, _extends({\n target: elementRef,\n content: props.tooltip\n }, props.tooltipOptions)));\n}));\nInputSwitch.displayName = 'InputSwitch';\nInputSwitch.defaultProps = {\n __TYPE: 'InputSwitch',\n id: null,\n inputRef: null,\n style: null,\n className: null,\n inputId: null,\n name: null,\n tabIndex: null,\n checked: false,\n trueValue: true,\n falseValue: false,\n disabled: false,\n tooltip: null,\n tooltipOptions: null,\n 'aria-label': null,\n 'aria-labelledby': null,\n onChange: null,\n onFocus: null,\n onBlur: null\n};\n\nexports.InputSwitch = InputSwitch;\n", "meta": {"content_hash": "094e587a17c4ee77902e347534369fb8", "timestamp": "", "source": "github", "line_count": 249, "max_line_length": 506, "avg_line_length": 30.248995983935743, "alnum_prop": 
0.6319702602230484, "repo_name": "cdnjs/cdnjs", "id": "b263e1fd46e8ff5512b9d5b112066e325aa0c9ac", "size": "7532", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ajax/libs/primereact/8.3.0/inputswitch/inputswitch.cjs.js", "mode": "33188", "license": "mit", "language": []}} {"text": "var Event = function Event(Title, WorldWar, SocialStability, TechnologicalProgress, Message, Event_Variable) {\n this.Title = Title;\n this.WorldWar = WorldWar;\n this.SocialStability = SocialStability;\n this.TechnologicalProgress = TechnologicalProgress;\n this.Message = Message;\n this.Event_Variable = Event_Variable;\n}\n\n//Protoype allows us to implement pseudoclasses into Javascript.\n//It is necessary to define the 'call' method on the Event Constructor so that we can trigger the\n//event's effects.\nEvent.prototype = {\n call: function() {\n Metrics.WorldWar += this.WorldWar;\n Metrics.SocialStability += this.SocialStability;\n Metrics.TechnologicalProgress += this.TechnologicalProgress;\n if (this.Event_Variable == \"CityFive\")\n {\n CityFive = true;\n }\n return (this.Message + \"\")\n }\n};\n\n\n//An EmptyEvent exist if you delete an event from the Timeline. It is created\n//so that I don't have to worry about calling \"undefined\" events.\nvar EmptyEvent = function () {\n}\n\n//At the beginning of the game, the following four events are inserted into the Timeline and are triggered.\n//new Event(WorldWar, SocialStability, TechnologicalProgress, Message, optional Event_Varaible)\nvar CityFiveBuilt = new Event(\"CityFiveBuilt\",-20,0,20, CITY_FIVE_BUILT,\"CityFive\")\n\nvar TheOrder = new Event(\"TheOrder\",0,20,-20, THE_ORDER);\n//In the original version of Zybourne Clock, Dr. Zybourne developed his time-travelling Clock after the World War. 
But I do not want to implement events being dependent on each other until AFTER the MVP.\nvar ClockInvented = new Event(\"ClockInvented\", -20,0,20, CLOCK_INVENTED);\n\nvar Zepplins = new Event(\"Zepplins\",-20,20,0, ZEPPLINS);\n\n//These events are in the Player's hand at the start of Zybourne Clock Redux. This is just a MVP, so I only have two events here for now.\nvar ChildSpybots = new Event(\"ChildSpybots\",20,-20,0, CHILD_SPYBOTS)\n\nvar Vaundermause = new Event(\"Vaundermause\",20,0,-20, VAUNDERMAUSE)\n\nvar SocialPlanningRevolution = new Event(\"SocialPlanningRevolution\",-40,20,20, SOCIAL_PLANNING)\n\nvar Nanotech = new Event(\"Nanotech\",20,-40,20, NANOTECH)\n\nvar LuddistCrusade = new Event(\"LuddistCrusade\",0,40,-40, LUDDIST_CRUSADE)\n\nvar Timeline = []\n\nvar PlayerHand = [ChildSpybots, Vaundermause, SocialPlanningRevolution, Nanotech, LuddistCrusade]\n", "meta": {"content_hash": "14b4e1f5ba710e7c0cdf6fef5a72fcd1", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 203, "avg_line_length": 43.163636363636364, "alnum_prop": 0.7502106149957877, "repo_name": "tra38/Zybourne-Clock-Redux", "id": "37a1a89af37f147eccff2172bb4682b95be13b1b", "size": "2533", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/assets/javascripts/timeline_events.js", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "928"}, {"name": "CoffeeScript", "bytes": "211"}, {"name": "HTML", "bytes": "7737"}, {"name": "JavaScript", "bytes": "22687"}, {"name": "Ruby", "bytes": "21100"}]}} {"text": "\ufeffusing System;\nusing System.Collections.Generic;\nusing System.Linq;\n\nusing MonoTouch.Foundation;\nusing MonoTouch.UIKit;\n\nnamespace Linq2TwitterSample\n{\n\t// The UIApplicationDelegate for the application. 
This class is responsible for launching the\n\t// User Interface of the application, as well as listening (and optionally responding) to\n\t// application events from iOS.\n\t[Register (\"AppDelegate\")]\n\tpublic partial class AppDelegate : UIApplicationDelegate\n\t{\n\t\t// class-level declarations\n\t\t\n\t\tpublic override UIWindow Window {\n\t\t\tget;\n\t\t\tset;\n\t\t}\n\t\t\n\t\t// This method is invoked when the application is about to move from active to inactive state.\n\t\t// OpenGL applications should use this method to pause.\n\t\tpublic override void OnResignActivation (UIApplication application)\n\t\t{\n\t\t}\n\t\t\n\t\t// This method should be used to release shared resources and it should store the application state.\n\t\t// If your application supports background exection this method is called instead of WillTerminate\n\t\t// when the user quits.\n\t\tpublic override void DidEnterBackground (UIApplication application)\n\t\t{\n\t\t}\n\t\t\n\t\t// This method is called as part of the transiton from background to active state.\n\t\tpublic override void WillEnterForeground (UIApplication application)\n\t\t{\n\t\t}\n\t\t\n\t\t// This method is called when the application is about to terminate. 
Save data, if needed.\n\t\tpublic override void WillTerminate (UIApplication application)\n\t\t{\n\t\t}\n\t}\n}\n\n", "meta": {"content_hash": "dcc28da19aa0881a6b9bc1aa83aad202", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 102, "avg_line_length": 30.595744680851062, "alnum_prop": 0.7600834492350487, "repo_name": "SotoiGhost/Linq2TwitterSample", "id": "4197deb07c8adc0a020d77dec7abc30d27d93584", "size": "1440", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Linq2TwitterSample/AppDelegate.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "6431"}]}} {"text": "package nl.soccar.library.test;\r\n\r\nimport java.time.LocalTime;\r\nimport java.util.Optional;\r\nimport nl.soccar.library.Event;\r\nimport nl.soccar.library.Game;\r\nimport nl.soccar.library.Player;\r\nimport nl.soccar.library.enumeration.CarType;\r\nimport nl.soccar.library.enumeration.Duration;\r\nimport nl.soccar.library.enumeration.EventType;\r\nimport nl.soccar.library.enumeration.GameStatus;\r\nimport nl.soccar.library.enumeration.Privilege;\r\nimport nl.soccar.ui.DisplayConstants;\r\nimport org.junit.Before;\r\nimport org.junit.Test;\r\nimport static org.junit.Assert.*;\r\n\r\n/**\r\n * JUnit test that tests the nl.soccar.library.Game class.\r\n *\r\n * @author PTS34A\r\n */\r\npublic class GameTest {\r\n\r\n // Declaration of test objects.\r\n private Player player;\r\n private Event event;\r\n private Game game;\r\n\r\n /**\r\n * Instantiation of test objects.\r\n */\r\n @Before\r\n public void setUp() {\r\n player = new Player(\"username\", Privilege.NORMAL, CarType.CASUAL);\r\n event = new Event(EventType.GOAL_RED, LocalTime.of(13, 55, 03), player);\r\n game = new Game();\r\n }\r\n\r\n /**\r\n * Tests the start method.\r\n */\r\n @Test\r\n public void startTest() {\r\n game.start();\r\n assertEquals(GameStatus.RUNNING, game.getStatus());\r\n }\r\n\r\n /**\r\n * Tests the stop method.\r\n */\r\n @Test\r\n public void 
stopTest() {\r\n game.stop();\r\n assertEquals(GameStatus.STOPPED, game.getStatus());\r\n }\r\n\r\n /**\r\n * Tests the addEvent and getEvents methods.\r\n */\r\n @Test\r\n public void addEventAndGetEventsTest() {\r\n game.addEvent(event);\r\n assertEquals(event, game.getEvents().get(0));\r\n }\r\n\r\n /**\r\n * Tests the getStarttime method.\r\n */\r\n @Test\r\n public void getStartTimeTest() {\r\n assertEquals(Optional.empty(), game.getStartTime());\r\n }\r\n\r\n /**\r\n * Tests the getStatus and setStatus methods.\r\n */\r\n @Test\r\n public void getStatusAndSetStatusTest() {\r\n assertEquals(GameStatus.STOPPED, game.getStatus());\r\n game.setStatus(GameStatus.SCORED);\r\n assertEquals(GameStatus.SCORED, game.getStatus());\r\n }\r\n\r\n /**\r\n * Tests the getDuration and setDuration methods.\r\n */\r\n @Test\r\n public void getDurationAndSetDurationTest() {\r\n game.setDuration(Duration.MINUTES_3);\r\n assertEquals(Duration.MINUTES_3, game.getDuration());\r\n }\r\n\r\n /**\r\n * Tests the getMap method.\r\n */\r\n @Test\r\n public void getMapTest() {\r\n assertEquals(0, Math.round(game.getMap().getSize().getX()));\r\n assertEquals(0, Math.round(game.getMap().getSize().getY()));\r\n assertEquals(Math.round(DisplayConstants.MAP_WIDTH), Math.round(game.getMap().getSize().getWidth()));\r\n assertEquals(Math.round(DisplayConstants.MAP_HEIGHT), Math.round(game.getMap().getSize().getHeight()));\r\n }\r\n \r\n /**\r\n * Tests the getLastBallTouched andSetLastBallTouched methods.\r\n */\r\n @Test\r\n public void getLastBallTouchedAndSetLastBallTouchedTest() {\r\n game.setLastBallTouched(player);\r\n assertEquals(player, game.getLastBallTouched());\r\n }\r\n \r\n}\r\n", "meta": {"content_hash": "efcaef1e7ed640ec248955d49738e987", "timestamp": "", "source": "github", "line_count": 114, "max_line_length": 111, "avg_line_length": 27.69298245614035, "alnum_prop": 0.6262274311054798, "repo_name": "PTS3-S34A/Soccar", "id": "c2bb3a3834ae9819ea3957941f620832347a8138", "size": 
"3157", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Soccar [Client]/test/nl/soccar/library/test/GameTest.java", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "1119"}, {"name": "Java", "bytes": "191451"}]}} {"text": "\r\n// tjh Headers\r\n#include \"../src/Publisher.h\"\r\n\r\n// System Headers\r\n#include \r\n#include \r\n#include \r\n#include \r\n\r\nnamespace tjh\r\n{\r\nnamespace example\r\n{\r\n\r\n//! A listener class which subscribes to an exterior publication.\r\nclass Client\r\n{\r\npublic:\r\n //! @note: There is no constraint requiring an IPublisher to be passed into the\r\n //! constructor of a listener, this pattern was simply used to demonstrate the \r\n //! mechanism of subscription. The shared_ptr callback and the IPublisher object\r\n //! just, at some point, need to exist in the same scope.\r\n Client(IPublisher& publisher)\r\n {\r\n // If the Client is going to subsribe to publications, it needs to create\r\n // a callback object. This callback object is a shared_ptr owned by the \r\n // Client; the callback will have the same life-span as the naked\r\n // \"this\" created within the bound function. This ensures that the\r\n // bound function is either valid, or its parent shared_ptr has\r\n // already been destructed because the object itself no longer exists.\r\n _callback =\r\n tjh::make_callback(&Client::handler, this);\r\n\r\n // The shared_ptr is provided to the IPublisher interface, and the IPublisher\r\n // interface holds a weak_ptr that is checked for validity before the callback is\r\n // fired.\r\n publisher.subscribe(_callback);\r\n }\r\n\r\nprivate:\r\n void handler(int a, const std::string& b)\r\n {\r\n // Published arguments!\r\n std::cout << __FUNCTION__ << \": a=\" << a << \", b=\" << b << std::endl;\r\n\r\n // Upon receiving a publication from the subscriber, the listener has determined\r\n // it no longer needs to exist, so it deletes itself. 
This deletion nulls the\r\n // weak_ptr held in the IPublisher object which will be cleaned up during the\r\n // next publication.\r\n // @note: The below line ONLY works if the object was heap allocated; if the \r\n // object was allocated on the stack, a double deletion will occur.\r\n delete this;\r\n }\r\n\r\n // The callback object enforces the requirement that this instance's lifetime is the\r\n // same as the contained function's lifetime.\r\n std::shared_ptr> _callback;\r\n};\r\n\r\nclass Primary : public IPublisher\r\n{\r\npublic:\r\n virtual void subscribe(std::weak_ptr > callback)\r\n {\r\n // This is a simple pass through to the composed publisher.\r\n _publisher.subscribe(callback);\r\n }\r\n\r\n void doSomething(int a, const std::string& b)\r\n {\r\n // Publish the contents of a and b to any listeners.\r\n _publisher.publish(a, b);\r\n }\r\n\r\nprivate:\r\n tjh::Publisher _publisher;\r\n};\r\n\r\n} // end example\r\n} // end tjh\r\n\r\n#endif // EXAMPLE_H", "meta": {"content_hash": "38729259009835b30d73bdffd602ffb1", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 97, "avg_line_length": 35.734939759036145, "alnum_prop": 0.6429534726904923, "repo_name": "tjhalva/Publisher", "id": "e5f194b73824ab8ce47569e6a7eb58272522308b", "size": "3004", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/Example.h", "mode": "33188", "license": "mit", "language": [{"name": "C++", "bytes": "13338"}]}} {"text": "module RspecWiki\n class Markdown\n\n attr_reader :context\n\n def initialize context\n @context = context\n end\n\n def anchor_link\n { anchor_group => \"* [#{title_with_status}](#{RspecWiki.configuration.wiki_page_url}##{title_anchor})\"}\n end\n\n def anchor_group\n return \"#{h3}#{context.metadata[:group]}\" if context.metadata[:group].present?\n described_class = context.described_class.split(\"::\").last\n described_class.gsub!('Controller', '')\n described_class.underscore.humanize.prepend(h3)\n end\n\n def 
print\n <<-EOC\n#{header}\n#{body}\n#{footer}\n EOC\n end\n\n def header\n \"#{h3}#{title}\"\n end\n\n def body\n <<-EOC\n#{description}\n#{url} | #{request_method}\n\n#{parameters}\n\nResponse\n#{status}\n#{javascript(context.response_body)}\n EOC\n end\n\n def footer\n <<-EOC\n#{ back_to_table_of_content }\n\n#{ '='*100 }\n EOC\n end\n\n\n def description\n <<-EOC\n`#{context.content}`\n EOC\n end\n\n def status\n \"Status: \\`#{context.response_status}\\`\"\n end\n\n def parameters\n \"Parameters: #{format_params}\"\n end\n\n def url\n \"URL: __#{context.request_path}__\"\n end\n\n def request_method\n \"Method: \\`#{context.request_method}\\`\"\n end\n\n private\n\n def title\n (context.metadata[:title] || context.content).humanize\n end\n\n def title_with_status\n status = context.response_success? ? \"(SUCCESS)\" : \"(ERROR)\"\n \"#{title} #{status}\"\n end\n\n def title_anchor\n title.downcase.gsub(/\\s+/, '-')\n end\n\n def format_params\n context.params.collect do |key, value|\n \"\\`#{key}\\` => #{value}\"\n end.join(\"\\n\\n\")\n end\n\n def h3\n '###'\n end\n\n def javascript content\n <<-EOC\n```javascript\n#{content}\n```\n EOC\n end\n\n def back_to_table_of_content\n \"[Back to table of contents](#{RspecWiki.configuration.wiki_page_url}#table-of-contents)\"\n end\n end\nend", "meta": {"content_hash": "8be93f2c1b0eacfea0fd5da8a1eefd47", "timestamp": "", "source": "github", "line_count": 114, "max_line_length": 109, "avg_line_length": 17.13157894736842, "alnum_prop": 0.5734767025089605, "repo_name": "nlds90/rspec_wiki", "id": "91ac239716d4a828d05ed2680bc618fa37860b8b", "size": "1953", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/rspec_wiki/formatter/markdown.rb", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "683"}, {"name": "HTML", "bytes": "4883"}, {"name": "JavaScript", "bytes": "599"}, {"name": "Ruby", "bytes": "26134"}]}} {"text": "ACCEPTED\n\n#### According to\nIndex Fungorum\n\n#### 
Published in\nnull\n\n#### Original name\nGraphina chloroleuca M\u00fcll. Arg.\n\n### Remarks\nnull", "meta": {"content_hash": "b6d01ef3643e7f4bb4be5fd89fb77ee0", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 10.461538461538462, "alnum_prop": 0.7058823529411765, "repo_name": "mdoering/backbone", "id": "808b6cdb5c3858b5bb710b66ea453efb4233e682", "size": "193", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Fungi/Ascomycota/Lecanoromycetes/Ostropales/Graphidaceae/Graphina/Graphina chloroleuca/README.md", "mode": "33188", "license": "apache-2.0", "language": []}} {"text": "//===----------------------------------------------------------------------===//\n//\n// Peloton\n//\n// abstract_scan_executor.h\n//\n// Identification: src/include/executor/abstract_scan_executor.h\n//\n// Copyright (c) 2015-17, Carnegie Mellon University Database Group\n//\n//===----------------------------------------------------------------------===//\n\n#pragma once\n\n#include \"executor/abstract_executor.h\"\n#include \"planner/abstract_scan_plan.h\"\n#include \"type/types.h\"\n\nnamespace peloton {\nnamespace executor {\n\n/**\n * Super class for different kinds of scan executor.\n * It provides common codes for all kinds of scan:\n * evaluate generic predicates and simple projections.\n */\nclass AbstractScanExecutor : public AbstractExecutor {\n public:\n AbstractScanExecutor(const AbstractScanExecutor &) = delete;\n AbstractScanExecutor &operator=(const AbstractScanExecutor &) = delete;\n AbstractScanExecutor(AbstractScanExecutor &&) = delete;\n AbstractScanExecutor &operator=(AbstractScanExecutor &&) = delete;\n\n explicit AbstractScanExecutor(const planner::AbstractPlan *node,\n ExecutorContext *executor_context);\n\n virtual void UpdatePredicate(const std::vector &column_ids\n UNUSED_ATTRIBUTE,\n const std::vector &values\n UNUSED_ATTRIBUTE) {}\n\n virtual void ResetState() {}\n\n protected:\n bool 
DInit();\n\n virtual bool DExecute() = 0;\n\n protected:\n //===--------------------------------------------------------------------===//\n // Plan Info\n //===--------------------------------------------------------------------===//\n\n /** @brief Selection predicate. */\n const expression::AbstractExpression *predicate_ = nullptr;\n\n /** @brief Columns from tile group to be added to logical tile output. */\n std::vector column_ids_;\n};\n\n} // namespace executor\n} // namespace peloton\n", "meta": {"content_hash": "2960e75f46996173b93b6cc917f7d2b6", "timestamp": "", "source": "github", "line_count": 62, "max_line_length": 80, "avg_line_length": 31.629032258064516, "alnum_prop": 0.5599184089750128, "repo_name": "prashasthip/peloton", "id": "44269c196dcea5898cbb03956693af6e9aef5d43", "size": "1961", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/include/executor/abstract_scan_executor.h", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "44625"}, {"name": "C++", "bytes": "5767040"}, {"name": "CMake", "bytes": "109338"}, {"name": "Java", "bytes": "44640"}, {"name": "Objective-C", "bytes": "4030"}, {"name": "PLpgSQL", "bytes": "5855"}, {"name": "Python", "bytes": "63317"}, {"name": "Ruby", "bytes": "1310"}, {"name": "Shell", "bytes": "14149"}]}} {"text": "\"\"\"\nTest for controlnet.py\n\"\"\"\n\nimport unittest\nfrom mininet.util import pexpect\n\nfrom sys import stdout\n\nclass testControlNet( unittest.TestCase ):\n\n prompt = 'mininet>'\n\n def testPingall( self ):\n \"Simple pingall test that verifies 0% packet drop in data network\"\n p = pexpect.spawn( 'python -m mininet.examples.controlnet', logfile=stdout)\n p.expect( self.prompt )\n p.sendline( 'pingall' )\n p.expect ( '(\\d+)% dropped' )\n percent = int( p.match.group( 1 ) ) if p.match else -1\n self.assertEqual( percent, 0 )\n p.expect( self.prompt )\n p.sendline( 'exit' )\n p.wait()\n\n def testFailover( self ):\n \"Kill controllers and verify that 
switch, s1, fails over properly\"\n count = 1\n p = pexpect.spawn( 'python -m mininet.examples.controlnet', logfile=stdout )\n p.expect( self.prompt )\n lp = pexpect.spawn( 'tail -f /tmp/s1-ofp.log', logfile=stdout )\n lp.expect( 'tcp:\\d+\\.\\d+\\.\\d+\\.(\\d+):\\d+: connected' )\n ip = int( lp.match.group( 1 ) )\n self.assertEqual( count, ip )\n count += 1\n for c in [ 'c0', 'c1' ]:\n p.sendline( '%s ifconfig %s-eth0 down' % ( c, c) )\n p.expect( self.prompt )\n lp.expect( 'tcp:\\d+\\.\\d+\\.\\d+\\.(\\d+):\\d+: connected' )\n ip = int( lp.match.group( 1 ) )\n self.assertEqual( count, ip )\n count += 1\n p.sendline( 'exit' )\n p.wait()\n\nif __name__ == '__main__':\n unittest.main()\n", "meta": {"content_hash": "dcd1b25ea0159b021904f1dcfe18f9f6", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 84, "avg_line_length": 32.340425531914896, "alnum_prop": 0.5473684210526316, "repo_name": "mininet/mininet", "id": "7e7ce9a6ba6f8fc8254b1ac5348350c7707a9ff2", "size": "1543", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "examples/test/test_controlnet.py", "mode": "33261", "license": "bsd-3-clause", "language": [{"name": "C", "bytes": "49941"}, {"name": "Makefile", "bytes": "2620"}, {"name": "Python", "bytes": "305497"}, {"name": "Shell", "bytes": "42436"}]}} {"text": "\npackage org.apache.activemq.plugin.util;\n\nimport java.util.Set;\n\nimport org.apache.activemq.broker.region.Destination;\nimport org.apache.activemq.broker.region.DestinationFilter;\nimport org.apache.activemq.broker.region.Queue;\nimport org.apache.activemq.broker.region.RegionBroker;\nimport org.apache.activemq.broker.region.Topic;\nimport org.apache.activemq.broker.region.policy.PolicyEntry;\nimport org.apache.activemq.broker.region.policy.PolicyMap;\nimport org.apache.activemq.plugin.AbstractRuntimeConfigurationBroker;\n\n\npublic class PolicyEntryUtil {\n\n\n /**\n * Find a matching PolicyEntry by looking up the Set of entries from the map and\n * then 
comparing the destination to find the exact match. This lets us be able to\n * find the correct policy entry to update even though there might be multiple that\n * are returned from the get method of the PolicyMap.\n *\n * @param runtimeBroker\n * @param entry\n * @return\n */\n public static PolicyEntry findEntryByDestination(AbstractRuntimeConfigurationBroker runtimeBroker,\n PolicyEntry entry) {\n\n PolicyMap existingMap = runtimeBroker.getBrokerService().getDestinationPolicy();\n @SuppressWarnings(\"unchecked\")\n Set existingEntries = existingMap.get(entry.getDestination());\n\n //First just look up by the destination type to see if anything matches\n PolicyEntry existingEntry = null;\n for (PolicyEntry ee: existingEntries) {\n if (ee.getDestination().equals(entry.getDestination())) {\n existingEntry = ee;\n break;\n }\n }\n return existingEntry;\n }\n\n /**\n * Utility to properly apply an updated policy entry to all existing destinations that\n * match this entry. The destination will only be updated if the policy is the exact\n * policy (most specific) that matches the destination.\n *\n * @param runtimeBroker\n * @param updatedEntry\n */\n public static void applyRetrospectively(AbstractRuntimeConfigurationBroker runtimeBroker,\n PolicyEntry updatedEntry) {\n PolicyEntryUtil.applyRetrospectively(runtimeBroker, updatedEntry, null);\n }\n\n /**\n *\n * Utility to properly apply an updated policy entry to all existing destinations that\n * match this entry. The destination will only be updated if the policy is the exact\n * policy (most specific) that matches the destination.\n *\n * The includedProperties List is optional and is used to specify a list of properties\n * to apply retrospectively to the matching destinations. This allows only certain properties\n * to be reapplied. 
If the list is null then all properties will be applied.\n *\n * @param runtimeBroker\n * @param updatedEntry\n * @param includedProperties\n */\n public static void applyRetrospectively(AbstractRuntimeConfigurationBroker runtimeBroker,\n PolicyEntry updatedEntry, Set includedProperties) {\n RegionBroker regionBroker = (RegionBroker) runtimeBroker.getBrokerService().getRegionBroker();\n for (Destination destination : regionBroker.getDestinations(updatedEntry.getDestination())) {\n //Look up the policy that applies to the destination\n PolicyEntry specificyPolicy = regionBroker.getDestinationPolicy().getEntryFor(\n destination.getActiveMQDestination());\n\n //only update the destination if it matches the specific policy being updated\n //currently just an identity check which is what we want\n if (updatedEntry.equals(specificyPolicy)){\n Destination target = destination;\n while (target instanceof DestinationFilter) {\n target = ((DestinationFilter)target).getNext();\n }\n //If we are providing a list of properties to set then use them\n //to set eligible properties that are in the includedProperties list\n if (target.getActiveMQDestination().isQueue()) {\n updatedEntry.update((Queue) target, includedProperties);\n } else if (target.getActiveMQDestination().isTopic()) {\n updatedEntry.update((Topic) target, includedProperties);\n }\n runtimeBroker.debug(\"applied update to:\" + target);\n }\n }\n }\n}\n", "meta": {"content_hash": "e5788ae49aded600bd76b08b5bfa8db1", "timestamp": "", "source": "github", "line_count": 100, "max_line_length": 102, "avg_line_length": 44.22, "alnum_prop": 0.6840796019900498, "repo_name": "chirino/activemq", "id": "64d752810dda8af64641ce340e80c1bc5389316c", "size": "5225", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "activemq-runtime-config/src/main/java/org/apache/activemq/plugin/util/PolicyEntryUtil.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "17712"}, {"name": 
"C#", "bytes": "27536"}, {"name": "C++", "bytes": "17404"}, {"name": "CSS", "bytes": "34997"}, {"name": "HTML", "bytes": "158883"}, {"name": "Java", "bytes": "25304453"}, {"name": "JavaScript", "bytes": "438641"}, {"name": "PHP", "bytes": "3665"}, {"name": "Perl", "bytes": "4128"}, {"name": "Protocol Buffer", "bytes": "13867"}, {"name": "Python", "bytes": "14547"}, {"name": "Ruby", "bytes": "6594"}, {"name": "Scala", "bytes": "302023"}, {"name": "Shell", "bytes": "87001"}]}} {"text": "module Syntax where\n\ndata Type =\n TyBool\n | TyNat\n deriving (Show, Eq)\n\ndata Term =\n TmTrue\n | TmFalse\n | TmIf Term Term Term\n | TmZero\n | TmSucc Term\n | TmPred Term\n | TmIsZero Term\n deriving Show\n\ntype Error = String\n", "meta": {"content_hash": "9fb7a16903e2a0e71f1533024c613e85", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 29, "avg_line_length": 13.88888888888889, "alnum_prop": 0.608, "repo_name": "kellino/TypeSystems", "id": "030956f76a40bfff9cad12cb7446bef6451120a3", "size": "250", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "typedArith/Syntax.hs", "mode": "33188", "license": "mit", "language": [{"name": "Agda", "bytes": "27249"}, {"name": "Haskell", "bytes": "103038"}]}} {"text": "getHttpResponse()->setCode($code);\n\n $this->sendJson([\n 'success' => false,\n 'error' => $code,\n 'message' => $message\n ]);\n }\n\n}", "meta": {"content_hash": "4d242af7c0728bb13a675e53ae740a3e", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 82, "avg_line_length": 19.5, "alnum_prop": 0.5920745920745921, "repo_name": "vitush93/rocnikac", "id": "8f9b7b86302ee26da7fec9f36ac31e32c5a0f5de", "size": "429", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "app/ApiModule/presenters/BasePresenter.php", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ApacheConf", "bytes": "884"}, {"name": "CSS", "bytes": "533582"}, {"name": "HTML", "bytes": "149898"}, {"name": "JavaScript", 
"bytes": "1152173"}, {"name": "PHP", "bytes": "202353"}, {"name": "Shell", "bytes": "165"}]}} {"text": " 0 and lastactivity < (\".getTimeStamp().\"- \".$chatroomTimeout.\" )\");\n\t$query = mysqli_query($GLOBALS['dbh'],$sql);\n\tif (defined('DEV_MODE') && DEV_MODE == '1') { echo mysqli_error($GLOBALS['dbh']); }\n}\n\nfunction chatroommessages() {\n\t$sql = (\"delete from cometchat_chatroommessages where sent < (\".getTimeStamp().\"-10800)\");\n\t$query = mysqli_query($GLOBALS['dbh'],$sql);\n\tif (defined('DEV_MODE') && DEV_MODE == '1') { echo mysqli_error($GLOBALS['dbh']); }\n}\n\nfunction chatroomsusers() {\n\t$sql = (\"delete from cometchat_chatrooms_users where lastactivity < (\".getTimeStamp().\"-3600)\");\n\t$query = mysqli_query($GLOBALS['dbh'],$sql);\n\tif (defined('DEV_MODE') && DEV_MODE == '1') { echo mysqli_error($GLOBALS['dbh']); }\n}\n", "meta": {"content_hash": "8e0b23f3352032d87953dc785769ea1b", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 128, "avg_line_length": 39.35294117647059, "alnum_prop": 0.6434977578475336, "repo_name": "networksoft/erp.wellnet", "id": "3c6df67a6c6cd7a1f410f8f7f952e99499aa8f3b", "size": "1338", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "cometchat/modules/chatrooms/cron.php", "mode": "33261", "license": "apache-2.0", "language": [{"name": "ApacheConf", "bytes": "383"}, {"name": "Batchfile", "bytes": "310"}, {"name": "C", "bytes": "479526"}, {"name": "CSS", "bytes": "916036"}, {"name": "Groff", "bytes": "60910"}, {"name": "HTML", "bytes": "7195685"}, {"name": "JavaScript", "bytes": "1616267"}, {"name": "Makefile", "bytes": "16519"}, {"name": "PHP", "bytes": "12824429"}, {"name": "Perl", "bytes": "50950"}, {"name": "Shell", "bytes": "27957"}]}} {"text": "package io.advantageous.qbit.service.dispatchers;\n\nimport io.advantageous.qbit.service.ServiceBuilder;\nimport io.advantageous.qbit.service.ServiceQueue;\n\nimport java.util.concurrent.TimeUnit;\nimport 
java.util.function.Supplier;\n\n\npublic class ShardedServiceWorkerBuilder {\n\n /**\n * Shard rule, if you don't set a shard rule, you get shard of first argument.\n * Shard rule by default.\n */\n private ShardRule shardRule;\n private ServiceBuilder serviceBuilder;\n private ServiceWorkers serviceDispatcher;\n private int workerCount = -1;\n private int flushInterval = -1;\n private TimeUnit timeUnit = TimeUnit.MILLISECONDS;\n private Supplier serviceObjectSupplier;\n\n public static ShardedServiceWorkerBuilder shardedServiceWorkerBuilder() {\n return new ShardedServiceWorkerBuilder();\n }\n\n public Supplier getServiceObjectSupplier() {\n return serviceObjectSupplier;\n }\n\n public ShardedServiceWorkerBuilder setServiceObjectSupplier(Supplier serviceObjectSupplier) {\n this.serviceObjectSupplier = serviceObjectSupplier;\n return this;\n }\n\n public int getWorkerCount() {\n if (workerCount == -1) {\n workerCount = Runtime.getRuntime().availableProcessors();\n }\n return workerCount;\n }\n\n public ShardedServiceWorkerBuilder setWorkerCount(int workerCount) {\n this.workerCount = workerCount;\n return this;\n }\n\n public ShardRule getShardRule() {\n if (shardRule == null) {\n final int workerCount = this.getWorkerCount();\n shardRule = (methodName, methodArgs, numWorkers) -> methodArgs[0].hashCode() % workerCount;\n }\n return shardRule;\n }\n\n public ShardedServiceWorkerBuilder setShardRule(ShardRule shardRule) {\n this.shardRule = shardRule;\n return this;\n }\n\n public ServiceBuilder getServiceBuilder() {\n if (serviceBuilder == null) {\n serviceBuilder = ServiceBuilder.serviceBuilder();\n return serviceBuilder;\n }\n return serviceBuilder.copy();\n }\n\n public ShardedServiceWorkerBuilder setServiceBuilder(ServiceBuilder serviceBuilder) {\n this.serviceBuilder = serviceBuilder;\n return this;\n }\n\n\n public ServiceWorkers getServiceDispatcher() {\n if (serviceDispatcher == null) {\n\n if (this.flushInterval == -1) {\n serviceDispatcher = 
ServiceWorkers.shardedWorkers(getShardRule());\n } else {\n serviceDispatcher = ServiceWorkers.shardedWorkers(getFlushInterval(),\n getTimeUnit(), getShardRule());\n }\n }\n return serviceDispatcher;\n }\n\n public ShardedServiceWorkerBuilder setServiceDispatcher(ServiceWorkers serviceDispatcher) {\n this.serviceDispatcher = serviceDispatcher;\n return this;\n }\n\n public int getFlushInterval() {\n return flushInterval;\n }\n\n public ShardedServiceWorkerBuilder setFlushInterval(int flushInterval) {\n this.flushInterval = flushInterval;\n return this;\n }\n\n public TimeUnit getTimeUnit() {\n return timeUnit;\n }\n\n public ShardedServiceWorkerBuilder setTimeUnit(TimeUnit timeUnit) {\n this.timeUnit = timeUnit;\n return this;\n }\n\n public ServiceMethodDispatcher build() {\n\n if (getServiceObjectSupplier() == null) {\n throw new IllegalStateException(\"serviceObjectSupplier must be set\");\n }\n final ServiceBuilder serviceBuilder = getServiceBuilder();\n\n for (int index = 0; index < getWorkerCount(); index++) {\n final ServiceQueue serviceQueue = serviceBuilder\n .setServiceObject(getServiceObjectSupplier().get()).build();\n getServiceDispatcher().addServices(serviceQueue);\n\n }\n return getServiceDispatcher();\n\n }\n}\n", "meta": {"content_hash": "d4cab8ca09c8232b1e52995429ac3ec7", "timestamp": "", "source": "github", "line_count": 128, "max_line_length": 105, "avg_line_length": 30.53125, "alnum_prop": 0.6665813715455476, "repo_name": "advantageous/qbit", "id": "39f38897bffec165bc1dd1b51ed008adfbaecca0", "size": "3908", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "qbit/core/src/main/java/io/advantageous/qbit/service/dispatchers/ShardedServiceWorkerBuilder.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "HTML", "bytes": "1064"}, {"name": "Java", "bytes": "2713914"}, {"name": "JavaScript", "bytes": "1141"}, {"name": "Lua", "bytes": "110"}, {"name": "Shell", "bytes": "181"}]}} {"text": "routeMatchers = 
$routeMatchers;\n }\n\n /**\n * @inheritdoc\n */\n public function compile(ParsedRoute $route, Request $request) : CompiledRoute\n {\n $hostMatches = [];\n $pathMatches = [];\n\n foreach ($this->routeMatchers as $routeMatcher) {\n if ($routeMatcher instanceof HostMatcher && !$routeMatcher->isMatch($route, $request, $hostMatches)) {\n return new CompiledRoute($route, false);\n } elseif ($routeMatcher instanceof PathMatcher && !$routeMatcher->isMatch($route, $request, $pathMatches)) {\n return new CompiledRoute($route, false);\n } elseif (!$routeMatcher->isMatch($route, $request)) {\n return new CompiledRoute($route, false);\n }\n }\n\n // If we've gotten here, then all the matchers matched\n $pathVars = array_merge($hostMatches, $pathMatches);\n\n return new CompiledRoute($route, true, $pathVars);\n }\n}\n", "meta": {"content_hash": "432d9d45e55b086ec1390ece1a209d21", "timestamp": "", "source": "github", "line_count": 53, "max_line_length": 120, "avg_line_length": 31.735849056603772, "alnum_prop": 0.6563614744351962, "repo_name": "opulencephp/Opulence", "id": "83a3bfc80eb3d3b7b2c2823a137d52215e81641e", "size": "1864", "binary": false, "copies": "1", "ref": "refs/heads/1.2", "path": "src/Opulence/Routing/Routes/Compilers/Compiler.php", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "2098"}, {"name": "Hack", "bytes": "979"}, {"name": "PHP", "bytes": "2407691"}, {"name": "Shell", "bytes": "1733"}]}} {"text": "/**\n * Generate HTTP headers from auth credentials\n * @param {AuthCredentials} props\n * @return {Promise} - promise that resolves with the headers\n */\nexport function getAuthenticationHeaders(props) {\n if (!props) {\n throw new Error('Must provide authentication information.');\n }\n\n // JWT always have priority over a simple app token\n if (props.jwt) {\n return {\n 'Authorization': 'Bearer ' + props.jwt\n };\n } else if (props.appToken) {\n return {\n 'app-token': props.appToken\n };\n }\n\n throw new Error('Must provide a JWT 
or a app token');\n}\n", "meta": {"content_hash": "83384ec27e3f4ced2c1873f6906535fe", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 68, "avg_line_length": 27.17391304347826, "alnum_prop": 0.6016, "repo_name": "bobhami/payrollbot", "id": "4c4eef679c0c2d72a65a89d8df74bb2fc21691d6", "size": "625", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "node_modules/smooch-bot/node_modules/smooch-core/src/utils/auth.js", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "213"}, {"name": "JavaScript", "bytes": "7625"}]}} {"text": "\n\n 4.0.0\n\n org.apache\n rocketmq-hbase\n pom\n 1.0-SNAPSHOT\n \n rocketmq-hbase-sink\n rocketmq-hbase-source\n \n\n \n UTF-8\n UTF-8\n\n false\n true\n 1.8\n 1.8\n 4.2.0\n 1.4.4\n \n\n \n \n org.apache.rocketmq\n rocketmq-client\n ${rocketmq.version}\n \n \n org.slf4j\n slf4j-api\n 1.7.5\n \n \n org.apache.hbase\n hbase-server\n ${hbase.version}\n \n\n \n \n org.apache.hbase\n hbase-testing-util\n ${hbase.version}\n test\n \n \n org.apache.rocketmq\n rocketmq-namesrv\n ${rocketmq.version}\n test\n \n \n ch.qos.logback\n logback-classic\n \n \n \n \n org.apache.rocketmq\n rocketmq-broker\n ${rocketmq.version}\n test\n \n \n ch.qos.logback\n logback-classic\n \n \n io.netty\n netty-tcnative\n \n \n \n\n \n\n \n \n \n org.apache.maven.plugins\n maven-shade-plugin\n 3.1.1\n \n \n package\n \n shade\n \n \n \n \n \n \n \n maven-checkstyle-plugin\n 2.17\n \n \n verify\n verify\n \n style/rmq_checkstyle.xml\n UTF-8\n true\n true\n false\n false\n \n \n check\n \n \n \n \n \n \n\n", "meta": {"content_hash": "2db8e8db004b0f67e2ed7c5523164a2a", "timestamp": "", "source": "github", "line_count": 123, "max_line_length": 108, "avg_line_length": 37.6260162601626, "alnum_prop": 0.5084269662921348, "repo_name": "StyleTang/incubator-rocketmq-externals", "id": "b2ba196528df2805f6e6a3071aaffdff782fbbad", "size": "4628", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": 
"rocketmq-hbase/pom.xml", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "768"}, {"name": "C", "bytes": "1804"}, {"name": "C++", "bytes": "1075014"}, {"name": "CSS", "bytes": "60479"}, {"name": "Go", "bytes": "276304"}, {"name": "HTML", "bytes": "86942"}, {"name": "Java", "bytes": "554421"}, {"name": "JavaScript", "bytes": "76515"}, {"name": "Makefile", "bytes": "2349"}, {"name": "PHP", "bytes": "29834"}, {"name": "Python", "bytes": "17496"}, {"name": "Scala", "bytes": "60754"}, {"name": "Shell", "bytes": "2982"}]}} {"text": "class CertDbInitializer;\n\n// Factory that manages creation of CertDbInitializer. The initialization is\n// handled differently depending on the environment:\n// * On real ChromeOS devices with TPMs:\n// ** if the user is affiliated: CertDbInitializer is automatically\n// created right after its profile is created. It receives a path to software\n// cert database and slot IDs for Chaps from Ash and uses them.\n// ** if the user is not affiliated: TODO(b/197082753): not officially supported\n// yet, handled as if there's no TPM.\n// * In emulated environments (e.g. when running ChromeOS on Linux) and in the\n// future on ChromeOS without TPMs: Same as real ChromeOS, but Ash only sends\n// the software database path.\n// * In browsertests: CertDbInitializer is not created by default because it\n// requires crosapi mojo interface. It is configured through the\n// `SetCreateWithBrowserContextForTesting()` method. 
This can be overridden by\n// individual tests or they can create their own instances of the service.\n// * In unittests: CertDbInitializer is not created by default (see\n// `ServiceIsNULLWhileTesting()`).\nclass CertDbInitializerFactory : public BrowserContextKeyedServiceFactory {\n public:\n static CertDbInitializerFactory* GetInstance();\n static CertDbInitializer* GetForBrowserContext(\n content::BrowserContext* context);\n\n // Configures whether CertDbInitializer should be automatically created on\n // profile creation in browser tests.\n // Currently it is configured that in browser tests the service is not created\n // by default. Individual tests can override it when needed.\n void SetCreateWithBrowserContextForTesting(bool should_create);\n\n private:\n friend class base::NoDestructor;\n\n CertDbInitializerFactory();\n ~CertDbInitializerFactory() override = default;\n\n // BrowserStateKeyedServiceFactory\n bool ServiceIsCreatedWithBrowserContext() const override;\n KeyedService* BuildServiceInstanceFor(\n content::BrowserContext* context) const override;\n bool ServiceIsNULLWhileTesting() const override;\n content::BrowserContext* GetBrowserContextToUse(\n content::BrowserContext* context) const override;\n\n bool should_create_with_browser_context_ = true;\n};\n\n#endif // CHROME_BROWSER_LACROS_CERT_DB_INITIALIZER_FACTORY_H_\n", "meta": {"content_hash": "61ceabbd404882a9f131dabf6d3a6508", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 80, "avg_line_length": 46.795918367346935, "alnum_prop": 0.7836894897514174, "repo_name": "ric2b/Vivaldi-browser", "id": "f0bc2dfa2893f0641a44664dca58ae0733ea7e0e", "size": "2699", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "chromium/chrome/browser/lacros/cert_db_initializer_factory.h", "mode": "33188", "license": "bsd-3-clause", "language": []}} {"text": "module Azure::ARM::ServiceBus\n module Models\n #\n # Parameters supplied to the Create Or Update Subscription 
operation.\n #\n class SubscriptionCreateOrUpdateParameters\n\n include MsRestAzure\n\n # @return [String] Subscription data center location.\n attr_accessor :location\n\n # @return [String] Resource manager type of the resource.\n attr_accessor :type\n\n # @return [DateTime] Last time there was a receive request to this\n # subscription.\n attr_accessor :accessed_at\n\n # @return [String] TimeSpan idle interval after which the topic is\n # automatically deleted. The minimum duration is 5 minutes.\n attr_accessor :auto_delete_on_idle\n\n # @return [MessageCountDetails]\n attr_accessor :count_details\n\n # @return [DateTime] Exact time the message was created.\n attr_accessor :created_at\n\n # @return [String] Default message time to live value. This is the\n # duration after which the message expires, starting from when the\n # message is sent to Service Bus. This is the default value used when\n # TimeToLive is not set on a message itself.\n attr_accessor :default_message_time_to_live\n\n # @return [Boolean] Value that indicates whether a subscription has dead\n # letter support on filter evaluation exceptions.\n attr_accessor :dead_lettering_on_filter_evaluation_exceptions\n\n # @return [Boolean] Value that indicates whether a subscription has dead\n # letter support when a message expires.\n attr_accessor :dead_lettering_on_message_expiration\n\n # @return [Boolean] Value that indicates whether server-side batched\n # operations are enabled.\n attr_accessor :enable_batched_operations\n\n # @return [EntityAvailabilityStatus] Entity availability status for the\n # topic. 
Possible values include: 'Available', 'Limited', 'Renaming',\n # 'Restoring', 'Unknown'\n attr_accessor :entity_availability_status\n\n # @return [Boolean] Value that indicates whether the entity description\n # is read-only.\n attr_accessor :is_read_only\n\n # @return [String] The lock duration time span for the subscription.\n attr_accessor :lock_duration\n\n # @return [Integer] Number of maximum deliveries.\n attr_accessor :max_delivery_count\n\n # @return [Integer] Number of messages.\n attr_accessor :message_count\n\n # @return [Boolean] Value indicating if a subscription supports the\n # concept of sessions.\n attr_accessor :requires_session\n\n # @return [EntityStatus] Enumerates the possible values for the status of\n # a messaging entity. Possible values include: 'Active', 'Creating',\n # 'Deleting', 'Disabled', 'ReceiveDisabled', 'Renaming', 'Restoring',\n # 'SendDisabled', 'Unknown'\n attr_accessor :status\n\n # @return [DateTime] The exact time the message was updated.\n attr_accessor :updated_at\n\n\n #\n # Mapper for SubscriptionCreateOrUpdateParameters class as Ruby Hash.\n # This will be used for serialization/deserialization.\n #\n def self.mapper()\n {\n required: false,\n serialized_name: 'SubscriptionCreateOrUpdateParameters',\n type: {\n name: 'Composite',\n class_name: 'SubscriptionCreateOrUpdateParameters',\n model_properties: {\n location: {\n required: true,\n serialized_name: 'location',\n type: {\n name: 'String'\n }\n },\n type: {\n required: false,\n serialized_name: 'type',\n type: {\n name: 'String'\n }\n },\n accessed_at: {\n required: false,\n serialized_name: 'properties.accessedAt',\n type: {\n name: 'DateTime'\n }\n },\n auto_delete_on_idle: {\n required: false,\n serialized_name: 'properties.autoDeleteOnIdle',\n type: {\n name: 'String'\n }\n },\n count_details: {\n required: false,\n serialized_name: 'properties.countDetails',\n type: {\n name: 'Composite',\n class_name: 'MessageCountDetails'\n }\n },\n created_at: {\n 
required: false,\n serialized_name: 'properties.createdAt',\n type: {\n name: 'DateTime'\n }\n },\n default_message_time_to_live: {\n required: false,\n serialized_name: 'properties.defaultMessageTimeToLive',\n type: {\n name: 'String'\n }\n },\n dead_lettering_on_filter_evaluation_exceptions: {\n required: false,\n serialized_name: 'properties.deadLetteringOnFilterEvaluationExceptions',\n type: {\n name: 'Boolean'\n }\n },\n dead_lettering_on_message_expiration: {\n required: false,\n serialized_name: 'properties.deadLetteringOnMessageExpiration',\n type: {\n name: 'Boolean'\n }\n },\n enable_batched_operations: {\n required: false,\n serialized_name: 'properties.enableBatchedOperations',\n type: {\n name: 'Boolean'\n }\n },\n entity_availability_status: {\n required: false,\n serialized_name: 'properties.entityAvailabilityStatus',\n type: {\n name: 'Enum',\n module: 'EntityAvailabilityStatus'\n }\n },\n is_read_only: {\n required: false,\n serialized_name: 'properties.isReadOnly',\n type: {\n name: 'Boolean'\n }\n },\n lock_duration: {\n required: false,\n serialized_name: 'properties.lockDuration',\n type: {\n name: 'String'\n }\n },\n max_delivery_count: {\n required: false,\n serialized_name: 'properties.maxDeliveryCount',\n type: {\n name: 'Number'\n }\n },\n message_count: {\n required: false,\n serialized_name: 'properties.messageCount',\n type: {\n name: 'Number'\n }\n },\n requires_session: {\n required: false,\n serialized_name: 'properties.requiresSession',\n type: {\n name: 'Boolean'\n }\n },\n status: {\n required: false,\n serialized_name: 'properties.status',\n type: {\n name: 'Enum',\n module: 'EntityStatus'\n }\n },\n updated_at: {\n required: false,\n serialized_name: 'properties.updatedAt',\n type: {\n name: 'DateTime'\n }\n }\n }\n }\n }\n end\n end\n end\nend\n", "meta": {"content_hash": "463cf306d2ba57e2d7af248322101698", "timestamp": "", "source": "github", "line_count": 227, "max_line_length": 88, "avg_line_length": 33.44933920704846, 
"alnum_prop": 0.5073093638877914, "repo_name": "devigned/azure-sdk-for-ruby", "id": "bbdaacd695508b209e59bb962749182086822315", "size": "7764", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "management/azure_mgmt_service_bus/lib/generated/azure_mgmt_service_bus/models/subscription_create_or_update_parameters.rb", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Ruby", "bytes": "11168894"}, {"name": "Shell", "bytes": "157"}]}} {"text": "'use strict';\n/* jshint maxlen: false */\n\n/**\n * @ngdoc overview\n * @name ngAnimate\n * @description\n *\n * # ngAnimate\n *\n * The `ngAnimate` module provides support for JavaScript, CSS3 transition and CSS3 keyframe animation hooks within existing core and custom directives.\n *\n * {@installModule animate}\n *\n *
\n *\n * # Usage\n *\n * To see animations in action, all that is required is to define the appropriate CSS classes\n * or to register a JavaScript animation via the myModule.animation() function. The directives that support animation automatically are:\n * `ngRepeat`, `ngInclude`, `ngIf`, `ngSwitch`, `ngShow`, `ngHide`, `ngView` and `ngClass`. Custom directives can take advantage of animation\n * by using the `$animate` service.\n *\n * Below is a more detailed breakdown of the supported animation events provided by pre-existing ng directives:\n *\n * | Directive | Supported Animations |\n * |---------------------------------------------------------- |----------------------------------------------------|\n * | {@link ng.directive:ngRepeat#usage_animations ngRepeat} | enter, leave and move |\n * | {@link ngRoute.directive:ngView#usage_animations ngView} | enter and leave |\n * | {@link ng.directive:ngInclude#usage_animations ngInclude} | enter and leave |\n * | {@link ng.directive:ngSwitch#usage_animations ngSwitch} | enter and leave |\n * | {@link ng.directive:ngIf#usage_animations ngIf} | enter and leave |\n * | {@link ng.directive:ngClass#usage_animations ngClass} | add and remove |\n * | {@link ng.directive:ngShow#usage_animations ngShow & ngHide} | add and remove (the ng-hide class value) |\n *\n * You can find out more information about animations upon visiting each directive page.\n *\n * Below is an example of how to apply animations to a directive that supports animation hooks:\n *\n *
\n * \n *\n * \n * \n * 
\n *\n * Keep in mind that if an animation is running, any child elements cannot be animated until the parent element's\n * animation has completed.\n *\n *

CSS-defined Animations

\n * The animate service will automatically apply two CSS classes to the animated element and these two CSS classes\n * are designed to contain the start and end CSS styling. Both CSS transitions and keyframe animations are supported\n * and can be used to play along with this naming structure.\n *\n * The following code below demonstrates how to perform animations using **CSS transitions** with Angular:\n *\n *
\n * \n *\n * 
\n *
\n *
\n *
\n *\n * The following code below demonstrates how to perform animations using **CSS animations** with Angular:\n *\n *
\n * \n *\n * 
\n *
\n *
\n *
\n *\n * Both CSS3 animations and transitions can be used together and the animate service will figure out the correct duration and delay timing.\n *\n * Upon DOM mutation, the event class is added first (something like `ng-enter`), then the browser prepares itself to add\n * the active class (in this case `ng-enter-active`) which then triggers the animation. The animation module will automatically\n * detect the CSS code to determine when the animation ends. Once the animation is over then both CSS classes will be\n * removed from the DOM. If a browser does not support CSS transitions or CSS animations then the animation will start and end\n * immediately resulting in a DOM element that is at its final state. This final state is when the DOM element\n * has no CSS transition/animation classes applied to it.\n *\n *

CSS Staggering Animations

\n * A Staggering animation is a collection of animations that are issued with a slight delay in between each successive operation resulting in a\n * curtain-like effect. The ngAnimate module, as of 1.2.0, supports staggering animations and the stagger effect can be\n * performed by creating a **ng-EVENT-stagger** CSS class and attaching that class to the base CSS class used for\n * the animation. The style property expected within the stagger class can either be a **transition-delay** or an\n * **animation-delay** property (or both if your animation contains both transitions and keyframe animations).\n *\n *
\n * .my-animation.ng-enter {\n *   /* standard transition code */\n *   -webkit-transition: 1s linear all;\n *   transition: 1s linear all;\n *   opacity:0;\n * }\n * .my-animation.ng-enter-stagger {\n *   /* this will have a 100ms delay between each successive leave animation */\n *   -webkit-transition-delay: 0.1s;\n *   transition-delay: 0.1s;\n *\n *   /* in case the stagger doesn't work then these two values\n *    must be set to 0 to avoid an accidental CSS inheritance */\n *   -webkit-transition-duration: 0s;\n *   transition-duration: 0s;\n * }\n * .my-animation.ng-enter.ng-enter-active {\n *   /* standard transition styles */\n *   opacity:1;\n * }\n * 
\n *\n * Staggering animations work by default in ngRepeat (so long as the CSS class is defined). Outside of ngRepeat, to use staggering animations\n * on your own, they can be triggered by firing multiple calls to the same event on $animate. However, the restrictions surrounding this\n * are that each of the elements must have the same CSS className value as well as the same parent element. A stagger operation\n * will also be reset if more than 10ms has passed after the last animation has been fired.\n *\n * The following code will issue the **ng-leave-stagger** event on the element provided:\n *\n *
\n * var kids = parent.children();\n *\n * $animate.leave(kids[0]); //stagger index=0\n * $animate.leave(kids[1]); //stagger index=1\n * $animate.leave(kids[2]); //stagger index=2\n * $animate.leave(kids[3]); //stagger index=3\n * $animate.leave(kids[4]); //stagger index=4\n *\n * $timeout(function() {\n *   //stagger has reset itself\n *   $animate.leave(kids[5]); //stagger index=0\n *   $animate.leave(kids[6]); //stagger index=1\n * }, 100, false);\n * 
\n *\n * Stagger animations are currently only supported within CSS-defined animations.\n *\n *

JavaScript-defined Animations

\n * In the event that you do not want to use CSS3 transitions or CSS3 animations or if you wish to offer animations on browsers that do not\n * yet support CSS transitions/animations, then you can make use of JavaScript animations defined inside of your AngularJS module.\n *\n *
\n * //!annotate=\"YourApp\" Your AngularJS Module|Replace this or ngModule with the module that you used to define your application.\n * var ngModule = angular.module('YourApp', []);\n * ngModule.animation('.my-crazy-animation', function() {\n *   return {\n *     enter: function(element, done) {\n *       //run the animation here and call done when the animation is complete\n *       return function(cancelled) {\n *         //this (optional) function will be called when the animation\n *         //completes or when the animation is cancelled (the cancelled\n *         //flag will be set to true if cancelled).\n *       }\n *     }\n *     leave: function(element, done) { },\n *     move: function(element, done) { },\n *\n *     //animation that can be triggered before the class is added\n *     beforeAddClass: function(element, className, done) { },\n *\n *     //animation that can be triggered after the class is added\n *     addClass: function(element, className, done) { },\n *\n *     //animation that can be triggered before the class is removed\n *     beforeRemoveClass: function(element, className, done) { },\n *\n *     //animation that can be triggered after the class is removed\n *     removeClass: function(element, className, done) { }\n *   }\n * });\n * 
\n *\n * JavaScript-defined animations are created with a CSS-like class selector and a collection of events which are set to run\n * a javascript callback function. When an animation is triggered, $animate will look for a matching animation which fits\n * the element's CSS class attribute value and then run the matching animation event function (if found).\n * In other words, if the CSS classes present on the animated element match any of the JavaScript animations then the callback function will\n * be executed. It should be also noted that only simple, single class selectors are allowed (compound class selectors are not supported).\n *\n * Within a JavaScript animation, an object containing various event callback animation functions is expected to be returned.\n * As explained above, these callbacks are triggered based on the animation event. Therefore if an enter animation is run,\n * and the JavaScript animation is found, then the enter callback will handle that animation (in addition to the CSS keyframe animation\n * or transition code that is defined via a stylesheet).\n *\n */\n\nangular.module('ngAnimate', ['ng'])\n\n /**\n * @ngdoc object\n * @name ngAnimate.$animateProvider\n * @description\n *\n * The `$animateProvider` allows developers to register JavaScript animation event handlers directly inside of a module.\n * When an animation is triggered, the $animate service will query the $animate service to find any animations that match\n * the provided name value.\n *\n * Requires the {@link ngAnimate `ngAnimate`} module to be installed.\n *\n * Please visit the {@link ngAnimate `ngAnimate`} module overview page learn more about how to use animations in your application.\n *\n */\n .config(['$provide', '$animateProvider', function($provide, $animateProvider) {\n var noop = angular.noop;\n var forEach = angular.forEach;\n var selectors = $animateProvider.$$selectors;\n\n var ELEMENT_NODE = 1;\n var NG_ANIMATE_STATE = '$$ngAnimateState';\n var 
NG_ANIMATE_CLASS_NAME = 'ng-animate';\n var rootAnimateState = {running: true};\n\n $provide.decorator('$animate', ['$delegate', '$injector', '$sniffer', '$rootElement', '$timeout', '$rootScope', '$document',\n function($delegate, $injector, $sniffer, $rootElement, $timeout, $rootScope, $document) {\n\n $rootElement.data(NG_ANIMATE_STATE, rootAnimateState);\n\n // disable animations during bootstrap, but once we bootstrapped, enable animations\n $rootScope.$$postDigest(function() {\n rootAnimateState.running = false;\n });\n\n function lookup(name) {\n if (name) {\n var matches = [],\n flagMap = {},\n classes = name.substr(1).split('.');\n\n //the empty string value is the default animation\n //operation which performs CSS transition and keyframe\n //animations sniffing. This is always included for each\n //element animation procedure if the browser supports\n //transitions and/or keyframe animations\n if ($sniffer.transitions || $sniffer.animations) {\n classes.push('');\n }\n\n for(var i=0; i < classes.length; i++) {\n var klass = classes[i],\n selectorFactoryName = selectors[klass];\n if(selectorFactoryName && !flagMap[klass]) {\n matches.push($injector.get(selectorFactoryName));\n flagMap[klass] = true;\n }\n }\n return matches;\n }\n }\n\n /**\n * @ngdoc object\n * @name ngAnimate.$animate\n * @function\n *\n * @description\n * The `$animate` service provides animation detection support while performing DOM operations (enter, leave and move) as well as during addClass and removeClass operations.\n * When any of these operations are run, the $animate service\n * will examine any JavaScript-defined animations (which are defined by using the $animateProvider provider object)\n * as well as any CSS-defined animations against the CSS classes present on the element once the DOM operation is run.\n *\n * The `$animate` service is used behind the scenes with pre-existing directives and animation with these directives\n * will work out of the box without any extra 
configuration.\n *\n * Requires the {@link ngAnimate `ngAnimate`} module to be installed.\n *\n * Please visit the {@link ngAnimate `ngAnimate`} module overview page learn more about how to use animations in your application.\n *\n */\n return {\n /**\n * @ngdoc function\n * @name ngAnimate.$animate#enter\n * @methodOf ngAnimate.$animate\n * @function\n *\n * @description\n * Appends the element to the parentElement element that resides in the document and then runs the enter animation. Once\n * the animation is started, the following CSS classes will be present on the element for the duration of the animation:\n *\n * Below is a breakdown of each step that occurs during enter animation:\n *\n * | Animation Step | What the element class attribute looks like |\n * |----------------------------------------------------------------------------------------------|---------------------------------------------|\n * | 1. $animate.enter(...) is called | class=\"my-animation\" |\n * | 2. element is inserted into the parentElement element or beside the afterElement element | class=\"my-animation\" |\n * | 3. $animate runs any JavaScript-defined animations on the element | class=\"my-animation ng-animate\" |\n * | 4. the .ng-enter class is added to the element | class=\"my-animation ng-animate ng-enter\" |\n * | 5. $animate scans the element styles to get the CSS transition/animation duration and delay | class=\"my-animation ng-animate ng-enter\" |\n * | 6. $animate waits for 10ms (this performs a reflow) | class=\"my-animation ng-animate ng-enter\" |\n * | 7. the .ng-enter-active and .ng-animate-active classes are added (this triggers the CSS transition/animation) | class=\"my-animation ng-animate ng-animate-active ng-enter ng-enter-active\" |\n * | 8. $animate waits for X milliseconds for the animation to complete | class=\"my-animation ng-animate ng-animate-active ng-enter ng-enter-active\" |\n * | 9. 
The animation ends and all generated CSS classes are removed from the element | class=\"my-animation\" |\n * | 10. The doneCallback() callback is fired (if provided) | class=\"my-animation\" |\n *\n * @param {jQuery/jqLite element} element the element that will be the focus of the enter animation\n * @param {jQuery/jqLite element} parentElement the parent element of the element that will be the focus of the enter animation\n * @param {jQuery/jqLite element} afterElement the sibling element (which is the previous element) of the element that will be the focus of the enter animation\n * @param {function()=} doneCallback the callback function that will be called once the animation is complete\n */\n enter : function(element, parentElement, afterElement, doneCallback) {\n this.enabled(false, element);\n $delegate.enter(element, parentElement, afterElement);\n $rootScope.$$postDigest(function() {\n performAnimation('enter', 'ng-enter', element, parentElement, afterElement, noop, doneCallback);\n });\n },\n\n /**\n * @ngdoc function\n * @name ngAnimate.$animate#leave\n * @methodOf ngAnimate.$animate\n * @function\n *\n * @description\n * Runs the leave animation operation and, upon completion, removes the element from the DOM. Once\n * the animation is started, the following CSS classes will be added for the duration of the animation:\n *\n * Below is a breakdown of each step that occurs during enter animation:\n *\n * | Animation Step | What the element class attribute looks like |\n * |----------------------------------------------------------------------------------------------|---------------------------------------------|\n * | 1. $animate.leave(...) is called | class=\"my-animation\" |\n * | 2. $animate runs any JavaScript-defined animations on the element | class=\"my-animation ng-animate\" |\n * | 3. the .ng-leave class is added to the element | class=\"my-animation ng-animate ng-leave\" |\n * | 4. 
$animate scans the element styles to get the CSS transition/animation duration and delay | class=\"my-animation ng-animate ng-leave\" |\n * | 5. $animate waits for 10ms (this performs a reflow) | class=\"my-animation ng-animate ng-leave\" |\n * | 6. the .ng-leave-active and .ng-animate-active classes is added (this triggers the CSS transition/animation) | class=\"my-animation ng-animate ng-animate-active ng-leave ng-leave-active\" |\n * | 7. $animate waits for X milliseconds for the animation to complete | class=\"my-animation ng-animate ng-animate-active ng-leave ng-leave-active\" |\n * | 8. The animation ends and all generated CSS classes are removed from the element | class=\"my-animation\" |\n * | 9. The element is removed from the DOM | ... |\n * | 10. The doneCallback() callback is fired (if provided) | ... |\n *\n * @param {jQuery/jqLite element} element the element that will be the focus of the leave animation\n * @param {function()=} doneCallback the callback function that will be called once the animation is complete\n */\n leave : function(element, doneCallback) {\n cancelChildAnimations(element);\n this.enabled(false, element);\n $rootScope.$$postDigest(function() {\n performAnimation('leave', 'ng-leave', element, null, null, function() {\n $delegate.leave(element);\n }, doneCallback);\n });\n },\n\n /**\n * @ngdoc function\n * @name ngAnimate.$animate#move\n * @methodOf ngAnimate.$animate\n * @function\n *\n * @description\n * Fires the move DOM operation. Just before the animation starts, the animate service will either append it into the parentElement container or\n * add the element directly after the afterElement element if present. Then the move animation will be run. 
Once\n * the animation is started, the following CSS classes will be added for the duration of the animation:\n *\n * Below is a breakdown of each step that occurs during move animation:\n *\n * | Animation Step | What the element class attribute looks like |\n * |----------------------------------------------------------------------------------------------|---------------------------------------------|\n * | 1. $animate.move(...) is called | class=\"my-animation\" |\n * | 2. element is moved into the parentElement element or beside the afterElement element | class=\"my-animation\" |\n * | 3. $animate runs any JavaScript-defined animations on the element | class=\"my-animation ng-animate\" |\n * | 4. the .ng-move class is added to the element | class=\"my-animation ng-animate ng-move\" |\n * | 5. $animate scans the element styles to get the CSS transition/animation duration and delay | class=\"my-animation ng-animate ng-move\" |\n * | 6. $animate waits for 10ms (this performs a reflow) | class=\"my-animation ng-animate ng-move\" |\n * | 7. the .ng-move-active and .ng-animate-active classes is added (this triggers the CSS transition/animation) | class=\"my-animation ng-animate ng-animate-active ng-move ng-move-active\" |\n * | 8. $animate waits for X milliseconds for the animation to complete | class=\"my-animation ng-animate ng-animate-active ng-move ng-move-active\" |\n * | 9. The animation ends and all generated CSS classes are removed from the element | class=\"my-animation\" |\n * | 10. 
The doneCallback() callback is fired (if provided) | class=\"my-animation\" |\n *\n * @param {jQuery/jqLite element} element the element that will be the focus of the move animation\n * @param {jQuery/jqLite element} parentElement the parentElement element of the element that will be the focus of the move animation\n * @param {jQuery/jqLite element} afterElement the sibling element (which is the previous element) of the element that will be the focus of the move animation\n * @param {function()=} doneCallback the callback function that will be called once the animation is complete\n */\n move : function(element, parentElement, afterElement, doneCallback) {\n cancelChildAnimations(element);\n this.enabled(false, element);\n $delegate.move(element, parentElement, afterElement);\n $rootScope.$$postDigest(function() {\n performAnimation('move', 'ng-move', element, parentElement, afterElement, noop, doneCallback);\n });\n },\n\n /**\n * @ngdoc function\n * @name ngAnimate.$animate#addClass\n * @methodOf ngAnimate.$animate\n *\n * @description\n * Triggers a custom animation event based off the className variable and then attaches the className value to the element as a CSS class.\n * Unlike the other animation methods, the animate service will suffix the className value with {@type -add} in order to provide\n * the animate service the setup and active CSS classes in order to trigger the animation (this will be skipped if no CSS transitions\n * or keyframes are defined on the -add or base CSS class).\n *\n * Below is a breakdown of each step that occurs during addClass animation:\n *\n * | Animation Step | What the element class attribute looks like |\n * |------------------------------------------------------------------------------------------------|---------------------------------------------|\n * | 1. $animate.addClass(element, 'super') is called | class=\"my-animation\" |\n * | 2. 
$animate runs any JavaScript-defined animations on the element | class=\"my-animation ng-animate\" |\n * | 3. the .super-add class are added to the element | class=\"my-animation ng-animate super-add\" |\n * | 4. $animate scans the element styles to get the CSS transition/animation duration and delay | class=\"my-animation ng-animate super-add\" |\n * | 5. $animate waits for 10ms (this performs a reflow) | class=\"my-animation ng-animate super-add\" |\n * | 6. the .super, .super-add-active and .ng-animate-active classes are added (this triggers the CSS transition/animation) | class=\"my-animation ng-animate ng-animate-active super super-add super-add-active\" |\n * | 7. $animate waits for X milliseconds for the animation to complete | class=\"my-animation super-add super-add-active\" |\n * | 8. The animation ends and all generated CSS classes are removed from the element | class=\"my-animation super\" |\n * | 9. The super class is kept on the element | class=\"my-animation super\" |\n * | 10. The doneCallback() callback is fired (if provided) | class=\"my-animation super\" |\n *\n * @param {jQuery/jqLite element} element the element that will be animated\n * @param {string} className the CSS class that will be added to the element and then animated\n * @param {function()=} doneCallback the callback function that will be called once the animation is complete\n */\n addClass : function(element, className, doneCallback) {\n performAnimation('addClass', className, element, null, null, function() {\n $delegate.addClass(element, className);\n }, doneCallback);\n },\n\n /**\n * @ngdoc function\n * @name ngAnimate.$animate#removeClass\n * @methodOf ngAnimate.$animate\n *\n * @description\n * Triggers a custom animation event based off the className variable and then removes the CSS class provided by the className value\n * from the element. 
Unlike the other animation methods, the animate service will suffix the className value with {@type -remove} in\n * order to provide the animate service the setup and active CSS classes in order to trigger the animation (this will be skipped if\n * no CSS transitions or keyframes are defined on the -remove or base CSS classes).\n *\n * Below is a breakdown of each step that occurs during removeClass animation:\n *\n * | Animation Step | What the element class attribute looks like |\n * |-----------------------------------------------------------------------------------------------|---------------------------------------------|\n * | 1. $animate.removeClass(element, 'super') is called | class=\"my-animation super\" |\n * | 2. $animate runs any JavaScript-defined animations on the element | class=\"my-animation super ng-animate\" |\n * | 3. the .super-remove class are added to the element | class=\"my-animation super ng-animate super-remove\"|\n * | 4. $animate scans the element styles to get the CSS transition/animation duration and delay | class=\"my-animation super ng-animate super-remove\" |\n * | 5. $animate waits for 10ms (this performs a reflow) | class=\"my-animation super ng-animate super-remove\" |\n * | 6. the .super-remove-active and .ng-animate-active classes are added and .super is removed (this triggers the CSS transition/animation) | class=\"my-animation ng-animate ng-animate-active super-remove super-remove-active\" |\n * | 7. $animate waits for X milliseconds for the animation to complete | class=\"my-animation ng-animate ng-animate-active super-remove super-remove-active\" |\n * | 8. The animation ends and all generated CSS classes are removed from the element | class=\"my-animation\" |\n * | 9. 
The doneCallback() callback is fired (if provided) | class=\"my-animation\" |\n *\n *\n * @param {jQuery/jqLite element} element the element that will be animated\n * @param {string} className the CSS class that will be animated and then removed from the element\n * @param {function()=} doneCallback the callback function that will be called once the animation is complete\n */\n removeClass : function(element, className, doneCallback) {\n performAnimation('removeClass', className, element, null, null, function() {\n $delegate.removeClass(element, className);\n }, doneCallback);\n },\n\n /**\n * @ngdoc function\n * @name ngAnimate.$animate#enabled\n * @methodOf ngAnimate.$animate\n * @function\n *\n * @param {boolean=} value If provided then set the animation on or off.\n * @param {jQuery/jqLite element=} element If provided then the element will be used to represent the enable/disable operation\n * @return {boolean} Current animation state.\n *\n * @description\n * Globally enables/disables animations.\n *\n */\n enabled : function(value, element) {\n switch(arguments.length) {\n case 2:\n if(value) {\n cleanup(element);\n } else {\n var data = element.data(NG_ANIMATE_STATE) || {};\n data.disabled = true;\n element.data(NG_ANIMATE_STATE, data);\n }\n break;\n\n case 1:\n rootAnimateState.disabled = !value;\n break;\n\n default:\n value = !rootAnimateState.disabled;\n break;\n }\n return !!value;\n }\n };\n\n /*\n all animations call this shared animation triggering function internally.\n The animationEvent variable refers to the JavaScript animation event that will be triggered\n and the className value is the name of the animation that will be applied within the\n CSS code. 
Element, parentElement and afterElement are provided DOM elements for the animation\n and the onComplete callback will be fired once the animation is fully complete.\n */\n function performAnimation(animationEvent, className, element, parentElement, afterElement, domOperation, doneCallback) {\n var currentClassName = element.attr('class') || '';\n var classes = currentClassName + ' ' + className;\n var animationLookup = (' ' + classes).replace(/\\s+/g,'.');\n if (!parentElement) {\n parentElement = afterElement ? afterElement.parent() : element.parent();\n }\n\n var matches = lookup(animationLookup);\n var isClassBased = animationEvent == 'addClass' || animationEvent == 'removeClass';\n var ngAnimateState = element.data(NG_ANIMATE_STATE) || {};\n\n //skip the animation if animations are disabled, a parent is already being animated,\n //the element is not currently attached to the document body or then completely close\n //the animation if any matching animations are not found at all.\n //NOTE: IE8 + IE9 should close properly (run closeAnimation()) in case a NO animation is not found.\n if (animationsDisabled(element, parentElement) || matches.length === 0) {\n fireDOMOperation();\n closeAnimation();\n return;\n }\n\n var animations = [];\n //only add animations if the currently running animation is not structural\n //or if there is no animation running at all\n if(!ngAnimateState.running || !(isClassBased && ngAnimateState.structural)) {\n forEach(matches, function(animation) {\n //add the animation to the queue to if it is allowed to be cancelled\n if(!animation.allowCancel || animation.allowCancel(element, animationEvent, className)) {\n var beforeFn, afterFn = animation[animationEvent];\n\n //Special case for a leave animation since there is no point in performing an\n //animation on a element node that has already been removed from the DOM\n if(animationEvent == 'leave') {\n beforeFn = afterFn;\n afterFn = null; //this must be falsy so that the animation is 
skipped for leave\n } else {\n beforeFn = animation['before' + animationEvent.charAt(0).toUpperCase() + animationEvent.substr(1)];\n }\n animations.push({\n before : beforeFn,\n after : afterFn\n });\n }\n });\n }\n\n //this would mean that an animation was not allowed so let the existing\n //animation do it's thing and close this one early\n if(animations.length === 0) {\n fireDOMOperation();\n fireDoneCallbackAsync();\n return;\n }\n\n //this value will be searched for class-based CSS className lookup. Therefore,\n //we prefix and suffix the current className value with spaces to avoid substring\n //lookups of className tokens\n var futureClassName = ' ' + currentClassName + ' ';\n if(ngAnimateState.running) {\n //if an animation is currently running on the element then lets take the steps\n //to cancel that animation and fire any required callbacks\n $timeout.cancel(ngAnimateState.closeAnimationTimeout);\n cleanup(element);\n cancelAnimations(ngAnimateState.animations);\n\n //if the class is removed during the reflow then it will revert the styles temporarily\n //back to the base class CSS styling causing a jump-like effect to occur. This check\n //here ensures that the domOperation is only performed after the reflow has commenced\n if(ngAnimateState.beforeComplete) {\n (ngAnimateState.done || noop)(true);\n } else if(isClassBased && !ngAnimateState.structural) {\n //class-based animations will compare element className values after cancelling the\n //previous animation to see if the element properties already contain the final CSS\n //class and if so then the animation will be skipped. Since the domOperation will\n //be performed only after the reflow is complete then our element's className value\n //will be invalid. 
Therefore the same string manipulation that would occur within the\n //DOM operation will be performed below so that the class comparison is valid...\n futureClassName = ngAnimateState.event == 'removeClass' ?\n futureClassName.replace(ngAnimateState.className, '') :\n futureClassName + ngAnimateState.className + ' ';\n }\n }\n\n //There is no point in perform a class-based animation if the element already contains\n //(on addClass) or doesn't contain (on removeClass) the className being animated.\n //The reason why this is being called after the previous animations are cancelled\n //is so that the CSS classes present on the element can be properly examined.\n var classNameToken = ' ' + className + ' ';\n if((animationEvent == 'addClass' && futureClassName.indexOf(classNameToken) >= 0) ||\n (animationEvent == 'removeClass' && futureClassName.indexOf(classNameToken) == -1)) {\n fireDOMOperation();\n fireDoneCallbackAsync();\n return;\n }\n\n //the ng-animate class does nothing, but it's here to allow for\n //parent animations to find and cancel child animations when needed\n element.addClass(NG_ANIMATE_CLASS_NAME);\n\n element.data(NG_ANIMATE_STATE, {\n running:true,\n event:animationEvent,\n className:className,\n structural:!isClassBased,\n animations:animations,\n done:onBeforeAnimationsComplete\n });\n\n //first we run the before animations and when all of those are complete\n //then we perform the DOM operation and run the next set of animations\n invokeRegisteredAnimationFns(animations, 'before', onBeforeAnimationsComplete);\n\n function onBeforeAnimationsComplete(cancelled) {\n fireDOMOperation();\n if(cancelled === true) {\n closeAnimation();\n return;\n }\n\n //set the done function to the final done function\n //so that the DOM event won't be executed twice by accident\n //if the after animation is cancelled as well\n var data = element.data(NG_ANIMATE_STATE);\n if(data) {\n data.done = closeAnimation;\n element.data(NG_ANIMATE_STATE, data);\n }\n 
invokeRegisteredAnimationFns(animations, 'after', closeAnimation);\n }\n\n function invokeRegisteredAnimationFns(animations, phase, allAnimationFnsComplete) {\n var endFnName = phase + 'End';\n forEach(animations, function(animation, index) {\n var animationPhaseCompleted = function() {\n progress(index, phase);\n };\n\n //there are no before functions for enter + move since the DOM\n //operations happen before the performAnimation method fires\n if(phase == 'before' && (animationEvent == 'enter' || animationEvent == 'move')) {\n animationPhaseCompleted();\n return;\n }\n\n if(animation[phase]) {\n animation[endFnName] = isClassBased ?\n animation[phase](element, className, animationPhaseCompleted) :\n animation[phase](element, animationPhaseCompleted);\n } else {\n animationPhaseCompleted();\n }\n });\n\n function progress(index, phase) {\n var phaseCompletionFlag = phase + 'Complete';\n var currentAnimation = animations[index];\n currentAnimation[phaseCompletionFlag] = true;\n (currentAnimation[endFnName] || noop)();\n\n for(var i=0;i 0 ? '; ' : '') + style;\n node.setAttribute('style', newStyle);\n return oldStyle;\n }\n\n function getElementAnimationDetails(element, cacheKey) {\n var data = cacheKey ? 
lookupCache[cacheKey] : null;\n if(!data) {\n var transitionDuration = 0;\n var transitionDelay = 0;\n var animationDuration = 0;\n var animationDelay = 0;\n var transitionDelayStyle;\n var animationDelayStyle;\n var transitionDurationStyle;\n var transitionPropertyStyle;\n\n //we want all the styles defined before and after\n forEach(element, function(element) {\n if (element.nodeType == ELEMENT_NODE) {\n var elementStyles = $window.getComputedStyle(element) || {};\n\n transitionDurationStyle = elementStyles[TRANSITION_PROP + DURATION_KEY];\n\n transitionDuration = Math.max(parseMaxTime(transitionDurationStyle), transitionDuration);\n\n transitionPropertyStyle = elementStyles[TRANSITION_PROP + PROPERTY_KEY];\n\n transitionDelayStyle = elementStyles[TRANSITION_PROP + DELAY_KEY];\n\n transitionDelay = Math.max(parseMaxTime(transitionDelayStyle), transitionDelay);\n\n animationDelayStyle = elementStyles[ANIMATION_PROP + DELAY_KEY];\n\n animationDelay = Math.max(parseMaxTime(animationDelayStyle), animationDelay);\n\n var aDuration = parseMaxTime(elementStyles[ANIMATION_PROP + DURATION_KEY]);\n\n if(aDuration > 0) {\n aDuration *= parseInt(elementStyles[ANIMATION_PROP + ANIMATION_ITERATION_COUNT_KEY], 10) || 1;\n }\n\n animationDuration = Math.max(aDuration, animationDuration);\n }\n });\n data = {\n total : 0,\n transitionPropertyStyle: transitionPropertyStyle,\n transitionDurationStyle: transitionDurationStyle,\n transitionDelayStyle: transitionDelayStyle,\n transitionDelay: transitionDelay,\n transitionDuration: transitionDuration,\n animationDelayStyle: animationDelayStyle,\n animationDelay: animationDelay,\n animationDuration: animationDuration\n };\n if(cacheKey) {\n lookupCache[cacheKey] = data;\n }\n }\n return data;\n }\n\n function parseMaxTime(str) {\n var maxValue = 0;\n var values = angular.isString(str) ?\n str.split(/\\s*,\\s*/) :\n [];\n forEach(values, function(value) {\n maxValue = Math.max(parseFloat(value) || 0, maxValue);\n });\n return maxValue;\n 
}\n\n function getCacheKey(element) {\n var parentElement = element.parent();\n var parentID = parentElement.data(NG_ANIMATE_PARENT_KEY);\n if(!parentID) {\n parentElement.data(NG_ANIMATE_PARENT_KEY, ++parentCounter);\n parentID = parentCounter;\n }\n return parentID + '-' + element[0].className;\n }\n\n function animateSetup(element, className) {\n var cacheKey = getCacheKey(element);\n var eventCacheKey = cacheKey + ' ' + className;\n var stagger = {};\n var ii = lookupCache[eventCacheKey] ? ++lookupCache[eventCacheKey].total : 0;\n\n if(ii > 0) {\n var staggerClassName = className + '-stagger';\n var staggerCacheKey = cacheKey + ' ' + staggerClassName;\n var applyClasses = !lookupCache[staggerCacheKey];\n\n applyClasses && element.addClass(staggerClassName);\n\n stagger = getElementAnimationDetails(element, staggerCacheKey);\n\n applyClasses && element.removeClass(staggerClassName);\n }\n\n element.addClass(className);\n\n var timings = getElementAnimationDetails(element, eventCacheKey);\n\n /* there is no point in performing a reflow if the animation\n timeout is empty (this would cause a flicker bug normally\n in the page. There is also no point in performing an animation\n that only has a delay and no duration */\n var maxDuration = Math.max(timings.transitionDuration, timings.animationDuration);\n if(maxDuration === 0) {\n element.removeClass(className);\n return false;\n }\n\n var node = element[0];\n //temporarily disable the transition so that the enter styles\n //don't animate twice (this is here to avoid a bug in Chrome/FF).\n var activeClassName = '';\n if(timings.transitionDuration > 0) {\n element.addClass(NG_ANIMATE_FALLBACK_CLASS_NAME);\n activeClassName += NG_ANIMATE_FALLBACK_ACTIVE_CLASS_NAME + ' ';\n blockTransitions(element);\n } else {\n blockKeyframeAnimations(element);\n }\n\n forEach(className.split(' '), function(klass, i) {\n activeClassName += (i > 0 ? 
' ' : '') + klass + '-active';\n });\n\n element.data(NG_ANIMATE_CSS_DATA_KEY, {\n className : className,\n activeClassName : activeClassName,\n maxDuration : maxDuration,\n classes : className + ' ' + activeClassName,\n timings : timings,\n stagger : stagger,\n ii : ii\n });\n\n return true;\n }\n\n function blockTransitions(element) {\n element[0].style[TRANSITION_PROP + PROPERTY_KEY] = 'none';\n }\n\n function blockKeyframeAnimations(element) {\n element[0].style[ANIMATION_PROP] = 'none 0s';\n }\n\n function unblockTransitions(element) {\n var node = element[0], prop = TRANSITION_PROP + PROPERTY_KEY;\n if(node.style[prop] && node.style[prop].length > 0) {\n node.style[prop] = '';\n }\n }\n\n function unblockKeyframeAnimations(element) {\n element[0].style[ANIMATION_PROP] = '';\n }\n\n function animateRun(element, className, activeAnimationComplete) {\n var data = element.data(NG_ANIMATE_CSS_DATA_KEY);\n if(!element.hasClass(className) || !data) {\n activeAnimationComplete();\n return;\n }\n\n var node = element[0];\n var timings = data.timings;\n var stagger = data.stagger;\n var maxDuration = data.maxDuration;\n var activeClassName = data.activeClassName;\n var maxDelayTime = Math.max(timings.transitionDelay, timings.animationDelay) * 1000;\n var startTime = Date.now();\n var css3AnimationEvents = ANIMATIONEND_EVENT + ' ' + TRANSITIONEND_EVENT;\n var formerStyle;\n var ii = data.ii;\n\n var applyFallbackStyle, style = '';\n if(timings.transitionDuration > 0) {\n var propertyStyle = timings.transitionPropertyStyle;\n if(propertyStyle.indexOf('all') == -1) {\n applyFallbackStyle = true;\n var fallbackProperty = $sniffer.msie ? 
'-ms-zoom' : 'border-spacing';\n style += CSS_PREFIX + 'transition-property: ' + propertyStyle + ', ' + fallbackProperty + '; ';\n style += CSS_PREFIX + 'transition-duration: ' + timings.transitionDurationStyle + ', ' + timings.transitionDuration + 's; ';\n }\n } else {\n unblockKeyframeAnimations(element);\n }\n\n if(ii > 0) {\n if(stagger.transitionDelay > 0 && stagger.transitionDuration === 0) {\n var delayStyle = timings.transitionDelayStyle;\n if(applyFallbackStyle) {\n delayStyle += ', ' + timings.transitionDelay + 's';\n }\n\n style += CSS_PREFIX + 'transition-delay: ' +\n prepareStaggerDelay(delayStyle, stagger.transitionDelay, ii) + '; ';\n }\n\n if(stagger.animationDelay > 0 && stagger.animationDuration === 0) {\n style += CSS_PREFIX + 'animation-delay: ' +\n prepareStaggerDelay(timings.animationDelayStyle, stagger.animationDelay, ii) + '; ';\n }\n }\n\n if(style.length > 0) {\n formerStyle = applyStyle(node, style);\n }\n\n element.on(css3AnimationEvents, onAnimationProgress);\n element.addClass(activeClassName);\n\n // This will automatically be called by $animate so\n // there is no need to attach this internally to the\n // timeout done method.\n return function onEnd(cancelled) {\n element.off(css3AnimationEvents, onAnimationProgress);\n element.removeClass(activeClassName);\n animateClose(element, className);\n if(formerStyle != null) {\n formerStyle.length > 0 ?\n node.setAttribute('style', formerStyle) :\n node.removeAttribute('style');\n }\n };\n\n function onAnimationProgress(event) {\n event.stopPropagation();\n var ev = event.originalEvent || event;\n var timeStamp = ev.$manualTimeStamp || ev.timeStamp || Date.now();\n /* $manualTimeStamp is a mocked timeStamp value which is set\n * within browserTrigger(). This is only here so that tests can\n * mock animations properly. 
Real events fallback to event.timeStamp,\n * or, if they don't, then a timeStamp is automatically created for them.\n * We're checking to see if the timeStamp surpasses the expected delay,\n * but we're using elapsedTime instead of the timeStamp on the 2nd\n * pre-condition since animations sometimes close off early */\n if(Math.max(timeStamp - startTime, 0) >= maxDelayTime && ev.elapsedTime >= maxDuration) {\n activeAnimationComplete();\n }\n }\n }\n\n function prepareStaggerDelay(delayStyle, staggerDelay, index) {\n var style = '';\n forEach(delayStyle.split(','), function(val, i) {\n style += (i > 0 ? ',' : '') +\n (index * staggerDelay + parseInt(val, 10)) + 's';\n });\n return style;\n }\n\n function animateBefore(element, className) {\n if(animateSetup(element, className)) {\n return function(cancelled) {\n cancelled && animateClose(element, className);\n };\n }\n }\n\n function animateAfter(element, className, afterAnimationComplete) {\n if(element.data(NG_ANIMATE_CSS_DATA_KEY)) {\n return animateRun(element, className, afterAnimationComplete);\n } else {\n animateClose(element, className);\n afterAnimationComplete();\n }\n }\n\n function animate(element, className, animationComplete) {\n //If the animateSetup function doesn't bother returning a\n //cancellation function then it means that there is no animation\n //to perform at all\n var preReflowCancellation = animateBefore(element, className);\n if(!preReflowCancellation) {\n animationComplete();\n return;\n }\n\n //There are two cancellation functions: one is before the first\n //reflow animation and the second is during the active state\n //animation. 
The first function will take care of removing the\n //data from the element which will not make the 2nd animation\n //happen in the first place\n var cancel = preReflowCancellation;\n afterReflow(function() {\n unblockTransitions(element);\n //once the reflow is complete then we point cancel to\n //the new cancellation function which will remove all of the\n //animation properties from the active animation\n cancel = animateAfter(element, className, animationComplete);\n });\n\n return function(cancelled) {\n (cancel || noop)(cancelled);\n };\n }\n\n function animateClose(element, className) {\n element.removeClass(className);\n element.removeClass(NG_ANIMATE_FALLBACK_CLASS_NAME);\n element.removeData(NG_ANIMATE_CSS_DATA_KEY);\n }\n\n return {\n allowCancel : function(element, animationEvent, className) {\n //always cancel the current animation if it is a\n //structural animation\n var oldClasses = (element.data(NG_ANIMATE_CSS_DATA_KEY) || {}).classes;\n if(!oldClasses || ['enter','leave','move'].indexOf(animationEvent) >= 0) {\n return true;\n }\n\n var parentElement = element.parent();\n var clone = angular.element(element[0].cloneNode());\n\n //make the element super hidden and override any CSS style values\n clone.attr('style','position:absolute; top:-9999px; left:-9999px');\n clone.removeAttr('id');\n clone.html('');\n\n forEach(oldClasses.split(' '), function(klass) {\n clone.removeClass(klass);\n });\n\n var suffix = animationEvent == 'addClass' ? 
'-add' : '-remove';\n clone.addClass(suffixClasses(className, suffix));\n parentElement.append(clone);\n\n var timings = getElementAnimationDetails(clone);\n clone.remove();\n\n return Math.max(timings.transitionDuration, timings.animationDuration) > 0;\n },\n\n enter : function(element, animationCompleted) {\n return animate(element, 'ng-enter', animationCompleted);\n },\n\n leave : function(element, animationCompleted) {\n return animate(element, 'ng-leave', animationCompleted);\n },\n\n move : function(element, animationCompleted) {\n return animate(element, 'ng-move', animationCompleted);\n },\n\n beforeAddClass : function(element, className, animationCompleted) {\n var cancellationMethod = animateBefore(element, suffixClasses(className, '-add'));\n if(cancellationMethod) {\n afterReflow(function() {\n unblockTransitions(element);\n animationCompleted();\n });\n return cancellationMethod;\n }\n animationCompleted();\n },\n\n addClass : function(element, className, animationCompleted) {\n return animateAfter(element, suffixClasses(className, '-add'), animationCompleted);\n },\n\n beforeRemoveClass : function(element, className, animationCompleted) {\n var cancellationMethod = animateBefore(element, suffixClasses(className, '-remove'));\n if(cancellationMethod) {\n afterReflow(function() {\n unblockTransitions(element);\n animationCompleted();\n });\n return cancellationMethod;\n }\n animationCompleted();\n },\n\n removeClass : function(element, className, animationCompleted) {\n return animateAfter(element, suffixClasses(className, '-remove'), animationCompleted);\n }\n };\n\n function suffixClasses(classes, suffix) {\n var className = '';\n classes = angular.isArray(classes) ? classes : classes.split(/\\s+/);\n forEach(classes, function(klass, i) {\n if(klass && klass.length > 0) {\n className += (i > 0 ? 
' ' : '') + klass + suffix;\n }\n });\n return className;\n }\n }]);\n }]);\n", "meta": {"content_hash": "ed82e7ac286038fb9ede14c272a09167", "timestamp": "", "source": "github", "line_count": 1279, "max_line_length": 244, "avg_line_length": 48.320562939796716, "alnum_prop": 0.5992362706708521, "repo_name": "songlinjava/angular.js", "id": "81d200431b5933cfd15f8d8accec62cc63e5388d", "size": "61802", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/ngAnimate/animate.js", "mode": "33188", "license": "mit", "language": []}} {"text": "namespace base { class Thread; }\n\nnamespace sandbox {\n\nclass SANDBOX_EXPORT ThreadHelpers {\n public:\n // Check whether the current process is single threaded. |proc_fd|\n // must be a file descriptor to /proc/ and remains owned by the\n // caller.\n static bool IsSingleThreaded(int proc_fd);\n static bool IsSingleThreaded();\n\n // Crash if the current process is not single threaded. This will wait\n // on /proc to be updated. In the case where this doesn't crash, this will\n // return promptly. In the case where this does crash, this will first wait\n // for a few ms in Debug mode, a few seconds in Release mode.\n static void AssertSingleThreaded(int proc_fd);\n static void AssertSingleThreaded();\n\n // Stop |thread| and ensure that it does not have an entry in\n // /proc/self/task/ from the point of view of the current thread. 
This is\n // the way to stop threads before calling IsSingleThreaded().\n static bool StopThreadAndWatchProcFS(int proc_fd, base::Thread* thread);\n\n static const char* GetAssertSingleThreadedErrorMessageForTests();\n\n private:\n DISALLOW_IMPLICIT_CONSTRUCTORS(ThreadHelpers);\n};\n\n} // namespace sandbox\n\n#endif // SANDBOX_LINUX_SERVICES_THREAD_HELPERS_H_\n", "meta": {"content_hash": "7284ed8fa925d757aa39452492f6c8fe", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 77, "avg_line_length": 36.303030303030305, "alnum_prop": 0.7420701168614358, "repo_name": "Fireblend/chromium-crosswalk", "id": "f4abdffd03b418b15b07f08a6b32b3b08d499db9", "size": "1525", "binary": false, "copies": "13", "ref": "refs/heads/master", "path": "sandbox/linux/services/thread_helpers.h", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "AppleScript", "bytes": "6973"}, {"name": "Arduino", "bytes": "464"}, {"name": "Assembly", "bytes": "34367"}, {"name": "Batchfile", "bytes": "8451"}, {"name": "C", "bytes": "9397825"}, {"name": "C++", "bytes": "235052525"}, {"name": "CSS", "bytes": "951745"}, {"name": "DM", "bytes": "60"}, {"name": "Emacs Lisp", "bytes": "988"}, {"name": "Groff", "bytes": "2494"}, {"name": "HTML", "bytes": "29070071"}, {"name": "Java", "bytes": "10089056"}, {"name": "JavaScript", "bytes": "20170506"}, {"name": "Makefile", "bytes": "68234"}, {"name": "Objective-C", "bytes": "1639405"}, {"name": "Objective-C++", "bytes": "9478782"}, {"name": "PHP", "bytes": "97817"}, {"name": "PLpgSQL", "bytes": "178732"}, {"name": "Perl", "bytes": "63937"}, {"name": "Protocol Buffer", "bytes": "465313"}, {"name": "Python", "bytes": "8146950"}, {"name": "Shell", "bytes": "473684"}, {"name": "Standard ML", "bytes": "5034"}, {"name": "XSLT", "bytes": "418"}, {"name": "nesC", "bytes": "18347"}]}} {"text": "package io.leangen.graphql.generator.mapping;\n\nimport io.leangen.graphql.util.ClassUtils;\n\nimport java.lang.reflect.AnnotatedType;\n\npublic 
abstract class AbstractSimpleTypeAdapter extends AbstractTypeAdapter {\n\n @SuppressWarnings(\"WeakerAccess\")\n protected final Class rawSourceType;\n\n protected AbstractSimpleTypeAdapter() {\n this.rawSourceType = ClassUtils.getRawType(sourceType.getType());\n }\n\n @Override\n public boolean supports(AnnotatedType type) {\n return ClassUtils.isSuperClass(rawSourceType, type);\n }\n}\n", "meta": {"content_hash": "f88b0e383bb1f5c93b8afee69ad8b315", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 89, "avg_line_length": 28.2, "alnum_prop": 0.74822695035461, "repo_name": "leangen/GraphQL-SPQR", "id": "5dc471494ab5746d371c3c433df2d3e081b89162", "size": "564", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/io/leangen/graphql/generator/mapping/AbstractSimpleTypeAdapter.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Java", "bytes": "1016834"}]}} {"text": "\n * @date 10.21.2013\n * @package shop_search\n * @subpackage helpers\n */\nclass FacetHelper extends Object\n{\n /** @var bool - if this is turned on it will use an algorithm that doesn't require traversing the data set if possible */\n private static $faster_faceting = false;\n\n /** @var bool - should the facets (link and checkbox only) be sorted - this can mess with things like category lists */\n private static $sort_facet_values = true;\n\n /** @var string - I don't know why you'd want to override this, but you could if you wanted */\n private static $attribute_facet_regex = '/^ATT(\\d+)$/';\n\n /** @var bool - For checkbox facets, is the initial state all checked or all unchecked? 
*/\n private static $default_checkbox_state = true;\n\n\n /**\n * @return FacetHelper\n */\n public static function inst()\n {\n return Injector::inst()->get('FacetHelper');\n }\n\n\n /**\n * Performs some quick pre-processing on filters from any source\n *\n * @param array $filters\n * @return array\n */\n public function scrubFilters($filters)\n {\n if (!is_array($filters)) {\n $filters = array();\n }\n\n foreach ($filters as $k => $v) {\n if (empty($v)) {\n unset($filters[$k]);\n }\n // this allows you to send an array as a comma-separated list, which is easier on the query string length\n if (is_string($v) && strpos($v, 'LIST~') === 0) {\n $filters[$k] = explode(',', substr($v, 5));\n }\n }\n\n return $filters;\n }\n\n\n /**\n * @param DataList $list\n * @param array $filters\n * @param DataObject|string $sing - just a singleton object we can get information off of\n * @return DataList\n */\n public function addFiltersToDataList($list, array $filters, $sing=null)\n {\n if (!$sing) {\n $sing = singleton($list->dataClass());\n }\n if (is_string($sing)) {\n $sing = singleton($sing);\n }\n\n if (!empty($filters)) {\n foreach ($filters as $filterField => $filterVal) {\n if ($sing->hasExtension('HasStaticAttributes') && preg_match(self::config()->attribute_facet_regex, $filterField, $matches)) {\n //\t\t\t\t\t$sav = $sing->StaticAttributeValues();\n//\t\t\t\t\tDebug::log(\"sav = {$sav->getJoinTable()}, {$sav->getLocalKey()}, {$sav->getForeignKey()}\");\n//\t\t\t\t\t$list = $list\n//\t\t\t\t\t\t->innerJoin($sav->getJoinTable(), \"\\\"{$sing->baseTable()}\\\".\\\"ID\\\" = \\\"{$sav->getJoinTable()}\\\".\\\"{$sav->getLocalKey()}\\\"\")\n//\t\t\t\t\t\t->filter(\"\\\"{$sav->getJoinTable()}\\\".\\\"{$sav->getForeignKey()}\\\"\", $filterVal)\n//\t\t\t\t\t;\n // TODO: This logic should be something like the above, but I don't know\n // how to get the join table from a singleton (which returns an UnsavedRelationList\n // instead of a ManyManyList). 
I've got a deadline to meet, though, so this\n // will catch the majority of cases as long as the extension is applied to the\n // Product class instead of a subclass.\n $list = $list\n ->innerJoin('Product_StaticAttributeTypes', \"\\\"SiteTree\\\".\\\"ID\\\" = \\\"Product_StaticAttributeTypes\\\".\\\"ProductID\\\"\")\n ->innerJoin('ProductAttributeValue', \"\\\"Product_StaticAttributeTypes\\\".\\\"ProductAttributeTypeID\\\" = \\\"ProductAttributeValue\\\".\\\"TypeID\\\"\")\n ->innerJoin('Product_StaticAttributeValues', \"\\\"SiteTree\\\".\\\"ID\\\" = \\\"Product_StaticAttributeValues\\\".\\\"ProductID\\\" AND \\\"ProductAttributeValue\\\".\\\"ID\\\" = \\\"Product_StaticAttributeValues\\\".\\\"ProductAttributeValueID\\\"\")\n ->filter(\"Product_StaticAttributeValues.ProductAttributeValueID\", $filterVal);\n } else {\n $list = $list->filter($this->processFilterField($sing, $filterField, $filterVal));\n }\n }\n }\n\n return $list;\n }\n\n\n /**\n * @param DataObject $rec This would normally just be a singleton but we don't want to have to create it over and over\n * @param string $filterField\n * @param mixed $filterVal\n * @return array - returns the new filter added\n */\n public function processFilterField($rec, $filterField, $filterVal)\n {\n // First check for VFI fields\n if ($rec->hasExtension('VirtualFieldIndex') && ($spec = $rec->getVFISpec($filterField))) {\n if ($spec['Type'] == VirtualFieldIndex::TYPE_LIST) {\n // Lists have to be handled a little differently\n $f = $rec->getVFIFieldName($filterField) . ':PartialMatch';\n if (is_array($filterVal)) {\n foreach ($filterVal as &$val) {\n $val = '|' . $val . '|';\n }\n return array($f => $filterVal);\n } else {\n return array($f => '|' . $filterVal . 
'|');\n }\n } else {\n // Simples are simple\n $filterField = $rec->getVFIFieldName($filterField);\n }\n }\n\n // Next check for regular db fields\n if ($rec->dbObject($filterField)) {\n // Is it a range value?\n if (is_string($filterVal) && preg_match('/^RANGE\\~(.+)\\~(.+)$/', $filterVal, $m)) {\n $filterField .= ':Between';\n $filterVal = array_slice($m, 1, 2);\n }\n\n return array($filterField => $filterVal);\n }\n\n return array();\n }\n\n\n /**\n * Processes the facet spec and removes any shorthand (field => label).\n * @param array $facetSpec\n * @return array\n */\n public function expandFacetSpec(array $facetSpec)\n {\n if (is_null($facetSpec)) {\n return array();\n }\n $facets = array();\n\n foreach ($facetSpec as $field => $label) {\n if (is_array($label)) {\n $facets[$field] = $label;\n } else {\n $facets[$field] = array('Label' => $label);\n }\n\n if (empty($facets[$field]['Source'])) {\n $facets[$field]['Source'] = $field;\n }\n if (empty($facets[$field]['Type'])) {\n $facets[$field]['Type'] = ShopSearch::FACET_TYPE_LINK;\n }\n\n if (empty($facets[$field]['Values'])) {\n $facets[$field]['Values'] = array();\n } else {\n $vals = $facets[$field]['Values'];\n if (is_string($vals)) {\n $vals = eval('return ' . $vals . ';');\n }\n $facets[$field]['Values'] = array();\n foreach ($vals as $val => $lbl) {\n $facets[$field]['Values'][$val] = new ArrayData(array(\n 'Label' => $lbl,\n 'Value' => $val,\n 'Count' => 0,\n ));\n }\n }\n }\n\n return $facets;\n }\n\n\n /**\n * This is super-slow. I'm assuming if you're using facets you\n * probably also ought to be using Solr or something else. 
Or\n * maybe you have unlimited time and can refactor this feature\n * and submit a pull request...\n *\n * TODO: If this is going to be used for categories we're going\n * to have to really clean it up and speed it up.\n * Suggestion:\n * - option to turn off counts\n * - switch order of nested array so we don't go through results unless needed\n * - if not doing counts, min/max and link facets can be handled w/ queries\n * - separate that bit out into a new function\n * NOTE: This is partially done with the \"faster_faceting\" config\n * option but more could be done, particularly by covering link facets as well.\n *\n * Output - list of ArrayData in the format:\n * Label - name of the facet\n * Source - field name of the facet\n * Type - one of the ShopSearch::FACET_TYPE_XXXX constants\n * Values - SS_List of possible values for this facet\n *\n * @param SS_List $matches\n * @param array $facetSpec\n * @param bool $autoFacetAttributes [optional]\n * @return ArrayList\n */\n public function buildFacets(SS_List $matches, array $facetSpec, $autoFacetAttributes=false)\n {\n $facets = $this->expandFacetSpec($facetSpec);\n if (!$autoFacetAttributes && (empty($facets) || !$matches || !$matches->count())) {\n return new ArrayList();\n }\n $fasterMethod = (bool)$this->config()->faster_faceting;\n\n // fill them in\n foreach ($facets as $field => &$facet) {\n if (preg_match(self::config()->attribute_facet_regex, $field, $m)) {\n $this->buildAttributeFacet($matches, $facet, $m[1]);\n continue;\n }\n\n // NOTE: using this method range and checkbox facets don't get counts\n if ($fasterMethod && $facet['Type'] != ShopSearch::FACET_TYPE_LINK) {\n if ($facet['Type'] == ShopSearch::FACET_TYPE_RANGE) {\n if (isset($facet['RangeMin'])) {\n $facet['MinValue'] = $facet['RangeMin'];\n }\n if (isset($facet['RangeMax'])) {\n $facet['MaxValue'] = $facet['RangeMax'];\n }\n }\n\n continue;\n }\n\n foreach ($matches as $rec) {\n // If it's a range facet, set up the min/max\n if 
($facet['Type'] == ShopSearch::FACET_TYPE_RANGE) {\n if (isset($facet['RangeMin'])) {\n $facet['MinValue'] = $facet['RangeMin'];\n }\n if (isset($facet['RangeMax'])) {\n $facet['MaxValue'] = $facet['RangeMax'];\n }\n }\n\n // If the field is accessible via normal methods, including\n // a user-defined getter, prefer that\n $fieldValue = $rec->relObject($field);\n if (is_null($fieldValue) && $rec->hasMethod($meth = \"get{$field}\")) {\n $fieldValue = $rec->$meth();\n }\n\n // If not, look for a VFI field\n if (!$fieldValue && $rec->hasExtension('VirtualFieldIndex')) {\n $fieldValue = $rec->getVFI($field);\n }\n\n // If we found something, process it\n if (!empty($fieldValue)) {\n // normalize so that it's iterable\n if (!is_array($fieldValue) && !$fieldValue instanceof SS_List) {\n $fieldValue = array($fieldValue);\n }\n\n foreach ($fieldValue as $obj) {\n if (empty($obj)) {\n continue;\n }\n\n // figure out the right label\n if (is_object($obj) && $obj->hasMethod('Nice')) {\n $lbl = $obj->Nice();\n } elseif (is_object($obj) && !empty($obj->Title)) {\n $lbl = $obj->Title;\n } elseif (\n is_numeric($obj) &&\n !empty($facet['LabelFormat']) &&\n $facet['LabelFormat'] === 'Currency' &&\n $facet['Type'] !== ShopSearch::FACET_TYPE_RANGE // this one handles it via javascript\n ) {\n $tmp = Currency::create($field);\n $tmp->setValue($obj);\n $lbl = $tmp->Nice();\n } else {\n $lbl = (string)$obj;\n }\n\n // figure out the value for sorting\n if (is_object($obj) && $obj->hasMethod('getAmount')) {\n $val = $obj->getAmount();\n } elseif (is_object($obj) && !empty($obj->ID)) {\n $val = $obj->ID;\n } else {\n $val = (string)$obj;\n }\n\n // if it's a range facet, calculate the min and max\n if ($facet['Type'] == ShopSearch::FACET_TYPE_RANGE) {\n if (!isset($facet['MinValue']) || $val < $facet['MinValue']) {\n $facet['MinValue'] = $val;\n $facet['MinLabel'] = $lbl;\n }\n if (!isset($facet['RangeMin']) || $val < $facet['RangeMin']) {\n $facet['RangeMin'] = $val;\n }\n if 
(!isset($facet['MaxValue']) || $val > $facet['MaxValue']) {\n $facet['MaxValue'] = $val;\n $facet['MaxLabel'] = $lbl;\n }\n if (!isset($facet['RangeMax']) || $val > $facet['RangeMax']) {\n $facet['RangeMax'] = $val;\n }\n }\n\n // Tally the value in the facets\n if (!isset($facet['Values'][$val])) {\n $facet['Values'][$val] = new ArrayData(array(\n 'Label' => $lbl,\n 'Value' => $val,\n 'Count' => 1,\n ));\n } elseif ($facet['Values'][$val]) {\n $facet['Values'][$val]->Count++;\n }\n }\n }\n }\n }\n\n // if we're auto-building the facets based on attributes,\n if ($autoFacetAttributes) {\n $facets = array_merge($this->buildAllAttributeFacets($matches), $facets);\n }\n\n // convert values to arraylist\n $out = new ArrayList();\n $sortValues = self::config()->sort_facet_values;\n foreach ($facets as $f) {\n if ($sortValues) {\n ksort($f['Values']);\n }\n $f['Values'] = new ArrayList($f['Values']);\n $out->push(new ArrayData($f));\n }\n\n return $out;\n }\n\n\n /**\n * NOTE: this will break if applied to something that's not a SiteTree subclass.\n * @param DataList|PaginatedList $matches\n * @param array $facet\n * @param int $typeID\n */\n protected function buildAttributeFacet($matches, array &$facet, $typeID)\n {\n $q = $matches instanceof PaginatedList ? 
$matches->getList()->dataQuery()->query() : $matches->dataQuery()->query();\n\n if (empty($facet['Label'])) {\n $type = ProductAttributeType::get()->byID($typeID);\n $facet['Label'] = $type->Label;\n }\n\n $baseTable = $q->getFrom();\n if (is_array($baseTable)) {\n $baseTable = reset($baseTable);\n }\n\n $q = $q->setSelect(array())\n ->selectField('\"ProductAttributeValue\".\"ID\"', 'Value')\n ->selectField('\"ProductAttributeValue\".\"Value\"', 'Label')\n ->selectField('count(distinct '.$baseTable.'.\"ID\")', 'Count')\n ->selectField('\"ProductAttributeValue\".\"Sort\"')\n ->addInnerJoin('Product_StaticAttributeValues', $baseTable.'.\"ID\" = \"Product_StaticAttributeValues\".\"ProductID\"')\n ->addInnerJoin('ProductAttributeValue', '\"Product_StaticAttributeValues\".\"ProductAttributeValueID\" = \"ProductAttributeValue\".\"ID\"')\n ->addWhere(sprintf(\"\\\"ProductAttributeValue\\\".\\\"TypeID\\\" = '%d'\", $typeID))\n ->setOrderBy('\"ProductAttributeValue\".\"Sort\"', 'ASC')\n ->setGroupBy('\"ProductAttributeValue\".\"ID\"')\n ->execute()\n ;\n\n $facet['Values'] = array();\n foreach ($q as $row) {\n $facet['Values'][ $row['Value'] ] = new ArrayData($row);\n }\n }\n\n\n /**\n * Builds facets from all attributes present in the data set.\n * @param DataList|PaginatedList $matches\n * @return array\n */\n protected function buildAllAttributeFacets($matches)\n {\n $q = $matches instanceof PaginatedList ? 
$matches->getList()->dataQuery()->query() : $matches->dataQuery()->query();\n\n // this is the easiest way to get SiteTree vs SiteTree_Live\n $baseTable = $q->getFrom();\n if (is_array($baseTable)) {\n $baseTable = reset($baseTable);\n }\n\n $q = $q->setSelect(array())\n ->selectField('\"ProductAttributeType\".\"ID\"', 'TypeID')\n ->selectField('\"ProductAttributeType\".\"Label\"', 'TypeLabel')\n ->selectField('\"ProductAttributeValue\".\"ID\"', 'Value')\n ->selectField('\"ProductAttributeValue\".\"Value\"', 'Label')\n ->selectField('count(distinct '.$baseTable.'.\"ID\")', 'Count')\n ->selectField('\"ProductAttributeValue\".\"Sort\"')\n ->addInnerJoin('Product_StaticAttributeTypes', $baseTable.'.\"ID\" = \"Product_StaticAttributeTypes\".\"ProductID\"')\n ->addInnerJoin('ProductAttributeType', '\"Product_StaticAttributeTypes\".\"ProductAttributeTypeID\" = \"ProductAttributeType\".\"ID\"')\n ->addInnerJoin('Product_StaticAttributeValues', $baseTable.'.\"ID\" = \"Product_StaticAttributeValues\".\"ProductID\"')\n ->addInnerJoin('ProductAttributeValue', '\"Product_StaticAttributeValues\".\"ProductAttributeValueID\" = \"ProductAttributeValue\".\"ID\"'\n . 
' AND \"ProductAttributeValue\".\"TypeID\" = \"ProductAttributeType\".\"ID\"')\n ->setOrderBy(array(\n '\"ProductAttributeType\".\"Label\"' => 'ASC',\n '\"ProductAttributeValue\".\"Sort\"' => 'ASC',\n ))\n ->setGroupBy(array('\"ProductAttributeValue\".\"ID\"', '\"ProductAttributeType\".\"ID\"'))\n ->execute()\n ;\n\n\n $curType = 0;\n $facets = array();\n $curFacet = null;\n foreach ($q as $row) {\n if ($curType != $row['TypeID']) {\n if ($curType > 0) {\n $facets['ATT'.$curType] = $curFacet;\n }\n $curType = $row['TypeID'];\n $curFacet = array(\n 'Label' => $row['TypeLabel'],\n 'Source' => 'ATT'.$curType,\n 'Type' => ShopSearch::FACET_TYPE_LINK,\n 'Values' => array(),\n );\n }\n\n unset($row['TypeID']);\n unset($row['TypeLabel']);\n $curFacet['Values'][ $row['Value'] ] = new ArrayData($row);\n }\n\n if ($curType > 0) {\n $facets['ATT'.$curType] = $curFacet;\n }\n return $facets;\n }\n\n\n /**\n * Inserts a \"Link\" field into the values for each facet which can be\n * used to get a filtered search based on that facets\n *\n * @param ArrayList $facets\n * @param array $baseParams\n * @param string $baseLink\n * @return ArrayList\n */\n public function insertFacetLinks(ArrayList $facets, array $baseParams, $baseLink)\n {\n $qs_f = Config::inst()->get('ShopSearch', 'qs_filters');\n $qs_t = Config::inst()->get('ShopSearch', 'qs_title');\n\n foreach ($facets as $facet) {\n switch ($facet->Type) {\n case ShopSearch::FACET_TYPE_RANGE:\n $params = array_merge($baseParams, array());\n if (!isset($params[$qs_f])) {\n $params[$qs_f] = array();\n }\n $params[$qs_f][$facet->Source] = 'RANGEFACETVALUE';\n $params[$qs_t] = $facet->Label . ': RANGEFACETLABEL';\n $facet->Link = $baseLink . '?' . 
http_build_query($params);\n break;\n\n case ShopSearch::FACET_TYPE_CHECKBOX;\n $facet->LinkDetails = json_encode(array(\n 'filter' => $qs_f,\n 'source' => $facet->Source,\n 'leaves' => $facet->FilterOnlyLeaves,\n ));\n\n // fall through on purpose\n\n default:\n foreach ($facet->Values as $value) {\n // make a copy of the existing params\n $params = array_merge($baseParams, array());\n\n // add the filter for this value\n if (!isset($params[$qs_f])) {\n $params[$qs_f] = array();\n }\n if ($facet->Type == ShopSearch::FACET_TYPE_CHECKBOX) {\n unset($params[$qs_f][$facet->Source]); // this will be figured out via javascript\n $params[$qs_t] = ($value->Active ? 'Remove ' : '') . $facet->Label . ': ' . $value->Label;\n } else {\n $params[$qs_f][$facet->Source] = $value->Value;\n $params[$qs_t] = $facet->Label . ': ' . $value->Label;\n }\n\n // build a new link\n $value->Link = $baseLink . '?' . http_build_query($params);\n }\n }\n }\n\n return $facets;\n }\n\n\n /**\n * @param ArrayList $children\n * @return array\n */\n protected function getRecursiveChildValues(ArrayList $children)\n {\n $out = array();\n\n foreach ($children as $child) {\n $out[$child->Value] = $child->Value;\n if (!empty($child->Children)) {\n $out += $this->getRecursiveChildValues($child->Children);\n }\n }\n\n return $out;\n }\n\n\n /**\n * For checkbox and range facets, this updates the state (checked and min/max)\n * based on current filter values.\n *\n * @param ArrayList $facets\n * @param array $filters\n * @return ArrayList\n */\n public function updateFacetState(ArrayList $facets, array $filters)\n {\n foreach ($facets as $facet) {\n if ($facet->Type == ShopSearch::FACET_TYPE_CHECKBOX) {\n if (empty($filters[$facet->Source])) {\n // If the filter is not being used at all, we count\n // all values as active.\n foreach ($facet->Values as $value) {\n $value->Active = (bool)FacetHelper::config()->default_checkbox_state;\n }\n } else {\n $filterVals = $filters[$facet->Source];\n if 
(!is_array($filterVals)) {\n $filterVals = array($filterVals);\n }\n $this->updateCheckboxFacetState(\n !empty($facet->NestedValues) ? $facet->NestedValues : $facet->Values,\n $filterVals,\n !empty($facet->FilterOnlyLeaves));\n }\n } elseif ($facet->Type == ShopSearch::FACET_TYPE_RANGE) {\n if (!empty($filters[$facet->Source]) && preg_match('/^RANGE\\~(.+)\\~(.+)$/', $filters[$facet->Source], $m)) {\n $facet->MinValue = $m[1];\n $facet->MaxValue = $m[2];\n }\n }\n }\n\n return $facets;\n }\n\n\n /**\n * For checkboxes, updates the state based on filters. Handles hierarchies and FilterOnlyLeaves\n * @param ArrayList $values\n * @param array $filterVals\n * @param bool $filterOnlyLeaves [optional]\n * @return bool - true if any of the children are true, false if all children are false\n */\n protected function updateCheckboxFacetState(ArrayList $values, array $filterVals, $filterOnlyLeaves=false)\n {\n $out = false;\n\n foreach ($values as $value) {\n if ($filterOnlyLeaves && !empty($value->Children)) {\n if (in_array($value->Value, $filterVals)) {\n // This wouldn't be normal, but even if it's not a leaf, we want to handle\n // the case where a filter might be set for this node. 
It should still show up correctly.\n $value->Active = true;\n foreach ($value->Children as $c) {\n $c->Active = true;\n }\n // TODO: handle more than one level of recursion here\n } else {\n $value->Active = $this->updateCheckboxFacetState($value->Children, $filterVals, $filterOnlyLeaves);\n }\n } else {\n $value->Active = in_array($value->Value, $filterVals);\n }\n\n if ($value->Active) {\n $out = true;\n }\n }\n\n return $out;\n }\n\n\n /**\n * If there are any facets (link or checkbox) that have a HierarchyDivider field\n * in the spec, transform them into a hierarchy so they can be displayed as such.\n *\n * @param ArrayList $facets\n * @return ArrayList\n */\n public function transformHierarchies(ArrayList $facets)\n {\n foreach ($facets as $facet) {\n if (!empty($facet->HierarchyDivider)) {\n $out = new ArrayList();\n $parentStack = array();\n\n foreach ($facet->Values as $value) {\n if (empty($value->Label)) {\n continue;\n }\n $value->FullLabel = $value->Label;\n\n // Look for the most recent parent that matches the beginning of this one\n while (count($parentStack) > 0) {\n $curParent = $parentStack[ count($parentStack)-1 ];\n if (strpos($value->Label, $curParent->FullLabel) === 0) {\n if (!isset($curParent->Children)) {\n $curParent->Children = new ArrayList();\n }\n\n // Modify the name so we only show the last component\n $value->FullLabel = $value->Label;\n $p = strrpos($value->Label, $facet->HierarchyDivider);\n if ($p > -1) {\n $value->Label = trim(substr($value->Label, $p + 1));\n }\n\n $curParent->Children->push($value);\n break;\n } else {\n array_pop($parentStack);\n }\n }\n\n // If we went all the way back to the root without a match, this is\n // a new parent item\n if (count($parentStack) == 0) {\n $out->push($value);\n }\n\n // Each item could be a potential parent. 
If it's not it will get popped\n // immediately on the next iteration\n $parentStack[] = $value;\n }\n\n $facet->NestedValues = $out;\n }\n }\n\n return $facets;\n }\n}\n", "meta": {"content_hash": "6478753a7729571cecfad83c1a477ed7", "timestamp": "", "source": "github", "line_count": 691, "max_line_length": 242, "avg_line_length": 40.04920405209841, "alnum_prop": 0.4753559297535593, "repo_name": "markguinn/silverstripe-shop-search", "id": "bfb09294dcb715bc4cacf98092967be2b163a30f", "size": "27674", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "code/helpers/FacetHelper.php", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "3858"}, {"name": "JavaScript", "bytes": "16835"}, {"name": "PHP", "bytes": "152023"}, {"name": "Scheme", "bytes": "3171"}, {"name": "Shell", "bytes": "463"}]}} {"text": "package org.jetbrains.plugins.hocon.misc\n\nimport com.intellij.psi.PsiElement\nimport com.intellij.xml.breadcrumbs.BreadcrumbsInfoProvider\nimport org.jetbrains.plugins.hocon.lang.HoconLanguage\nimport org.jetbrains.plugins.hocon.psi.HKeyedField\n\nclass HoconBreadcrumbsInfoProvider extends BreadcrumbsInfoProvider {\n def getElementTooltip(e: PsiElement) = null\n\n def getElementInfo(e: PsiElement): String = e match {\n case kf: HKeyedField => kf.key.map(_.stringValue).getOrElse(\"\")\n case _ => \"\"\n }\n\n def acceptElement(e: PsiElement): Boolean = e match {\n case _: HKeyedField => true\n case _ => false\n }\n\n def getLanguages = Array(HoconLanguage)\n}\n", "meta": {"content_hash": "5a63083ddf29feded05294f5a117eca1", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 68, "avg_line_length": 30.09090909090909, "alnum_prop": 0.7522658610271903, "repo_name": "loskutov/intellij-scala", "id": "0fc95d7dcab59656b53fc4efed2ef81453c363f6", "size": "662", "binary": false, "copies": "2", "ref": "refs/heads/idea172.x-better-implicits", "path": "src/org/jetbrains/plugins/hocon/misc/HoconBreadcrumbsInfoProvider.scala", 
"mode": "33188", "license": "apache-2.0", "language": [{"name": "HTML", "bytes": "55566"}, {"name": "Java", "bytes": "1397858"}, {"name": "Lex", "bytes": "35728"}, {"name": "Scala", "bytes": "11387086"}, {"name": "Shell", "bytes": "537"}]}} {"text": "export default connect()(App)\n", "meta": {"content_hash": "e182d1add0a3d11793f891809b401601", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 29, "avg_line_length": 30.0, "alnum_prop": 0.7666666666666667, "repo_name": "yp/eslint-plugin-import", "id": "9c1d8d40706b16077e1649bf8f5d1f4e44f8b275", "size": "45", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tests/files/redux.js", "mode": "33188", "license": "mit", "language": [{"name": "CoffeeScript", "bytes": "27"}, {"name": "JavaScript", "bytes": "141336"}]}} {"text": "\n\npackage com.google.cloud.resourcemanager.v3.samples;\n\n// [START cloudresourcemanager_v3_generated_TagValuesSettings_GetTagValue_sync]\nimport com.google.cloud.resourcemanager.v3.TagValuesSettings;\nimport java.time.Duration;\n\npublic class SyncGetTagValue {\n\n public static void main(String[] args) throws Exception {\n syncGetTagValue();\n }\n\n public static void syncGetTagValue() throws Exception {\n // This snippet has been automatically generated and should be regarded as a code template only.\n // It will require modifications to work:\n // - It may require correct/in-range values for request initialization.\n // - It may require specifying regional endpoints when creating the service client as shown in\n // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library\n TagValuesSettings.Builder tagValuesSettingsBuilder = TagValuesSettings.newBuilder();\n tagValuesSettingsBuilder\n .getTagValueSettings()\n .setRetrySettings(\n tagValuesSettingsBuilder.getTagValueSettings().getRetrySettings().toBuilder()\n .setTotalTimeout(Duration.ofSeconds(30))\n .build());\n TagValuesSettings tagValuesSettings = 
tagValuesSettingsBuilder.build();\n }\n}\n// [END cloudresourcemanager_v3_generated_TagValuesSettings_GetTagValue_sync]\n", "meta": {"content_hash": "833ec22440066f5c5dafb7a349d47bd2", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 100, "avg_line_length": 42.806451612903224, "alnum_prop": 0.7550866616428034, "repo_name": "googleapis/google-cloud-java", "id": "ea1e9e42c96dd58cbd5ddf466800e5f454236899", "size": "1922", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "java-resourcemanager/samples/snippets/generated/com/google/cloud/resourcemanager/v3/tagvaluessettings/gettagvalue/SyncGetTagValue.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Dockerfile", "bytes": "2614"}, {"name": "HCL", "bytes": "28592"}, {"name": "Java", "bytes": "826434232"}, {"name": "Jinja", "bytes": "2292"}, {"name": "Python", "bytes": "200408"}, {"name": "Shell", "bytes": "97954"}]}} {"text": "{% extends \"base.html\" %}\n{% import \"bootstrap/wtf.html\" as wtf %}\n\n\n{% block title %}Flasky{% endblock %}\n\n{% block page_content %}\n\n
\n\n {{ wtf.quick_form(form) }}\n\n
\n\n\n\n\n
\n
\n \n \n \n \n {% for date_tmp in date_range_response %}\n \n {% endfor %}\n \n \n \n\n \n\n {% for teacher in teacher_sum_response_data%}\n\n \n \n\n {% for date_tmp in date_range_response %}\n {% if teacher_sum_response_data[teacher][date_tmp] %}\n \n {% else %}\n \n {% endif %}\n {% endfor %}\n\n \n\n\n \n {% endfor %}\n \n\n
#{{ date_tmp.strftime( '%m-%d' ) }}
{{ date_tmp.strftime( '%a' ) }}
SUM
{{ teacher}}{{ teacher_sum_response_data[teacher][date_tmp].total_seconds()/3600 }}{{ teacher_sum_response_data[teacher]['total_classes_time'].total_seconds()/3600 }}
\n\n\n
\n\n\n\n\n{% endblock %}\n\n{% block scripts %}\n{{ super() }}\n{{ pagedown.include_pagedown() }}\n{% endblock %}\n", "meta": {"content_hash": "8badc6e348235cedb484327ffd9fd265", "timestamp": "", "source": "github", "line_count": 66, "max_line_length": 217, "avg_line_length": 24.242424242424242, "alnum_prop": 0.446875, "repo_name": "phillinzzz/flasky", "id": "a4e9af2dfae3a02f8147746149befd42794468ec", "size": "1600", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/templates/generate_sum_teacher_classes.html", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "1855"}, {"name": "HTML", "bytes": "64919"}, {"name": "Mako", "bytes": "412"}, {"name": "Python", "bytes": "134813"}]}} {"text": "\n", "meta": {"content_hash": "901da9e709d0010b5d0bcb5cce18571f", "timestamp": "", "source": "github", "line_count": 194, "max_line_length": 59, "avg_line_length": 40.16494845360825, "alnum_prop": 0.7254876796714579, "repo_name": "nicolanrizzo/nationalitylist", "id": "6d6f933730fc0ddd971e1ce1f1233d49581a257d", "size": "7801", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "HTMLdropdown/fr.html", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "16790"}]}} {"text": " \n\npackage com.june.dto.back.demo;\n\nimport java.io.Serializable;\nimport java.util.List;\n\nimport com.june.common.PageDTO;\n\npublic class TreeGridDto extends PageDTO implements Serializable{\n\t/**\n\t * long serialVersionUID\n\t */\n\tprivate static final long serialVersionUID = 4936747688639597710L;\n\tprivate String id; //\u8282\u70b9id\n private String pid;//\u7236\u8282\u70b9id\n\tprivate String name;\n\tprivate String persons;\n\tprivate String begin;\n\tprivate String end;\n\tprivate List children;\n\tprivate String iconCls;//\u8282\u70b9\u7684\u56fe\u6807\n\tprivate String state; //\u8282\u70b9\u7684\u72b6\u6001\uff0c\u5c55\u5f00(open)\u8fd8\u662f\u95ed\u5408(closed)\uff0c\n\tpublic String getName() {\n\t\treturn 
name;\n\t}\n\tpublic void setName(String name) {\n\t\tthis.name = name;\n\t}\n\tpublic String getPersons() {\n\t\treturn persons;\n\t}\n\tpublic void setPersons(String persons) {\n\t\tthis.persons = persons;\n\t}\n\tpublic String getBegin() {\n\t\treturn begin;\n\t}\n\tpublic void setBegin(String begin) {\n\t\tthis.begin = begin;\n\t}\n\tpublic String getEnd() {\n\t\treturn end;\n\t}\n\tpublic void setEnd(String end) {\n\t\tthis.end = end;\n\t}\n\tpublic List getChildren() {\n\t\treturn children;\n\t}\n\tpublic void setChildren(List children) {\n\t\tthis.children = children;\n\t}\n\tpublic String getIconCls() {\n\t\treturn iconCls;\n\t}\n\tpublic void setIconCls(String iconCls) {\n\t\tthis.iconCls = iconCls;\n\t}\n\tpublic String getState() {\n\t\treturn state;\n\t}\n\tpublic void setState(String state) {\n\t\tthis.state = state;\n\t}\n\tpublic String getId() {\n\t\treturn id;\n\t}\n\tpublic void setId(String id) {\n\t\tthis.id = id;\n\t}\n\tpublic String getPid() {\n\t\treturn pid;\n\t}\n\tpublic void setPid(String pid) {\n\t\tthis.pid = pid;\n\t}\n\t@Override\n\tprotected String getDtoName() {\n\t\treturn \"TreeGridDto\";\n\t}\n}\n", "meta": {"content_hash": "17b9633ac5cab6e196f0418cd3455537", "timestamp": "", "source": "github", "line_count": 82, "max_line_length": 78, "avg_line_length": 20.182926829268293, "alnum_prop": 0.7087613293051359, "repo_name": "junehappylove/june_web_new", "id": "1636c9f674bd5ebcc2c7442302d4e13c5dd56ea2", "size": "2024", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/june/dto/back/demo/TreeGridDto.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "256048"}, {"name": "FreeMarker", "bytes": "74"}, {"name": "HTML", "bytes": "18205"}, {"name": "Java", "bytes": "582281"}, {"name": "JavaScript", "bytes": "883753"}, {"name": "PHP", "bytes": "178445"}]}} {"text": "@implementation MoodMessageCell\n\n- (instancetype)initWithStyle:(UITableViewCellStyle)style 
reuseIdentifier:(NSString *)reuseIdentifier{\n \n if (self = [super initWithStyle:style reuseIdentifier:reuseIdentifier]) {\n \n self.tagImageView = [[UIImageView alloc] initWithFrame:CGRectZero];\n //self.tagImageView.backgroundColor = [UIColor redColor];\n [self.contentView addSubview:self.tagImageView];\n \n self.backImageView = [[UIImageView alloc] initWithFrame:CGRectZero];\n [self.contentView addSubview:self.backImageView];\n \n self.emptyImageView = [[UIImageView alloc] initWithFrame:CGRectZero];\n [self.contentView addSubview:self.emptyImageView];\n \n self.tagLabel = [[UILabel alloc] initWithFrame:CGRectZero];\n self.tagLabel.textColor = [UIColor whiteColor];\n [self.contentView addSubview:self.tagLabel];\n \n self.descriptionLabel = [[UILabel alloc] initWithFrame:CGRectZero];\n self.descriptionLabel.numberOfLines = 0;\n self.descriptionLabel.textColor = [UIColor colorWithWhite:0.510 alpha:1.000];\n [self.contentView addSubview:self.descriptionLabel];\n \n \n self.timeThread = [[UIImageView alloc] initWithFrame:CGRectZero];\n self.timeThread.backgroundColor = [UIColor grayColor];\n [self.contentView addSubview:self.timeThread];\n \n \n \n }\n \n return self;\n}\n\n- (void)setValueWithModel:(dayModel *)model{\n \n //\u6807\u8bb0\u56fe\u7247\n self.tagImageView.frame = CGRectMake(15, 2, 20, 20);\n self.tagImageView.image = [UIImage imageNamed:[NSString stringWithFormat:@\"%ld\", model.moodDay]];\n\n //\u6807\u8bb0\u80cc\u666f\n self.backImageView.frame = CGRectMake(CGRectGetMaxX(self.tagImageView.frame), self.tagImageView.frame.origin.y, jjScreenWidth - 70, 100);\n //\u80cc\u666f\u56fe\u7247\u5904\u7406\n UIImage *backImage = [UIImage imageNamed:@\"background\"];\n UIEdgeInsets edgeInsets = UIEdgeInsetsMake(5, 15, 0, 5);\n // \u62c9\u4f38\u56fe\u7247\n UIImage *newImage = [backImage resizableImageWithCapInsets:edgeInsets resizingMode:UIImageResizingModeStretch];\n self.backImageView.image = newImage;\n\n //\u6807\u8bb0\u5185\u5bb9\n self.tagLabel.frame = 
CGRectMake(self.backImageView.frame.origin.x+15, self.backImageView.frame.origin.y + 8, self.backImageView.frame.size.width - 5, 30);\n self.tagLabel.font = [UIFont systemFontOfSize:17 weight:10];\n self.tagLabel.text = [NSString getTagStringTag:model.tagDay];\n \n \n //\u8bb0\u5f55\u5185\u5bb9\n self.descriptionLabel.text = [NSString stringWithFormat:@\"%@\\n%@\",[NSString getDateStringFromDate:model.dateDay],model.content];\n self.descriptionLabel.font = [UIFont systemFontOfSize:14];\n \n CGFloat labelHeight = [GetHeightTool getHeightForText:self.descriptionLabel.text font:[UIFont systemFontOfSize:14] width:jjScreenWidth - 100];\n self.descriptionLabel.frame = CGRectMake(CGRectGetMinX(self.backImageView.frame) + 15, CGRectGetMaxY(self.tagLabel.frame)+ 5, self.backImageView.frame.size.width - 20, labelHeight);\n self.emptyImageView.frame = CGRectMake(self.backImageView.frame.origin.x + 7, CGRectGetMaxY(self.tagLabel.frame), self.backImageView.frame.size.width - 8, self.descriptionLabel.frame.size.height + 10);\n \n //\u80cc\u666f\u56fe\u7247\u5904\u7406\n UIImage *image = [UIImage imageNamed:@\"empty\"];\n //\u4e0a, \u5de6, \u4e0b, \u53f3\u90e8\u5206\u4e0d\u53ef\u62c9\u4f38\u7684\u533a\u57df\n UIEdgeInsets edgeInsets1 = UIEdgeInsetsMake(0, 10, 10, 10);\n UIImage *newimage = [image resizableImageWithCapInsets:edgeInsets1 resizingMode:UIImageResizingModeStretch];\n self.emptyImageView.image = newimage;\n \n \n //\u65f6\u95f4\u8f74\n self.timeThread.frame = CGRectMake(self.tagImageView.center.x - 1, CGRectGetMaxY(self.tagImageView.frame)+1, 2, self.descriptionLabel.frame.size.height + 33);\n\n \n \n}\n\n- (void)awakeFromNib {\n // Initialization code\n}\n\n- (void)setSelected:(BOOL)selected animated:(BOOL)animated {\n [super setSelected:selected animated:animated];\n\n // Configure the view for the selected state\n}\n\n@end\n", "meta": {"content_hash": "896a8222b5596bd706c245acc79ac0f6", "timestamp": "", "source": "github", "line_count": 92, "max_line_length": 205, 
"avg_line_length": 42.68478260869565, "alnum_prop": 0.7119938884644768, "repo_name": "YunZhiJunXiPeng/ShreddedBread", "id": "5c6850e14e4f8d181c1e31cd4918254cbdcc5286", "size": "4247", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ShreddedBread/Scenes/JJStatistics/CustomView/MoodMessageCell.m", "mode": "33261", "license": "mit", "language": [{"name": "Objective-C", "bytes": "260862"}, {"name": "Ruby", "bytes": "159"}]}} {"text": "\npackage org.eclipse.rdf4j.rio.hdt;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.zip.CheckedInputStream;\n\nimport org.eclipse.rdf4j.common.io.UncloseableInputStream;\n\n/**\n * Log64\n *\n * It contains the data part of the {@link HDTArray}, followed by the 32-bit CRC calculated over this data.\n *\n * Data structure:\n *\n *
\n * ...+---------+-------+\n *    | entries | CRC32 |\n * ...+---------+-------+\n * 
\n *\n * Entries are stored little-endian, with each entry using nrbits bits\n *\n * @author Bart Hanssens\n */\nclass HDTArrayLog64 extends HDTArray {\n\tprivate byte buffer[];\n\n\t@Override\n\tprotected int getType() {\n\t\treturn HDTArray.Type.LOG64.getValue();\n\t}\n\n\t@Override\n\tprotected int get(int i) {\n\t\t// start byte of the value, and start bit in that start byte\n\t\tint bytePos = (i * nrbits) / 8;\n\t\tint bitPos = (i * nrbits) % 8;\n\n\t\t// value bits may be encoded across boundaries of bytes\n\t\tint tmplen = (bitPos + nrbits + 7) / 8;\n\n\t\tlong val = 0L;\n\t\t// little-endian to big-endian\n\t\tfor (int j = 0; j < tmplen; j++) {\n\t\t\tval |= (buffer[bytePos + j] & 0xFFL) << (j * 8);\n\t\t}\n\n\t\tval >>= bitPos;\n\t\tval &= 0xFFFFFFFFFFFFFFFFL >>> (64 - nrbits);\n\n\t\treturn (int) val;\n\t}\n\n\t@Override\n\tprotected void parse(InputStream is) throws IOException {\n\t\tsuper.parse(is);\n\n\t\t// don't close CheckedInputStream, as it will close the underlying inputstream\n\t\ttry (UncloseableInputStream uis = new UncloseableInputStream(is);\n\t\t\t\tCheckedInputStream cis = new CheckedInputStream(uis, new CRC32())) {\n\t\t\t// read bytes, minimum 1\n\t\t\tlong bytes = (nrbits * entries + 7) / 8;\n\t\t\tif (bytes > Integer.MAX_VALUE) {\n\t\t\t\tthrow new UnsupportedOperationException(\"Maximum number of bytes in array exceeded: \" + bytes);\n\t\t\t}\n\n\t\t\tbuffer = new byte[(int) bytes];\n\t\t\tcis.read(buffer);\n\n\t\t\tcheckCRC(cis, is, 4);\n\t\t}\n\t}\n}\n", "meta": {"content_hash": "b1116b8f592c0b17f9ca3a314d25a6b9", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 107, "avg_line_length": 24.4, "alnum_prop": 0.6513661202185792, "repo_name": "eclipse/rdf4j", "id": "a571f89d1475b121f9881d7849e470c3c4ee2969", "size": "2349", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "core/rio/hdt/src/main/java/org/eclipse/rdf4j/rio/hdt/HDTArrayLog64.java", "mode": "33188", "license": "bsd-3-clause", 
"language": [{"name": "CSS", "bytes": "23587"}, {"name": "Dockerfile", "bytes": "791"}, {"name": "HTML", "bytes": "70935"}, {"name": "Java", "bytes": "15830881"}, {"name": "JavaScript", "bytes": "375102"}, {"name": "Ruby", "bytes": "36145"}, {"name": "Shell", "bytes": "23631"}, {"name": "TypeScript", "bytes": "52940"}, {"name": "XSLT", "bytes": "144185"}]}} {"text": "\n\npackage com.thoughtworks.go.util;\n\nimport com.thoughtworks.go.agent.ServerUrlGenerator;\nimport com.thoughtworks.go.domain.JobIdentifier;\nimport org.springframework.stereotype.Component;\n\nimport java.net.URI;\nimport java.net.URISyntaxException;\n\nimport static java.lang.String.format;\n\n@Component\npublic class URLService implements ServerUrlGenerator{\n private String baseRemotingURL;\n\n public URLService() {\n String url = new SystemEnvironment().getServiceUrl();\n if (url.endsWith(\"/\")) {\n url = url.substring(0, url.length() - 1);\n }\n baseRemotingURL = url;\n }\n\n public URLService(String baseRemotingURL) {\n this.baseRemotingURL = baseRemotingURL;\n }\n\n public String baseRemoteURL() {\n return baseRemotingURL;\n }\n\n public String getBuildRepositoryURL() {\n return baseRemotingURL + \"/remoting/remoteBuildRepository\";\n }\n\n public String getAgentRegistrationURL() {\n return baseRemotingURL + \"/admin/agent\";\n }\n \n public String getAgentLatestStatusUrl() {\n return baseRemotingURL + \"/admin/latest-agent.status\";\n }\n\n public String getUploadUrlOfAgent(JobIdentifier jobIdentifier, String filePath) {\n return getUploadUrlOfAgent(jobIdentifier, filePath, 1);\n }\n\n public String getComponentVersionsOnServerUrl() {\n return String.format(\"%s/%s\", baseRemotingURL, \"admin/component-versions-on-server\");\n }\n\n\n // TODO - keep buildId for now because currently we do not support 'jobcounter' \n // and therefore cannot locate job correctly when it is rescheduled\n public String getUploadUrlOfAgent(JobIdentifier jobIdentifier, String filePath, int attempt) {\n return 
format(\"%s/%s/%s/%s?attempt=%d&buildId=%d\", baseRemotingURL, \"remoting\", \"files\", jobIdentifier.artifactLocator(filePath), attempt, jobIdentifier.getBuildId());\n }\n\n /*\n * Server will use this method, the base url is in the request.\n */\n public String getRestfulArtifactUrl(JobIdentifier jobIdentifier, String filePath) {\n return format(\"/%s/%s\", \"files\", jobIdentifier.artifactLocator(filePath));\n }\n\n\n public String getUploadBaseUrlOfAgent(JobIdentifier jobIdentifier) {\n return format(\"%s/%s/%s/%s\", baseRemotingURL, \"remoting\", \"files\", jobIdentifier.artifactLocator(\"\"));\n }\n\n /*\n * Agent will use this method, the baseUrl will be injected from config xml in agent side.\n * This is used to fix security issues with the agent uploading artifacts when security is enabled.\n */\n public String getPropertiesUrl(JobIdentifier jobIdentifier, String propertyName) {\n return format(\"%s/%s/%s/%s\",\n baseRemotingURL, \"remoting\", \"properties\", jobIdentifier.propertyLocator(propertyName));\n }\n\n public String serverUrlFor(String subPath) {\n return format(\"%s/%s\", baseRemotingURL, subPath);\n }\n\n public String serverSslBaseUrl(int serverHttpsPort) {\n return baseRemotingURL;\n }\n\n public String getAgentRemoteWebSocketUrl() {\n return format(\"%s/%s\", getWebSocketBaseUrl(), \"agent-websocket\");\n }\n public String getWebSocketBaseUrl() {\n try {\n URI uri = new URI(baseRemotingURL);\n StringBuffer ret = new StringBuffer(\"wss://\");\n ret.append(uri.getHost()).append(\":\").append(uri.getPort());\n if (uri.getPath() != null) {\n ret.append(uri.getPath());\n }\n return ret.toString();\n } catch (URISyntaxException e) {\n throw new RuntimeException(\"Invalid Go Server url\", e);\n }\n }\n\n public String prefixPartialUrl(String url) {\n if(url.startsWith(\"/\")) {\n return format(\"%s%s\", baseRemoteURL(), url);\n }\n return url;\n }\n}\n", "meta": {"content_hash": "093951f53db590dc065c8ab7abe36e5a", "timestamp": "", "source": 
"github", "line_count": 113, "max_line_length": 175, "avg_line_length": 33.61946902654867, "alnum_prop": 0.6635956830744932, "repo_name": "VibyJocke/gocd", "id": "7a35ec79a46b08e4c52e08741ac03af6ad8a4b8e", "size": "4545", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "common/src/com/thoughtworks/go/util/URLService.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "8248"}, {"name": "CSS", "bytes": "502291"}, {"name": "FreeMarker", "bytes": "182"}, {"name": "HTML", "bytes": "647411"}, {"name": "Java", "bytes": "16221590"}, {"name": "JavaScript", "bytes": "2878258"}, {"name": "NSIS", "bytes": "19386"}, {"name": "PowerShell", "bytes": "743"}, {"name": "Ruby", "bytes": "2954955"}, {"name": "SQLPL", "bytes": "9050"}, {"name": "Shell", "bytes": "240291"}, {"name": "XSLT", "bytes": "153049"}]}} {"text": "'use strict';\n\nmodule.exports = function () {\n this.alert = function () {\n throw 'Doh! You used a real notifier rather than a test double!';\n };\n};\n", "meta": {"content_hash": "df60aba531d968f6828a52645e113116", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 69, "avg_line_length": 21.857142857142858, "alnum_prop": 0.6274509803921569, "repo_name": "dwhelan/training", "id": "b619376c1ad029817c41c04ba5a8fa1d345467a2", "size": "153", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "js/bartender/notifier.js", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "12876"}, {"name": "Java", "bytes": "8799"}, {"name": "JavaScript", "bytes": "7733"}, {"name": "Ruby", "bytes": "987"}, {"name": "Shell", "bytes": "131"}]}} {"text": "\n\npackage rest\n\nimport (\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/golang/glog\"\n\n\t\"k8s.io/kubernetes/pkg/api\"\n\t\"k8s.io/kubernetes/pkg/api/rest\"\n\t\"k8s.io/kubernetes/pkg/apis/extensions\"\n\textensionsapiv1beta1 \"k8s.io/kubernetes/pkg/apis/extensions/v1beta1\"\n\textensionsclient 
\"k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset/typed/extensions/internalversion\"\n\t\"k8s.io/kubernetes/pkg/genericapiserver\"\n\thorizontalpodautoscaleretcd \"k8s.io/kubernetes/pkg/registry/autoscaling/horizontalpodautoscaler/etcd\"\n\tjobetcd \"k8s.io/kubernetes/pkg/registry/batch/job/etcd\"\n\texpcontrolleretcd \"k8s.io/kubernetes/pkg/registry/extensions/controller/etcd\"\n\tdaemonetcd \"k8s.io/kubernetes/pkg/registry/extensions/daemonset/etcd\"\n\tdeploymentetcd \"k8s.io/kubernetes/pkg/registry/extensions/deployment/etcd\"\n\tingressetcd \"k8s.io/kubernetes/pkg/registry/extensions/ingress/etcd\"\n\tnetworkpolicyetcd \"k8s.io/kubernetes/pkg/registry/extensions/networkpolicy/etcd\"\n\tpspetcd \"k8s.io/kubernetes/pkg/registry/extensions/podsecuritypolicy/etcd\"\n\treplicasetetcd \"k8s.io/kubernetes/pkg/registry/extensions/replicaset/etcd\"\n\tthirdpartyresourceetcd \"k8s.io/kubernetes/pkg/registry/extensions/thirdpartyresource/etcd\"\n\tutilruntime \"k8s.io/kubernetes/pkg/util/runtime\"\n\t\"k8s.io/kubernetes/pkg/util/wait\"\n)\n\ntype RESTStorageProvider struct {\n\tResourceInterface ResourceInterface\n}\n\nvar _ genericapiserver.RESTStorageProvider = &RESTStorageProvider{}\n\nfunc (p RESTStorageProvider) NewRESTStorage(apiResourceConfigSource genericapiserver.APIResourceConfigSource, restOptionsGetter genericapiserver.RESTOptionsGetter) (genericapiserver.APIGroupInfo, bool) {\n\tapiGroupInfo := genericapiserver.NewDefaultAPIGroupInfo(extensions.GroupName)\n\n\tif apiResourceConfigSource.AnyResourcesForVersionEnabled(extensionsapiv1beta1.SchemeGroupVersion) {\n\t\tapiGroupInfo.VersionedResourcesStorageMap[extensionsapiv1beta1.SchemeGroupVersion.Version] = p.v1beta1Storage(apiResourceConfigSource, restOptionsGetter)\n\t\tapiGroupInfo.GroupMeta.GroupVersion = extensionsapiv1beta1.SchemeGroupVersion\n\t}\n\n\treturn apiGroupInfo, true\n}\n\nfunc (p RESTStorageProvider) v1beta1Storage(apiResourceConfigSource 
genericapiserver.APIResourceConfigSource, restOptionsGetter genericapiserver.RESTOptionsGetter) map[string]rest.Storage {\n\tversion := extensionsapiv1beta1.SchemeGroupVersion\n\n\tstorage := map[string]rest.Storage{}\n\n\tif apiResourceConfigSource.ResourceEnabled(version.WithResource(\"horizontalpodautoscalers\")) {\n\t\thpaStorage, hpaStatusStorage := horizontalpodautoscaleretcd.NewREST(restOptionsGetter(extensions.Resource(\"horizontalpodautoscalers\")))\n\t\tstorage[\"horizontalpodautoscalers\"] = hpaStorage\n\t\tstorage[\"horizontalpodautoscalers/status\"] = hpaStatusStorage\n\n\t\tcontrollerStorage := expcontrolleretcd.NewStorage(restOptionsGetter(api.Resource(\"replicationControllers\")))\n\t\tstorage[\"replicationcontrollers\"] = controllerStorage.ReplicationController\n\t\tstorage[\"replicationcontrollers/scale\"] = controllerStorage.Scale\n\t}\n\tif apiResourceConfigSource.ResourceEnabled(version.WithResource(\"thirdpartyresources\")) {\n\t\tthirdPartyResourceStorage := thirdpartyresourceetcd.NewREST(restOptionsGetter(extensions.Resource(\"thirdpartyresources\")))\n\t\tstorage[\"thirdpartyresources\"] = thirdPartyResourceStorage\n\t}\n\n\tif apiResourceConfigSource.ResourceEnabled(version.WithResource(\"daemonsets\")) {\n\t\tdaemonSetStorage, daemonSetStatusStorage := daemonetcd.NewREST(restOptionsGetter(extensions.Resource(\"daemonsets\")))\n\t\tstorage[\"daemonsets\"] = daemonSetStorage\n\t\tstorage[\"daemonsets/status\"] = daemonSetStatusStorage\n\t}\n\tif apiResourceConfigSource.ResourceEnabled(version.WithResource(\"deployments\")) {\n\t\tdeploymentStorage := deploymentetcd.NewStorage(restOptionsGetter(extensions.Resource(\"deployments\")))\n\t\tstorage[\"deployments\"] = deploymentStorage.Deployment\n\t\tstorage[\"deployments/status\"] = deploymentStorage.Status\n\t\tstorage[\"deployments/rollback\"] = deploymentStorage.Rollback\n\t\tstorage[\"deployments/scale\"] = deploymentStorage.Scale\n\t}\n\tif 
apiResourceConfigSource.ResourceEnabled(version.WithResource(\"jobs\")) {\n\t\tjobsStorage, jobsStatusStorage := jobetcd.NewREST(restOptionsGetter(extensions.Resource(\"jobs\")))\n\t\tstorage[\"jobs\"] = jobsStorage\n\t\tstorage[\"jobs/status\"] = jobsStatusStorage\n\t}\n\tif apiResourceConfigSource.ResourceEnabled(version.WithResource(\"ingresses\")) {\n\t\tingressStorage, ingressStatusStorage := ingressetcd.NewREST(restOptionsGetter(extensions.Resource(\"ingresses\")))\n\t\tstorage[\"ingresses\"] = ingressStorage\n\t\tstorage[\"ingresses/status\"] = ingressStatusStorage\n\t}\n\tif apiResourceConfigSource.ResourceEnabled(version.WithResource(\"podsecuritypolicy\")) {\n\t\tpodSecurityExtensionsStorage := pspetcd.NewREST(restOptionsGetter(extensions.Resource(\"podsecuritypolicy\")))\n\t\tstorage[\"podSecurityPolicies\"] = podSecurityExtensionsStorage\n\t}\n\tif apiResourceConfigSource.ResourceEnabled(version.WithResource(\"replicasets\")) {\n\t\treplicaSetStorage := replicasetetcd.NewStorage(restOptionsGetter(extensions.Resource(\"replicasets\")))\n\t\tstorage[\"replicasets\"] = replicaSetStorage.ReplicaSet\n\t\tstorage[\"replicasets/status\"] = replicaSetStorage.Status\n\t\tstorage[\"replicasets/scale\"] = replicaSetStorage.Scale\n\t}\n\tif apiResourceConfigSource.ResourceEnabled(version.WithResource(\"networkpolicies\")) {\n\t\tnetworkExtensionsStorage := networkpolicyetcd.NewREST(restOptionsGetter(extensions.Resource(\"networkpolicies\")))\n\t\tstorage[\"networkpolicies\"] = networkExtensionsStorage\n\t}\n\n\treturn storage\n}\n\nfunc (p RESTStorageProvider) PostStartHook() (string, genericapiserver.PostStartHookFunc, error) {\n\treturn \"extensions/third-party-resources\", p.postStartHookFunc, nil\n}\nfunc (p RESTStorageProvider) postStartHookFunc(hookContext genericapiserver.PostStartHookContext) error {\n\tclientset, err := extensionsclient.NewForConfig(hookContext.LoopbackClientConfig)\n\tif err != nil {\n\t\tutilruntime.HandleError(fmt.Errorf(\"unable to 
initialize clusterroles: %v\", err))\n\t\treturn nil\n\t}\n\n\tthirdPartyControl := ThirdPartyController{\n\t\tmaster: p.ResourceInterface,\n\t\tclient: clientset,\n\t}\n\tgo wait.Forever(func() {\n\t\tif err := thirdPartyControl.SyncResources(); err != nil {\n\t\t\tglog.Warningf(\"third party resource sync failed: %v\", err)\n\t\t}\n\t}, 10*time.Second)\n\n\treturn nil\n}\n", "meta": {"content_hash": "8b3f61eaebe03acd27711fe244102bd2", "timestamp": "", "source": "github", "line_count": 128, "max_line_length": 203, "avg_line_length": 48.390625, "alnum_prop": 0.8224087826929286, "repo_name": "linearregression/kubernetes", "id": "f7e4497b5edcd195cb2915bd1158290f75fed077", "size": "6763", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pkg/registry/extensions/rest/storage_extensions.go", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "978"}, {"name": "Go", "bytes": "44736541"}, {"name": "HTML", "bytes": "2530253"}, {"name": "Makefile", "bytes": "72431"}, {"name": "Nginx", "bytes": "1013"}, {"name": "Protocol Buffer", "bytes": "580076"}, {"name": "Python", "bytes": "918751"}, {"name": "SaltStack", "bytes": "54088"}, {"name": "Shell", "bytes": "1542252"}]}} {"text": "\n/*\r\n * Copyright 2005 Jenia org.\r\n * \r\n * Licensed under the Apache License, Version 2.0 (the \"License\");\r\n * you may not use this file except in compliance with the License.\r\n * You may obtain a copy of the License at\r\n * \r\n * http://www.apache.org/licenses/LICENSE-2.0\r\n * \r\n * Unless required by applicable law or agreed to in writing, software\r\n * distributed under the License is distributed on an \"AS IS\" BASIS,\r\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n * See the License for the specific language governing permissions and\r\n * limitations under the License.\r\n */\r\npackage org.fckfaces.util;\r\n\r\nimport java.io.BufferedInputStream;\r\nimport java.io.IOException;\r\nimport 
java.io.InputStream;\r\nimport java.io.OutputStream;\r\nimport java.text.SimpleDateFormat;\r\nimport java.util.Date;\r\nimport java.util.Locale;\r\nimport java.util.TimeZone;\r\n\r\nimport javax.servlet.ServletConfig;\r\nimport javax.servlet.ServletException;\r\nimport javax.servlet.http.HttpServlet;\r\nimport javax.servlet.http.HttpServletRequest;\r\nimport javax.servlet.http.HttpServletResponse;\r\n\r\n/**\r\n * @author srecinto\r\n */\r\npublic class Servlet extends HttpServlet {\r\n\r\n\tprivate static final long serialVersionUID = 7260045528613530636L;\r\n\r\n\tprivate static final String modify=calcModify();\r\n\t\r\n\tprivate volatile String customResourcePath;\r\n\t\r\n\tprivate static final String calcModify() {\r\n\t\tDate mod = new Date(System.currentTimeMillis());\r\n\t\tSimpleDateFormat sdf = new SimpleDateFormat(\"EEE, d MMM yyyy HH:mm:ss z\",Locale.ENGLISH);\r\n\t\tsdf.setTimeZone(TimeZone.getTimeZone(\"GMT\"));\r\n\t\treturn sdf.format(mod);\r\n\t}\r\n\t\r\n\tpublic void init(ServletConfig config) throws ServletException { \r\n\t\tsuper.init(config); \r\n\t\tsetCustomResourcePath(config.getInitParameter(\"customResourcePath\"));\r\n\t} \r\n\r\n\tpublic void doGet(HttpServletRequest request, HttpServletResponse response)\r\n throws ServletException, IOException {\r\n\r\n // search the resource in classloader\r\n ClassLoader cl = this.getClass().getClassLoader();\r\n String uri = request.getRequestURI();\r\n String path = uri.substring(uri.indexOf(Util.FCK_FACES_RESOURCE_PREFIX)+Util.FCK_FACES_RESOURCE_PREFIX.length()+1);\r\n \r\n if(getCustomResourcePath() != null) { //Use custom path to FCKeditor\r\n \tthis.getServletContext().getRequestDispatcher(getCustomResourcePath() + path).forward(request,response);\r\n } else { //Use default FCKeditor bundled up in the jar\r\n \tif (uri.endsWith(\".jsf\") || uri.endsWith(\".html\")) {\r\n\t \tresponse.setContentType(\"text/html;charset=UTF-8\");\r\n\t } else {\r\n\t response.setHeader(\"Cache-Control\", 
\"public\");\r\n\t response.setHeader(\"Last-Modified\", modify);\r\n\t }\r\n\t if (uri.endsWith(\".css\")) {\r\n\t \tresponse.setContentType(\"text/css;charset=UTF-8\");\r\n\t } else if (uri.endsWith(\".js\")) {\r\n\t \tresponse.setContentType(\"text/javascript;charset=UTF-8\");\r\n\t } else if (uri.endsWith(\".gif\")) {\r\n\t \tresponse.setContentType(\"image/gif;\");\r\n\t } else if (uri.endsWith(\".xml\")) {\r\n\t \tresponse.setContentType(\"text/xml;charset=UTF-8\");\r\n\t } \r\n\t \r\n\t InputStream is = cl.getResourceAsStream(path);\r\n\t // if no resource found in classloader return nothing\r\n\t if (is==null) return;\r\n\t // resource found, copying on output stream\r\n\t OutputStream out = response.getOutputStream();\r\n\t byte[] buffer = new byte[2048];\r\n\t BufferedInputStream bis = new BufferedInputStream(is);\r\n\t try {\r\n\t \tint read = 0;\r\n\t \tread = bis.read(buffer);\r\n\t \twhile (read!=-1) {\r\n\t \t\tout.write(buffer,0,read);\r\n\t \t\tread = bis.read(buffer);\r\n\t \t}\r\n\t } finally {\r\n\t \tbis.close();\r\n\t }\r\n\t out.flush();\r\n\t out.close();\r\n }\r\n }\r\n\r\n\tpublic String getCustomResourcePath() {\r\n\t\treturn customResourcePath;\r\n\t}\r\n\r\n\tpublic void setCustomResourcePath(String customResourcePath) {\r\n\t\tsynchronized (this) {\r\n\t\t\tthis.customResourcePath = customResourcePath;\r\n\t\t}\r\n\t}\r\n\r\n}\r\n", "meta": {"content_hash": "6ed42d7c56e7b81ce6d2b58bc6d4a0ab", "timestamp": "", "source": "github", "line_count": 116, "max_line_length": 123, "avg_line_length": 35.44827586206897, "alnum_prop": 0.6551556420233463, "repo_name": "GIP-RECIA/esco-grouper-ui", "id": "27e498d6a8afc95cc0fca7adba5b38bfd7c7dc9d", "size": "4898", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ext/esup-commons/src/main/java/org/fckfaces/util/Servlet.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "218175"}, {"name": "Java", "bytes": "6786611"}, {"name": "JavaScript", "bytes": 
"1616913"}, {"name": "Shell", "bytes": "3445"}, {"name": "XSLT", "bytes": "3601"}]}} {"text": "namespace performance_manager {\n\nNodeAttachedData::NodeAttachedData() = default;\n\nNodeAttachedData::~NodeAttachedData() = default;\n\n// static\nvoid NodeAttachedDataMapHelper::AttachInMap(\n const Node* node,\n std::unique_ptr data) {\n GraphImpl* graph = GraphImpl::FromGraph(node->GetGraph());\n DCHECK_CALLED_ON_VALID_SEQUENCE(graph->sequence_checker_);\n const NodeBase* node_base = NodeBase::FromNode(node);\n DCHECK(graph->NodeInGraph(node_base));\n GraphImpl::NodeAttachedDataKey data_key =\n std::make_pair(node, data->GetKey());\n auto& map = graph->node_attached_data_map_;\n DCHECK(!base::Contains(map, data_key));\n map[data_key] = std::move(data);\n}\n\n// static\nNodeAttachedData* NodeAttachedDataMapHelper::GetFromMap(const Node* node,\n const void* key) {\n GraphImpl* graph = GraphImpl::FromGraph(node->GetGraph());\n DCHECK_CALLED_ON_VALID_SEQUENCE(graph->sequence_checker_);\n const NodeBase* node_base = NodeBase::FromNode(node);\n DCHECK(graph->NodeInGraph(node_base));\n GraphImpl::NodeAttachedDataKey data_key = std::make_pair(node, key);\n auto& map = graph->node_attached_data_map_;\n auto it = map.find(data_key);\n if (it == map.end())\n return nullptr;\n DCHECK_EQ(key, it->second->GetKey());\n return it->second.get();\n}\n\n// static\nstd::unique_ptr NodeAttachedDataMapHelper::DetachFromMap(\n const Node* node,\n const void* key) {\n GraphImpl* graph = GraphImpl::FromGraph(node->GetGraph());\n DCHECK_CALLED_ON_VALID_SEQUENCE(graph->sequence_checker_);\n const NodeBase* node_base = NodeBase::FromNode(node);\n DCHECK(graph->NodeInGraph(node_base));\n GraphImpl::NodeAttachedDataKey data_key = std::make_pair(node, key);\n auto& map = graph->node_attached_data_map_;\n auto it = map.find(data_key);\n\n std::unique_ptr data;\n if (it != map.end()) {\n data = std::move(it->second);\n map.erase(it);\n }\n\n return data;\n}\n\n} // namespace performance_manager\n", 
"meta": {"content_hash": "d91851c7c5cd4f0c2fd415f222c6fd67", "timestamp": "", "source": "github", "line_count": 59, "max_line_length": 75, "avg_line_length": 34.016949152542374, "alnum_prop": 0.6885899352267065, "repo_name": "scheib/chromium", "id": "666e7ba3a732469d807ea3b7ac06677e581efc11", "size": "2452", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "components/performance_manager/graph/node_attached_data.cc", "mode": "33188", "license": "bsd-3-clause", "language": []}} {"text": "\n\nimport { Component, OnInit } from '@angular/core';\n\n@Component({\n selector: 'app-comp-4914',\n templateUrl: './comp-4914.component.html',\n styleUrls: ['./comp-4914.component.css']\n})\nexport class Comp4914Component implements OnInit {\n\n constructor() { }\n\n ngOnInit() {\n }\n\n}\n", "meta": {"content_hash": "a64eea122cb5b0989b2721b78d29778c", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 50, "avg_line_length": 16.58823529411765, "alnum_prop": 0.6666666666666666, "repo_name": "angular/angular-cli-stress-test", "id": "12ea09fdbaa297c871b09ec8014594b826b15282", "size": "484", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/app/components/comp-4914/comp-4914.component.ts", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "1040888"}, {"name": "HTML", "bytes": "300322"}, {"name": "JavaScript", "bytes": "2404"}, {"name": "TypeScript", "bytes": "8535506"}]}} {"text": "\n\npackage org.apache.flink.runtime.webmonitor;\n\nimport org.apache.flink.api.common.JobID;\nimport org.apache.flink.api.common.time.Time;\nimport org.apache.flink.api.java.tuple.Tuple2;\nimport org.apache.flink.runtime.checkpoint.CompletedCheckpoint;\nimport org.apache.flink.runtime.clusterframework.types.ResourceID;\nimport org.apache.flink.runtime.executiongraph.AccessExecutionGraph;\nimport org.apache.flink.runtime.jobgraph.JobStatus;\nimport org.apache.flink.runtime.jobgraph.JobVertexID;\nimport 
org.apache.flink.runtime.jobmaster.JobResult;\nimport org.apache.flink.runtime.jobmaster.RescalingBehaviour;\nimport org.apache.flink.runtime.messages.Acknowledge;\nimport org.apache.flink.runtime.messages.FlinkJobNotFoundException;\nimport org.apache.flink.runtime.messages.webmonitor.ClusterOverview;\nimport org.apache.flink.runtime.messages.webmonitor.MultipleJobsDetails;\nimport org.apache.flink.runtime.metrics.dump.MetricQueryService;\nimport org.apache.flink.runtime.rest.handler.legacy.backpressure.OperatorBackPressureStatsResponse;\nimport org.apache.flink.runtime.rpc.RpcEndpoint;\nimport org.apache.flink.runtime.rpc.RpcGateway;\nimport org.apache.flink.runtime.rpc.RpcTimeout;\n\nimport java.util.Collection;\nimport java.util.concurrent.CompletableFuture;\n\n/**\n * Gateway for restful endpoints.\n *\n *

Gateways which implement this method run a REST endpoint which is reachable\n * under the returned address.\n */\npublic interface RestfulGateway extends RpcGateway {\n\n\t/**\n\t * Cancel the given job.\n\t *\n\t * @param jobId identifying the job to cancel\n\t * @param timeout of the operation\n\t * @return A future acknowledge if the cancellation succeeded\n\t */\n\tCompletableFuture cancelJob(JobID jobId, @RpcTimeout Time timeout);\n\n\t/**\n\t * Stop the given job.\n\t *\n\t * @param jobId identifying the job to stop\n\t * @param timeout of the operation\n\t * @return A future acknowledge if the stopping succeeded\n\t */\n\tCompletableFuture stopJob(JobID jobId, @RpcTimeout Time timeout);\n\n\t/**\n\t * Requests the REST address of this {@link RpcEndpoint}.\n\t *\n\t * @param timeout for this operation\n\t * @return Future REST endpoint address\n\t */\n\tCompletableFuture requestRestAddress(@RpcTimeout Time timeout);\n\n\t/**\n\t * Requests the {@link AccessExecutionGraph} for the given jobId. 
If there is no such graph, then\n\t * the future is completed with a {@link FlinkJobNotFoundException}.\n\t *\n\t * @param jobId identifying the job whose AccessExecutionGraph is requested\n\t * @param timeout for the asynchronous operation\n\t * @return Future containing the AccessExecutionGraph for the given jobId, otherwise {@link FlinkJobNotFoundException}\n\t */\n\tCompletableFuture requestJob(JobID jobId, @RpcTimeout Time timeout);\n\n\t/**\n\t * Requests the {@link JobResult} of a job specified by the given jobId.\n\t *\n\t * @param jobId identifying the job for which to retrieve the {@link JobResult}.\n\t * @param timeout for the asynchronous operation\n\t * @return Future which is completed with the job's {@link JobResult} once the job has finished\n\t */\n\tCompletableFuture requestJobResult(JobID jobId, @RpcTimeout Time timeout);\n\n\t/**\n\t * Requests job details currently being executed on the Flink cluster.\n\t *\n\t * @param timeout for the asynchronous operation\n\t * @return Future containing the job details\n\t */\n\tCompletableFuture requestMultipleJobDetails(\n\t\t@RpcTimeout Time timeout);\n\n\t/**\n\t * Requests the cluster status overview.\n\t *\n\t * @param timeout for the asynchronous operation\n\t * @return Future containing the status overview\n\t */\n\tCompletableFuture requestClusterOverview(@RpcTimeout Time timeout);\n\n\t/**\n\t * Requests the paths for the {@link MetricQueryService} to query.\n\t *\n\t * @param timeout for the asynchronous operation\n\t * @return Future containing the collection of metric query service paths to query\n\t */\n\tCompletableFuture> requestMetricQueryServicePaths(@RpcTimeout Time timeout);\n\n\t/**\n\t * Requests the paths for the TaskManager's {@link MetricQueryService} to query.\n\t *\n\t * @param timeout for the asynchronous operation\n\t * @return Future containing the collection of instance ids and the corresponding metric query service path\n\t */\n\tCompletableFuture>> 
requestTaskManagerMetricQueryServicePaths(@RpcTimeout Time timeout);\n\n\t/**\n\t * Triggers a savepoint with the given savepoint directory as a target.\n\t *\n\t * @param jobId ID of the job for which the savepoint should be triggered.\n\t * @param targetDirectory Target directory for the savepoint.\n\t * @param timeout Timeout for the asynchronous operation\n\t * @return A future to the {@link CompletedCheckpoint#getExternalPointer() external pointer} of\n\t * the savepoint.\n\t */\n\tdefault CompletableFuture triggerSavepoint(\n\t\t\tJobID jobId,\n\t\t\tString targetDirectory,\n\t\t\tboolean cancelJob,\n\t\t\t@RpcTimeout Time timeout) {\n\t\tthrow new UnsupportedOperationException();\n\t}\n\n\t/**\n\t * Dispose the given savepoint.\n\t *\n\t * @param savepointPath identifying the savepoint to dispose\n\t * @param timeout RPC timeout\n\t * @return A future acknowledge if the disposal succeeded\n\t */\n\tdefault CompletableFuture disposeSavepoint(\n\t\t\tfinal String savepointPath,\n\t\t\t@RpcTimeout final Time timeout) {\n\t\tthrow new UnsupportedOperationException();\n\t}\n\n\t/**\n\t * Request the {@link JobStatus} of the given job.\n\t *\n\t * @param jobId identifying the job for which to retrieve the JobStatus\n\t * @param timeout for the asynchronous operation\n\t * @return A future to the {@link JobStatus} of the given job\n\t */\n\tdefault CompletableFuture requestJobStatus(\n\t\t\tJobID jobId,\n\t\t\t@RpcTimeout Time timeout) {\n\t\tthrow new UnsupportedOperationException();\n\t}\n\n\t/**\n\t * Requests the statistics on operator back pressure.\n\t *\n\t * @param jobId Job for which the stats are requested.\n\t * @param jobVertexId JobVertex for which the stats are requested.\n\t * @return A Future to the {@link OperatorBackPressureStatsResponse} or {@code null} if the stats are\n\t * not available (yet).\n\t */\n\tdefault CompletableFuture requestOperatorBackPressureStats(\n\t\t\tJobID jobId,\n\t\t\tJobVertexID jobVertexId) {\n\t\tthrow new 
UnsupportedOperationException();\n\t}\n\n\t/**\n\t * Trigger rescaling of the given job.\n\t *\n\t * @param jobId specifying the job to rescale\n\t * @param newParallelism new parallelism of the job\n\t * @param rescalingBehaviour defining how strict the rescaling has to be executed\n\t * @param timeout of this operation\n\t * @return Future which is completed with {@link Acknowledge} once the rescaling was successful\n\t */\n\tdefault CompletableFuture rescaleJob(\n\t\t\tJobID jobId,\n\t\t\tint newParallelism,\n\t\t\tRescalingBehaviour rescalingBehaviour,\n\t\t\t@RpcTimeout Time timeout) {\n\t\tthrow new UnsupportedOperationException();\n\t}\n\n\tdefault CompletableFuture shutDownCluster() {\n\t\tthrow new UnsupportedOperationException();\n\t}\n}\n", "meta": {"content_hash": "5bf4160c1bf25c03ed67dd1d109e915f", "timestamp": "", "source": "github", "line_count": 191, "max_line_length": 127, "avg_line_length": 36.58115183246073, "alnum_prop": 0.7688564476885644, "repo_name": "zhangminglei/flink", "id": "6a6c34b90db227e5e19f530d1f253b8ddc8af25c", "size": "7792", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "flink-runtime/src/main/java/org/apache/flink/runtime/webmonitor/RestfulGateway.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "5666"}, {"name": "CSS", "bytes": "18100"}, {"name": "Clojure", "bytes": "63105"}, {"name": "CoffeeScript", "bytes": "91220"}, {"name": "Dockerfile", "bytes": "3528"}, {"name": "HTML", "bytes": "86821"}, {"name": "Java", "bytes": "39364056"}, {"name": "JavaScript", "bytes": "8267"}, {"name": "Python", "bytes": "249644"}, {"name": "Scala", "bytes": "7201411"}, {"name": "Shell", "bytes": "282124"}]}} {"text": "# Kinvey GeoTag\nThis is a Kinvey sample app, to location-based search, 3rd-Party location services, Push notifications, and Business Logic with Collection Hooks and Custom Endpoints. 
\n\nIn particular in addition to showing location-based data, this app allows user actions to trigger push notifications to other users that are near new notes and are interested in certain tags. \n\nFor more details about location, see the blog post at http://goo.gl/9dyMm.\nFor more details about business logic, see http://devcenter.kinvey.com/ios/guides/business-logic.\n\n## Using the App\nThe app shows a map highlighting the user's current location. The map is annotated with nearby hotels (from the 3rd-paty location service) as well as yours and other user's notes that match your selected tags. \n\nTap the \"page curl\" button to see a list of nearby tags. Selecting a tag will cause the user to subscribe to be notified for new notes with that tag as well as display those notes on the map. \n\nTo enter a new note, type in the text field. Any word preceded by a `#` will be added as a tag. \n\n## Set-up\n\n### Set up the App in Xcode\n\nAlso, you have to update `app-key` and `app-secret` in the file `KGAAppDelegate.m` to your app-key and app-secret from the Kinvey console. \n\nTo enable push, you need a push certificate from the Apple developer portal. Upload to Kinvey (under the push configuration) and enter the `Push Key` and `Push Secret` in the file `KGAAppDelegate.m. \n\n### Set up Locations services\n\nTo enable Data Integration with this app, just\n\n* Go to the **Locations** Add-On and select a provider. For example, choose \"FourSquare\" ![Enable Data Integration](https://github.com/KinveyApps/GeoTag-iOS/raw/master/Screenshots/Enable.png \"Enable Data Integration\")\n* Name the endpoint `hotels` and enter your foursquare credentials. Then press `Create Configuration`. ![Enter Credentials](https://github.com/KinveyApps/GeoTag-iOS/raw/master/Screenshots/Active.png \"Enter Credentials\")\n\n### Set up Collection Hook To Automatically Push to Nearby users\n1. 
From the `Addons` -> `Data & Storage` -> `Data Store` menu, add a new collection and call it `mapNotes`.\n2. From the `Addons` -> `Business Logic` -> `Collection Hooks` menu, select the `mapNotes` collection in the left menu. \n3. Select __`After` every `Save` run this function__ from the javascript area.\n4. Enter the code from [`after_save_mapnotes.js`](https://github.com/KinveyApps/GeoTag-iOS/raw/master/after_save_mapnotes.js) in the code window:\n\n```\nfunction onPostSave(request, response, modules){\n var push = modules.push, collectionAccess = modules.collectionAccess, logger = modules.logger;\n var userCollection = collectionAccess.collection('user');\n var body = request.body;\n if (body.tags && body._geoloc) {\n logger.info(\"added map note with tags: \" + body.tags +\", location: \"+ body._geoloc);\n var distanceInMiles = 5.0 /*5 mi radius*/ / 3963.192;\n var query = {\"tags\": {\"$in\":body.tags}, \"_geoloc\":{\"$nearSphere\": body._geoloc,\"$maxDistance\":distanceInMiles}};\n userCollection.find(query, function (err, userColl) {\n logger.info(\"got \" + userColl.length + \" users with matching tags.\");\n if (err) {\n logger.error('Query failed: '+ err);\n } else {\n userColl.forEach(function (user) {\n logger.info('Pushing message to ' + user.username);\n push.send(user, \"New notes for tag(s): \" + body.tags);\n });\n }\n response.continue();\n });\n } else {\n logger.info(\"no tags in \" + body);\n response.continue();\n }\n}\n```\n\nThis code does the following:\n\n1. Extracts any tags and location from the just saved `MapNote` object. \n2. Searches the user collection for users that are (a) last within 5 miles of the note, and (b) have subscribed at least of the new note's tags in it's `tags` field.\n3. For each of the users that satisfy these requirements, send a push notification letting them know a new note is available for those tags.\n4. 
__Next Step:__ An even better form of the push notification would be include the note's `_id` and just reload that note. Right now the app just displays an alert and reloads all the notes. \n\n\n### Set up Custom Endpoint\n1. From the `Addons` -> `Business Logic` -> `Custom Endpoints` menu, create a new endpoint called `tagsNearMe`.\n2. Enter the code from [`tagsNearMe.js`](https://github.com/KinveyApps/GeoTag-iOS/raw/master/tagsNearMe.js) in the code window:\n\n```\nfunction getTags(request,response,modules,user) {\n var headers = {\"Authorization\":request.headers.authorization}; //re-use the current user's ACLs rather than master secret\n var loc = user._geoloc;\n var qs = '{\"_geoloc\":{\"$nearSphere\":['+loc+'],\"$maxDistance\":\"10\"}}'; //find notes within 10 miles\n var uri = 'https://' + request.headers.host + '/appdata/'+request.appKey+'/mapNotes/?query='+qs; //build the request\n modules.request.get({uri: uri, headers: headers}, function(error, res, body){\n\tif (error){\n\t modules.logger.error(error);\n\t response.body = {error: error.message};\n response.complete(res.status);\n\t} else {\n\t //iterate through all the notes and count the tags\n\t var elements = JSON.parse(body);\n\t var tags = {};\n\t elements.forEach(function(doc){\n\t\tdoc.tags.forEach(function(tagar) {\n\t\t if (tags[tagar]) {\n\t\t\ttags[tagar]++;\n\t\t } else {\n\t\t\ttags[tagar] = 1;\n\t\t }\n\t\t});\n\t });\n\t response.body = tags; //return all the tags with their count, could create a count threshold in the future\n\t response.complete(200);\n\t}\n});\n}\n\nfunction onRequest(request, response, modules){\n var collectionAccess = modules.collectionAccess;\n //find the current user in the user collection\n collectionAccess.collection('user').find({\"username\": request.username}, function (err, userColl) {\n if (err) {\n response.body = {error: error.message};\n response.complete(434);\n } else {\n getTags(request,response,modules,userColl[0]);\n }\n });\n}\n``` \n\nThis code 
does the following:\n\n1. The `onRequest` method is called when the request comes in. \n2. This method looks up the user using `collectionAccess`. The user object is needed to obtain the user's location.\n3. In `getTags()` a call is made \"as the user\" using the user's `Authorization` header to `mapNotes` collection. This is done in order to respect the user's ACLs (as collectionAccess is done as the \"master secret\"). For GeoTag, this should not make a difference since `mapNotes` is globally readable by default.\n4. If the response is successful, it will be an array of `MapNotes` objects. This array is iterated over and the each of the tags is counted in a running total.\n5. After all the notes are counted, the totals object is returned to the app. \n6. __Next Step:__ A good next step is to limit the tags to the 20 most popular, or only display tags that have been used 5 or more times in order to limit the noisiness of the data. \n\n__NOTE:__ it is important to `complete()` the response in all terminal branches, or the client will timeout waiting for a response. 
\n\n## System Requirements\n* Xcode 4.5+\n* iPad/iPhone/iPod Touch\n* iOS 6+\n* KinveyKit 1.17.0+\n\n## Contact\nWebsite: [www.kinvey.com](http://www.kinvey.com)\n\nSupport: [support@kinvey.com](http://docs.kinvey.com/mailto:support@kinvey.com)\n\n## License\n\nCopyright (c) 2013 Kinvey, Inc.\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\nhttp://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.", "meta": {"content_hash": "ddb5b564568a45b03101ca899f155f7f", "timestamp": "", "source": "github", "line_count": 157, "max_line_length": 311, "avg_line_length": 49.67515923566879, "alnum_prop": 0.7218874214642903, "repo_name": "hraschke/spotl_kinvstart_loc", "id": "1e02cb366a1543528b77a0dd1b5ca6e4a0583b67", "size": "7799", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "1050"}, {"name": "C++", "bytes": "1771"}, {"name": "JavaScript", "bytes": "2531"}, {"name": "Objective-C", "bytes": "261794"}, {"name": "Shell", "bytes": "1993"}]}} {"text": "\n#ifndef __parserclass__\n#define __parserclass__\n\n#include \n#include \n#include \n#include \n#include \n\n/**\n\\mainpage pelet: Php Easy LanguagE Toolkit. 
A C++ library for analyzing PHP source code\n\\section Overview\nThis doc briefly describes the major design of the pelet parser library.\n\npelet has the folowing major components:\n\n- Parser\n- Lexer\n- Parser Implementation\n\n\\section Parser\nThe pelet::ParserClass, along with pelet::ClassObserverClass, pelet::ClassMemberObserverClass, \npelet::FunctionObserverClass, pelet::VariableObserverClass, and pelet::ExpressionObserverClass, make up\nthe \"driver\" (or main entry point) to pelet. ParserClass takes as input a string (or file) of PHP source code and \nextracts artifacts from it (classes, functions, methods, etc..).\n\nTo use the parser, a user will create a class that defines the callbacks for each PHP artifact (class, function,\nmethod, etc). The user will register these callbacks with the parser. When pelet::ParserClass::ScanFile is called, \nthe bison parser rules start looking for syntax rules. The parser will ask the lexer for tokens. As soon as a \nspecific rule is hit, then the proper parser observer callback gets called. For example, when the parser hits \nthe \"class\" rule (ie \"class MyClass {\") then the class observer will get called; and the observer will get the \nidentifer (\"MyClass\") along with other info (signature, comment).\n\nThe important thing to note here is that the callbacks happen while ParserClass::ScanFile still has control.\nParserClass::ScanFile does not return control until the entire file has been parsed; multiple callbacks will have\nbeen called before ParserClass::ScanFile returns. For this reason, it is important that ParserClass::ScanFile\nshould not be called within any of the observers.\n\nA word on concurrency: The pelet parser does not keep global state (it is a \"pure\" bison parser), but the pelet\nparser is not thread-safe. 
If pelet is used on a multi-threaded app, each thread should have its own instance\nof pelet::ParserClass.\n\n\\section Lexer \nThe pelet::LexicalAnalyzerClass is used to tokenize the source code (turn strings into tokens). The\ndetails of the implementation can be found in the \\ref LexerDetailsPage page.\n\n\\section ParserImplementation Parser Implementation\nThis is a parser generated with the help of Bison. This used to follow the PHP rules; \nfor example after \"function\" comes the function name; after \"if\" comes a \"(\" and so on...)\nWhen the syntax rules hit an artifact (for example a class) the syntax rules will call the proper observer.\nThe details of the implementation can be found in the \\ref ParserImplementationDetailsPage page.\n*/\nnamespace pelet {\n\n/**\n * Holds the results of the lint check. Currently lint check will stop when \n * the first error is encountered.\n */\nclass PELET_API LintResultsClass {\npublic:\n\n\t/**\n\t * A short description of the error; this is generated by the bison parser and is\n\t * not the most user-friendly but it is exactly what PHP displays; might as well\n\t * keep it consistant.\n\t */\n\tUnicodeString Error;\n\n\t/**\n\t * Path to the file in which the error ocurred. \n\t * This is what was given to the LintFile() method or ScanFile(std::string, LintResultsClass&) method. \n\t * For LintString() results this will be the empty string.\n\t */\n\tstd::string File;\n\n\t/**\n\t * Path to the file in which the error ocurred. \n\t * This is what was given to the LintFile() or ScanFile(FILE*, UnicodeString, LintResultsClass&) method.\n\t * For LintString() results this will be the empty string.\n\t */\n\tUnicodeString UnicodeFilename;\n\t\n\t/**\n\t * If the parser encountered a syntax error, then this object will be filled with \n\t * the last known class/method/function where the error occurred.\n\t */\n\tpelet::ScopeClass Scope;\n\n\t/**\n\t * The line in which the error ocurred. 
This is 1-based.\n\t */\n\tint LineNumber;\n\n\t/**\n\t * The character offset in which the error ocurred (with regargs to the start of\n\t * the file). This is 0-based.\n\t */\n\tint CharacterPosition;\n\n\tLintResultsClass();\n\n\t/**\n\t * copy the attributes from src to this object.\n\t */\n\tvoid Copy(const LintResultsClass& src);\n\t\n\t/**\n\t * remove any error string\n\t */\n\tvoid Clear();\n};\n\n/**\n * The parser class is designed in a way that can utilized by different pieces of code. The parser will analyze\n * given code and make calls to the different registered observers. There are observers for classes, functions, and \n * methods. Not all observers have to be set; for example if a FunctionObserverClass is never registered then the \n * parser will not notify when a function has been found in a piece of code.\n * \n * @code\n * class EchoAndObserverClass : public ClassObserverClass {\n * \n * virtual void ClassFound(const UnicodeString& className, const UnicodeString& signature, \n * const UnicodeString& comment) {\n * printf(\"Found Class %s\\n\", (const char*)className.ToUTF8());\n * }\n * }\n * \n * EchoAndObserverClass echoObserver;\n * ParserClass parser;\n * parser.SetClassObserver(&echoObserver);\n * wxString someFileName = wxT(\"/some/file.php\");\n * if (!parser.ScanFile(someFileName)) {\n * puts(\"Could not find file to parse!\");\n * }\n * @endcode\n * \n * Observers follow the PHP parsing rules to the letter. If source code is not valid; then observers may not\n * get called.\n *\n * Lint functionality\n * \n * The parser class has the ability to check PHP code for syntax errors. This is done via the LintXXX() methods.\n * \n * @code\n * ParserClass parser;\n * std::string file = \"/path/to/phpfile.php\";\n * LintResultsClass lintResults;\n * if (parser.LintFile(file, parserResults)) {\n * printf(\"No syntax errors in file %s\", (const char*)file.c_str());\n * }\n * else {\n * printf(\"%s. 
Error found in file %s on line %d.\\n\", parserResults.Error, file.c_str(), parserResults.LineNumber);\n * }\n * @endcode\n */\nclass PELET_API ParserClass {\n\n\npublic:\n\t\n\tParserClass();\n\t\n\t/**\n\t * Opens and scans the given file; This function will return once the entire\n\t * file has been parsed; it will call the proper observers when it encounters\n\t * a class, function, or variable declaration. This means that this\n\t * parser should not be modified in the observer calls.\n\t * \n\t * This is a convenience method, it does no handle unicode file names. For that,\n\t * see ScanFile(FILE*, UnicodeString, LintResultsClass)\n\t *\n\t * @param file the file to parse. Must be a full path.\n\t * @param LintResultsClass& results any error message will be populated here\n\t * @return bool if file was found and could be parsed successfully\n\t */\n\tbool ScanFile(const std::string& file, LintResultsClass& results);\n\n\t/**\n\t * Opens and scans the given file; This function will return once the entire\n\t * file has been parsed; it will call the proper observers when it encounters\n\t * a class, function, or variable declaration. This means that this\n\t * parser should not be modified in the observer calls. \n\t * This method is given a file pointer, it is useful for example when a file\n\t * with a unicode filename is opened by the caller.\n\t * \n\t * @param file the file to parse, this class will NOT own the file pointer\n\t * @param fileName this is the name that will be set in results.UnicodeFilename when an error happens\n\t * @param LintResultsClass& results any error message will be populated here\n\t * @return bool if file was found and could be parsed successfully\n\t */\n\tbool ScanFile(FILE* file, const UnicodeString& fileName, LintResultsClass& results);\n\t\n\t/**\n\t * Scans the given string. 
This function will return once the entire\n\t * string has been parsed; it will call the proper observers when it encounters\n\t * a class, function, or variable declaration. This means that this\n\t * parser should not be modified in the observer calls.\n\t * \n\t * @param const UnicodeString& code the code to parse.\n\t * @param LintResultsClass& results any error message will be populated here\n\t * @return bool if string could be parsed successfully\n\t */\n\tbool ScanString(const UnicodeString& code, LintResultsClass& results);\n\t\n\t/**\n\t * Change the version that this parser can handle. This needs to be called BEFORE ScanFile() or\n\t * ScanString()\n\t */\n\tvoid SetVersion(Versions version);\n\t\n\t/**\n\t * Set the class observer. The observer will get notified when a class is encountered.\n\t * Memory management of this pointer should be done by the caller.\n\t * \n\t * @param ClassObserverClass* observer the object to sent notifications to \n\t */\n\tvoid SetClassObserver(ClassObserverClass* observer);\n\t\n\t/**\n\t * Set the class member observer. The observer will get notified when a class member is encountered.\n\t * Memory management of this pointer should be done by the caller.\n\t * \n\t * @param ClassMemberObserverClass* observer the object to sent notifications to \n\t */\n\tvoid SetClassMemberObserver(ClassMemberObserverClass* observer);\n\t\n\t/**\n\t * Set the function observer. The observer will get notified when a function is encountered.\n\t * Memory management of this pointer should be done by the caller.\n\t * \n\t * @param FunctionObserverClass* observer the object to sent notifications to \n\t */\n\tvoid SetFunctionObserver(FunctionObserverClass* observer);\n\t\n\t/**\n\t * Set the variable observer. 
The observer will get notified when a new variable has been created.\n\t * Memory management of this pointer should be done by the caller.\n\t * \n\t * There are performance implications if you call this method; if you want to be notified \n\t * of variables then the full PHP parser is used; and parsing a file can be memory intensive.\n\t *\n\t * @param VariableObserverClass* observer the object to sent notifications to \n\t */\n\tvoid SetVariableObserver(VariableObserverClass* observer);\n\t\n\t/**\n\t * Set the expression observer. The observer will get notified when a new expression has been created.\n\t * Memory management of this pointer should be done by the caller.\n\t *\n\t * There are performance implications if you call this method; if you want to be notified \n\t * of expressions then the full PHP parser is used; and parsing a file can be memory intensive.\n\t *\n\t * @param ExpressionObserverClass* observer the object to sent notifications to \n\t */\n\tvoid SetExpressionObserver(ExpressionObserverClass* expressionObserver);\n\t\n\t/**\n\t * Perform a TRUE PHP syntax check on the entire file. This syntax check is based on PHP 5.3\n\t * or PHP 5.4 depending on whether SetVersion() was called.\n\t *\n\t * Note that this is not entirely the same as 'php -l' command; the PHP lint command detects \n\t * duplicate function / class names where as this lint check method does not.\n\t *\n\t * Returns true if the file had no syntax errors. Note that a file that does not have\n\t * any PHP code will be considered a good file (a PHP file that has only HTML is\n\t * considered good and true will be returned).\n\t *\n\t * This is a convenience method; unicode filenames are not handled.\n\t * \n\t * @param file the file to parse. 
Must be a full path.\n\t * @param LintResultsClass& results any error message will be populated here\n\t * @return bool true if file was found and had no syntax errors.\n\t */\n\tbool LintFile(const std::string& file, LintResultsClass& results);\n\n\t/**\n\t * Perform a TRUE PHP syntax check on the entire file. This syntax check is based on PHP 5.3\n\t * or PHP 5.4 depending on whether SetVersion() was called.\n\t *\n\t * Note that this is not entirely the same as 'php -l' command; the PHP lint command detects \n\t * duplicate function / class names where as this lint check method does not.\n\t *\n\t * Returns true if the file had no syntax errors. Note that a file that does not have\n\t * any PHP code will be considered a good file (a PHP file that has only HTML is\n\t * considered good and true will be returned).\n\t * \n\t * @param FILE* file the file to parse. Must be an opened file pointer, this class will NOT own the file pointer\n\t * @param fileName this is the name that will be set in results.UnicodeFilename when an error happens\n\t * @param LintResultsClass& results any error message will be populated here\n\t * @return bool true if file was found and had no syntax errors.\n\t */\n\tbool LintFile(FILE* file, const UnicodeString& filename, LintResultsClass& results);\n\t\n\t/**\n\t * Perform a syntax check on the given source code. Source code is assumed to be\n\t * all code (HTML will not be skipped, and will result in syntax errors). The PHP \n\t * open tag is optional.\n\t * Returns true if the code had no syntax errors.\n\t * \n\t * @param const UnicodeString& code the actual code to parse.\n\t * @param LintResultsClass& results any error message will be populated here\n\t * @return bool true if the code has no syntax errors.\n\t */\n\tbool LintString(const UnicodeString& code, LintResultsClass& results);\n\n\t/**\n\t * @return the character position where the parser is currently parsing. 
This can be called\n\t * inside an observer callback; in which case the character position is right PAST the\n\t * current token.\n\t */\n\tint GetCharacterPosition() const;\n\n\t\n\t/**\n\t * Parses a given PHP expression. This method will parse the given expression into a list of\n\t * of \"chained\" calls.\n\t *\n\t * A PHP expression is \n\t * - a variable ($obj)\n\t * - a function call (myFunc())\n\t * - an object operation (\"$obj->prop\")\n\t * - a static object operation (\"MyClass::Prop\")\n\t * \n\t * Object operations can be chained; like \"$obj->prop->anotherFunc()\". While indirect variables are allowed\n\t * in PHP (ie $this->$prop) this method will not handle them as it is nearly impossible to resolve them at parse time.\n\t *\n\t * The most extreme example is this expression: \"$obj->prop->anotherFunc()\"\n\t * This method will parse the expression into\n\t * $obj\n\t * ->prop\n\t * ->anotherFunc()\n\n\t * For example, if sourceCode represented this string:\n\t * \n\t * @code\n\t * UnicodeString sourceCode = UNICODE_STRING_SIMPLE(\"\n\t * class UserClass {\n\t * private $name;\n\t * \n\t * function getName() {\n\t * return $this->\n\t * \");\n\t * @endcode\n\t * then the following C++ code can be used to find a variable's type\n\t * \n\t * @code\n\t * ParserClass parser;\n\t * UnicodeString expression = UNICODE_STRING_SIMPLE(\"$this->\");\n\t * pelet::SymbolClass exprResult;\n\t * if (parser.ParseExpression(expression, exprResult)) {\n\t * \t// if successful, symbol.Lexeme will be set to \"$this\"\n\t * }\n\t * @endcode\n\t * \n\t * \n\t * @param expression the code string of the expression to resolve. 
This must be the code for a single expression.\n\t * Examples:\n\t * $anObject\n\t * $this->prop\n\t * $this->work()->another\n\t * $this->\n\t *\t\t work()->another\n\t *\t\t work()\n\t * self::prop\n\t * self::prop::\n\t * self::func()->prop\n\t * parent::prop\n\t * parent::fun()->prop\n\t * aFunction\n\t * An expression can have whitespace like this\n\t * $anObject\n\t *\t\t\t->method1()\n\t *\t\t\t->method2()\n\t *\t\t\t->method3()\n\t *\n\t * A special case that happens when the given expression ends with the object operator:\n\t * $this->\n\t * MyClass::\n\t * In this case, the operator will be added the chain list; this way the client code can determine that\n\t * the variable name actually ended.\n\t * @param expression the expression's name and \"chain\" list. The properties of this object will be reset every call.\n\t */\n\tvoid ParseExpression(UnicodeString expressionString, pelet::VariableClass& variable);\n\t\nprivate:\n\n\t/**\n\t * Clean up any resources after parsing a file. This is also very important if the \n\t * parser opens a string; without closing the string will not be released (if it's a\n\t * long string).\n\t */\n\tvoid Close();\n\n\t/**\n\t * Used to tokenize code\n\t */\n\tLexicalAnalyzerClass Lexer;\n\t\n\t/**\n\t * Notify the ClassObserver when a class has been found. Memory management of this pointer should be\n\t * done by the caller.\n\t */\n\tClassObserverClass* ClassObserver;\n\n\t/**\n\t * Notify the ClassMemberObserver when a class member has been found. Memory management of this pointer should be\n\t * done by the caller.\n\t */\n\tClassMemberObserverClass* ClassMemberObserver;\n\t\n\t/**\n\t * Notify the FunctionObserver when a function has been found. Memory management of this pointer should be\n\t * done by the caller.\n\t */\t\n\tFunctionObserverClass* FunctionObserver;\n\t\n\t/**\n\t * Notify the VariableObserver when a variable has been created. 
Memory management of this pointer should be\n\t * done by the caller.\n\t */\t\t\n\tVariableObserverClass* VariableObserver;\n\t\n\t\n\t/**\n\t * Notify the ExpressionObserver when an expressionhas been found. Memory management of this pointer should be\n\t * done by the caller.\n\t */\t\t\n\tExpressionObserverClass* ExpressionObserver;\n\t\n\t/**\n\t * The PHP version to handle\n\t */\n\tVersions Version;\n};\n\n\n\n}\n#endif // __parserclass__\n", "meta": {"content_hash": "f0e910471a31c351c4c7c7f1841f2b0f", "timestamp": "", "source": "github", "line_count": 440, "max_line_length": 120, "avg_line_length": 38.12954545454546, "alnum_prop": 0.7174107408952732, "repo_name": "robertop/pelet", "id": "c673ac60abd37abe03ad58f7e01f296b5b3a3b78", "size": "18051", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "include/pelet/ParserClass.h", "mode": "33188", "license": "mit", "language": [{"name": "C++", "bytes": "6035243"}, {"name": "HTML", "bytes": "12620"}, {"name": "Lua", "bytes": "15806"}, {"name": "Makefile", "bytes": "2367"}, {"name": "PHP", "bytes": "2884"}, {"name": "Shell", "bytes": "1468"}, {"name": "Yacc", "bytes": "661554"}]}} {"text": "\n#pragma once\n\nextern \"C\"\n{\n\n /*!\n * \n * \\brief Data structure to capture X-12-ARIMA options\n */\n typedef struct __X12ARIMA_OPTIONS__\n {\n // Data section\n long lStartDate; ///< is the serial date number for the start date of the time series.\n BOOL monthly; ///< is a flag to indicate whether data is monthly/quarterly.\n size_t nObs; ///< is the number of observations in the input time series.\n\n \n int transform; ///< Transform section (1=Log, 2=Auto and 3=None)\n\n // Outlier\n BOOL AOOutlier; ///< additive outlier adjustment\n BOOL TCOutlier; ///< temporary \n BOOL LSOutlier; ///< level shift outlier adjustment\n int LSRun; ///< level shift run\n\n // Regression section\n BOOL tradingDayRegression; ///< Calendar adjustment: trading days\n BOOL EasterRegression; ///< Calendar adjustment: 
easter holidays\n BOOL ConstantIntercept; ///< Add a linear trend?\n\n // ARIMA Modeling\n BOOL AutoSelect; ///< RegARIMA Modeling: Automodeling?\n int P; ///< RegARIMA Modeling: Manual, set the order of AR process\n int Q; ///< RegARIMA Modeling: Manual, set the order of MA process\n int D; ///< RegARIMA Modeling: Manual, differencing\n int PP; ///< RegARIMA Modeling: Manual, the order of seasonal AR process\n int QQ; ///< RegARIMA Modeling: Manual, the order of seasonal MA process\n int DD; ///< RegARIMA Modeling: Manual, Seasonal differencing\n\n // Forecast\n int nForecastYears; ///< [in] is the number of years to forecast for.\n double fAlpha; ///< [in] is the statistical significance level. If missing, a default of 5% is assumed. \n\n // Seasonal Adjustment\n BOOL bSeasonalAdjustFilter; ///< is a switch to include seasonal adjustment in the analysis.\n int nX11Mode; ///< 1=mult, 2=add, 3=pseudoadd, 4=logadd\n int nX11Options; ///< 1= x11default, 2=s3x1, 3=s3x3, 4=s3x5, 5=s3x9, 6=s3x15, 7=stable\n\n int henderson; ///< henderson filter setting, default=13\n\n }X12ARIMA_OPTIONS;\n\n /*!\n * \\sa NDK_GLM_GOF()\n */\n typedef enum\n {\n GOF_LLF=1, ///< Log-likelihood goodness of fit measure\n GOF_AIC=2, ///< Akaike information criterion goodness of fit measure\n GOF_BIC=3, ///< Bayesian or Schwartz information criterion goodness of fit measure\n GOF_HQC=4, ///< Hannan\u0096Quinn information criterion goodness of fit measure\n GOF_RSQ=5, ///< R-squared goodness of fit measure\n GOF_ARSQ=6 ///< Adjusted R-squared goodness of fit measure\n }GOODNESS_OF_FIT_FUNC;\n\n /*!\n * \\sa NDK_ARMA_FIT()\n */\n typedef enum\n {\n FIT_MEAN=1, ///< Fitted conditional mean\n FIT_STDEV=2, ///< Fitted conditional volatility or standard deviation\n FIT_RESID=3, ///< Raw residuals (actual - fitted mean)\n FIT_STD_RESID=4 ///< Standardized residuals - (actual - fitted mean)/fitted volatility\n }FIT_RETVAL_FUNC;\n\n /*!\n * \\sa NDK_ARMA_RESID()\n */\n typedef enum\n {\n RESIDS_STD=1, 
///< Standardized residuals \n RESIDS_RAW=2 ///< Raw residuals\n }RESID_RETVAL_FUNC;\n\n /*!\n * \\sa NDK_ARMA_PARAM()\n */\n typedef enum\n {\n PARAM_GUESS=1, ///< Quick guess (non-optimal) of parameters values\n PARAM_CALIBRATE=2, ///< Run a calibration process to find optimal values for the model's parameters\n PARAM_ERROR=3 ///< Compute the standard error of the parameters' values\n }MODEL_RETVAL_FUNC;\n\n /*!\n * \\sa NDK_ARMA_FORE()\n */\n typedef enum\n {\n FORECAST_MEAN=1, ///< Mean forecast value\n FORECAST_STDEV=2, ///< Forecast standard error (aka local volatility)\n FORECAST_TS_STDEV=3, ///< Volatility term structure\n FORECAST_LL=4, ///< Lower limit of the forecast confidence interval\n FORECAST_UL=5 ///< Upper limit of the forecast confidence interval\n }FORECAST_RETVAL_FUNC;\n\n /*!\n * \\ingroup statistical testing\n * \\brief Supported statistical test outputs\n * \\sa NDK_MEANTEST()\n */\n typedef enum\n {\n TEST_PVALUE=1, ///< P-value\n TEST_SCORE=2, ///< Test statistics (aka score)\n TEST_CRITICALVALUE=3 ///< Critical value\n }TEST_RETURN;\n\n\n /*!\n * \\ingroup statistical testing\n * \\sa NDK_NORMALTEST()\n */\n typedef enum \n {\n NORMALTEST_JB=1, ///< Jacque-Berra\n NORMALTEST_WS=2, ///< Shapiro-Wilson\n NORMALTEST_CHISQ=3 ///< Chi-Square test - Doornik and Hansen, \"An Omnibus Test for Normality\", 1994.\n }NORMALTEST_METHOD;\n\n\n /*!\n * \\ingroup statistical testing\n * \\sa NDK_ADFTEST()\n */\n typedef enum \n {\n ADFTEST_DRIFT_ONLY=1, ///< Model 1: A stochastic drift\n ADFTEST_DRIFT_N_CONST=2, ///< Model II: A deterministic constant and stochastic drift\n ADFTEST_DRIFT_N_TREND =3, ///< Model III: A deterministic trend and stochastic drift\n ADFTEST_DRIFT_N_CONST_N_TREND =4, ///< Model IV: A deterministic constant, trend and stochastic drift\n ADFTEST_DRIFT_N_CONST_TREND_TREND2 =5 ///< Model V: A deterministic constant, trend, trend^2 and stochastic drift\n }ADFTEST_OPTION;\n\n\n\n/*!\n * \\brief Support correlation methods\n * \\sa 
NDK_XCFTEST(), NDK_XCF()\n */\n typedef enum \n {\n XCF_PEARSON=1, ///< Pearson\n XCF_SPEARMAN=2, ///< Spearman\n XCF_KENDALL=3 ///< Kendall\n }CORRELATION_METHOD;\n\n\n /*!\n * \\brief Supported Link function\n * \\sa NDK_GLM_GOF()\n */\n typedef enum\n {\n GLM_LVK_IDENTITY=1, ///< Identity (default)\n GLM_LVK_LOG=2, ///< Log\n GLM_LVK_LOGIT=3, ///< Logit\n GLM_LVK_PROBIT=4, ///< Probit\n GLM_LVK_CLOGLOG=5 ///< Complementary log-log\n }GLM_LINK_FUNC;\n\n /*!\n * \\brief Supported innovation types\n * \\sa NDK_GARCH_PARAM(), \n */\n typedef enum\n {\n INNOVATION_GAUSSIAN=1, ///< Gaussian or normal distribution\n INNOVATION_TDIST=2, ///< Standardized student's T-distribution\n INNOVATION_GED=3 ///< Standardized generalized error distribution (GED)\n }INNOVATION_TYPE;\n\n /*!\n * \\brief Supported innovation types\n * \\sa NDK_TREND()\n */\n typedef enum\n {\n TREND_LINEAR=1, ///< Linear time trend\n TREND_POLYNOMIAL=2, ///< Polynomial time trend\n TREND_EXPONENTIAL=3, ///< Exponential time trend\n TREND_LOGARITHMIC=4, ///< Logarithmic time trend\n TREND_POWER=5 ///< Power time trend\n }TREND_TYPE;\n\n /*!\n * \\brief multi-colinearity test method\n * \\sa NDK_COLNRTY_TEST()\n */\n typedef enum\n {\n COLNRTY_CN=1, ///< Condition Number\n COLNRTY_VIF=2, ///< Variation Inflation Factor (VIF)\n COLNRTY_DET=3, ///< Determinant\n COLNRTY_EIGEN=4 ///< Eigenvalues\n }COLNRTY_TEST_TYPE;\n\n /*!\n * \\brief Periodogram method options\n * \\sa NDK_PERIODOGRAM()\n */\n typedef enum\n {\n PERIODOGRAM_NONE=1, ///< don't process the input data\n PERIODOGRAM_DETREND=2, ///< detrend the input data\n PERIODOGRAM_DIFFERENCE=3, ///< difference the time series (1,1)\n PERIODOGRAM_AUTOPROC=4 ///< Auto-process (e.g. detrend, difference, etc.) 
the input data.\n }PERIODOGRAM_OPTION_TYPE;\n\n\n /*!\n * \\brief Imputation methods for resampling\n * \\sa NDK_RESAMPLE()\n */\n typedef enum\n {\n IMPUTATION_NONE = 0, ///< don't process the input data\n IMPUTATION_INTERPOLATE_FWD = 1, ///< flat forward\n IMPUTATION_INTERPOLATE_BKWD = 2, ///< flat backward\n IMPUTATION_INTERPOLATE_LINEAR = 3, ///< Linear interpolation\n IMPUTATION_INTERPOLATE_CSPLINE = 4, ///< cubic spline\n IMPUTATION_FFT = 5 ///< Fast Fourier transform\n }IMPUTATION_METHOD;\n\n typedef enum\n {\n X13TRANSFOR_NONE = 0, ///< don't process the input data\n X13TRANSFOR_AUTO = 1, ///< don't process the input data\n X13TRANSFOR_LOG = 2, ///< don't process the input data\n X13TRANSFOR_SQRT = 3, ///< don't process the input data\n X13TRANSFOR_INV = 4, ///< don't process the input data\n X13TRANSFOR_LOGIST = 5, ///< don't process the input data\n X13TRANSFOR_BOXCOX = 6 ///< don't process the input data\n }X13TRANSFORM_METHOD;\n\n typedef enum\n {\n X13PRIORADJUST_RATIO = 0, ///< \n X13PRIORADJUST_PERCENT = 1, ///< \n X13PRIORADJUST_DIFF = 2 ///< \n }X13PRIORADJUST_TYPE;\n\n typedef enum\n {\n X11_MODE_MULT = 0, ///< \n X11_MODE_ADD = 1, ///< \n X11_MODE_PSEUDOADD = 2, ///< \n X11_MODE_LOGADD = 3 ///< \n }X11_MODE_TYPE;\n\n\n typedef enum\n {\n X11_SEASONALMA_3x1 = 0, ///< \n X11_SEASONALMA_3x3 = 1, ///< \n X11_SEASONALMA_3x5 = 2, ///< \n X11_SEASONALMA_3x9 = 3, ///< \n X11_SEASONALMA_3x15 = 4, ///< \n X11_SEASONALMA_STABLE = 5, ///< \n X11_SEASONALMA_DEFAULT = 6, ///< 3x3 MA and 3x5\n X11_SEASONALMA_MSR=7 ///< X-11-ARIMA88\n }X11_SEASONALMA_TYPE;\n\n\n\n}\n\n// Functions API\nextern \"C\"\n{\n\n /// \\name Initialization APIs\n /// @{\n /*! \n * @brief Initializes the SFSDK Library\n * @details This function should be the first API called in the SDK; It initializes the SDK library dependencies:\n * 1. Logging system\n * 2. License system\n * 3. 
Database system\n * \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n *\n * \\sa SFMacros.h, NDK_Shutdown()\n */\n /*! \n @note 1. This is the first SDK API\n\n @code\n int nRet = NDK_FAILED;\n\n char szAppName[]=\"MyApp\";\n nRet = NDK_Init( szAppName, // we have a MyApp.conf file\n NULL, // use the license key in the license file (NumXL.lic)\n NULL, // use the activation code in the license file (NumXL.lic)\n NULL); // use the temp directory in current user's profile \n // (Windows 7) (c:\\users\\(username)\\AppData\\Local\\MyApp)\n // (Windows XP) (c:\\Local Settings\\(username)\\AppData\\Local\\MyApp)\n\n if( nRet >= NDK_SUCCES){\n ...\n ....\n }\n @endcode\n */\n int __stdcall NDK_Init( LPCTSTR szAppName, ///< [in] is the application name (user-defined), but must match the configuration base filename.\n LPCTSTR szKey, ///< [in, optional] is the NumXL license key. If missing (NULL), NDK_Init will attempt to locate the license key & activation code in the system.\n LPCTSTR szActCode, ///< [in, optional] is the license activation code. If missing (NULL), NDK_Init will attempt to locate the license key & activation code in the system.\n LPCTSTR szTmpPath ///< [in, optional] is the full path of the log file directory. If NULL, NDK reverts to the temporary directory in the current user's profile.\n ); \n \n \n /*! \n * @brief Shutdown and release resources used by the SFSDK Library\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful\n * \\retval Others see \\ref SFMacros.h\n * \\sa SFMacros.h, NDK_Init()\n */\n /*! 
\n @code\n int nRet = NDK_FAILED;\n\n ...\n nRet= NDK_Shutdown(); // This is the last SDK API called.\n // Check for error\n if( nRet < NDK_SUCCESS){\n ...\n }\n\n @endcode\n */\n int __stdcall NDK_Shutdown(void); \n\n // Examples\n /// \\example sdk_init.cpp\n\n /// @}\n\n /// \\name Descriptive Statistics\n /// @{\n\n // Time series statistics\n // General statistics\n /*! \n * \\brief Calculates the sample excess kurtosis.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n The time series is homogeneous or equally spaced. \n * \\note 1. The data sample may include missing values (e.g. #N/A).\n * \\note 2. The test hypothesis for the population excess kurtosis:\n\t\t\t\t\\f$H_{o}: K=0\\f$\n\t\t\t\t\\f$H_{1}: K\\neq 0\\f$, where:\n\t\t\t\t\t\\f$H_{o}\\f$ is the null hypothesis.\n\t\t\t\t\t\\f$H_{1}\\f$ is the alternate hypothesis.\n * \\note 3. For the case in which the underlying population distribution is normal, the sample excess kurtosis also has a normal sampling distribution:\n\t\t\t\t\\f$\\hat K \\sim N(0,\\frac{24}{T})\\f$, where:\n\t\t\t\t\t\\f$\\hat k\\f$ is the sample excess kurtosis (i.e. 4th moment).\n\t\t\t\t\t\\f$T\\f$ is the number of non-missing values in the data sample.\n\t\t\t\t\t\\f$N(.)\\f$ is the normal (i.e. gaussian) probability distribution function.\n * \\note 4. Using a given data sample, the sample excess kurtosis is calculated as:\n\t\t\t\t\\f$\\hat K (x)= \\frac{\\sum_{t=1}^T(x_t-\\bar x)^4}{(T-1)\\hat \\sigma^4}-3\\f$, where:\n\t\t\t\t\t\\f$\\hat K(x)\\f$ is the sample excess kurtosis.\n\t\t\t\t\t\\f$x_i\\f$ is the i-th non-missing value in the data sample.\n\t\t\t\t\t\\f$T\\f$ is the number of non-missing values in the data sample.\n\t\t\t\t\t\\f$\\hat \\sigma\\f$ is the sample standard deviation.\n * \\note 5. The underlying population distribution is assumed normal (gaussian)..\n * \\note 6. 
This is a two-sides (i.e. two-tails) test, so the computed p-value should be compared with half of the significance level \\f$\\frac{\\alpha}{2}\\f$.\n\n * \\sa NDK_XKURTTEST(), NDK_GED_XCF(), NDK_TDIST_XKURT()\n */\n int __stdcall NDK_XKURT(double* X, ///< [in] is the input data sample (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n WORD reserved, ///< [in] This parameter is reserved and must be 1.\n double* retVal ///< [out] is the calculated sample excess-kurtosis value.\n );\n /*! \n * \\brief Calculates the sample skewness.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_ACF_SKEWTEST()\n */\n int __stdcall NDK_SKEW( double* X, ///< [in] is the input data sample (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n WORD reserved, ///< [in] This parameter is reserved and must be 1.\n double* retVal ///< [out] is the calculated sample skew value.\n );\n /*! \n * \\brief Calculates the sample average.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_ACF_MEANTEST()\n */\n int __stdcall NDK_AVERAGE(double* X, ///< [in] is the input data sample (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n WORD reserved, ///< [in] This parameter is reserved and must be 1.\n double* retVal ///< [out] is the calculated average value.\n );\n\n /*!\n * \\brief Calculates the geometric mean of the sample\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. 
See \\ref SFMacros.h for more details.\n * \\sa NDK_AVERAGE()\n */\n int __stdcall NDK_GMEAN(double* X, ///< [in] is the input data sample (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n WORD reserved, ///< [in] This parameter is reserved and must be 1.\n double* retVal ///< [out] is the calculated geometric average value.\n );\n\n /*! \n * \\brief Calculates the sample variance.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_ACF_STDEVTEST()\n */\n int __stdcall NDK_VARIANCE(double* X, ///< [in] is the input data sample (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n WORD reserved, ///< [in] This parameter is reserved and must be 1.\n double* retVal ///< [out] is the calculated variance value.\n );\n\n /*! \n * \\brief Calculates the minimum value in a given sample.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_MAX(), NDK_QAUNTILE(), NDK_IQR()\n */\n int __stdcall NDK_MIN( double* X, ///< [in] is the input data sample (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n WORD reserved, ///< [in] This parameter is reserved and must be 1.\n double* retVal ///< [out] is the calculated minimum value.\n );\n /*! \n * \\brief Calculates the maximum value in a given sample.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. 
See \\ref SFMacros.h for more details.\n * \\sa NDK_MIN(), NDK_QAUNTILE()\n */\n int __stdcall NDK_MAX(double* X, ///< [in] is the input data sample (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n WORD reserved, ///< [in] This parameter is reserved and must be 1.\n double* retVal ///< [out] is the calculated maximum value.\n );\n /*! \n * \\brief Returns the sample p-quantile of the non-missing observations (i.e. divides the sample data into equal parts determined by the percentage p). \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. The time series may include missing values (NaN), but they will not be included in the calculations.\n * \\note 2. The quantile function for any distribution is defined between 0 and 1. Its function is the inverse of the cumulative distribution function (CDF).\n * \\note 3. The quantile function returns the sample median when \\f$p=0.5\\f$.\n * \\note 4. The quantile function returns the sample minimum when \\f$p=0\\f$.\n * \\note 5. The quantile function returns the sample maximum when \\f$p=1\\f$.\n * \\note 6. For any probability distribution, the following holds true for the probability \\f$p\\f$:\n\t\t\t\t-\\f$P(X< q)\\geq p\\f$, where: \n\t\t\t\t\t-\\f$q\\f$ is the sample \\f$p\\f$-quantile.\n * \\sa NDK_IQR(), NDK_MIN(), NDK_MAX()\n */\n int __stdcall NDK_QUANTILE( double* X, ///< [in] is the input data sample (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n double p, ///< [in] is a scalar value between 0 and 1 (exclusive).\n double* retVal ///< [out] is the calculated p-th quantile value.\n );\n /*! \n * \\brief Returns the interquartile range (IQR), also called the midspread or middle fifty. 
\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. The input time series data may include missing values (NaN), but they will not be included in the calculations.\n * \\note 2. The interquartile range is defined as follows:\n\t\t\t\t-\\f$\\textup{IQR}=Q_3-Q_1\\f$, where:\n\t\t\t\t\t-\\f$Q_3\\f$ is the third quartile.\n\t\t\t\t\t-\\f$Q_1\\f$ is the first quartile.\n * \\note 3. Interquartile range (IQR) is a robust statistic because it has a break down point of 25%. It is often preferred to the total range.\n * \\sa NDK_QUANTILE(), NDK_MIN(), NDK_MAX()\n */\n int __stdcall NDK_IQR(double* X, ///< [in] is the input data sample (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n double* retVal ///< [out] is the calculated IQR value.\n );\n\n /*! \n * \\brief Returns the sorted sample data\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_ACF_ERROR(), NDK_XCF()\n */\n int __stdcall NDK_SORT_ASC( double* X, ///< [inout] is the input data sample (a one dimensional array).\n size_t N ///< [in] is the number of observations in X.\n );\n /*! \n * \\brief Calculates the Hurst exponent (a measure of persistence or long memory) for time series.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n */\n /*!\n * \\htmlonly\n *

 * <b>References</b>
 * <ul>
 * <li>[1] A.A. Anis, E.H. Lloyd (1976) "The expected value of the adjusted rescaled Hurst range of independent normal summands", Biometrika 63, 283-298.</li>
 * <li>[2] H.E. Hurst (1951) "Long-term storage capacity of reservoirs", Transactions of the American Society of Civil Engineers 116, 770-808.</li>
 * <li>[3] E.E. Peters (1994) "Fractal Market Analysis", Wiley.</li>
 * <li>[4] R. Weron (2002) "Estimating long range dependence: finite sample properties and confidence intervals", Physica A 312, 285-299.</li>
 * </ul>
\n * \\endhtmlonly \n */\n /*!\n *\t \\note 1. The input data series must have at least 9 non-missing values. Otherwise, Hurst function returns #NDK_FAILED.\n * \\note 2. The input data series may include missing values (NaN), but they will not be included in the calculations. \n * \\note 3. The Hurst exponent, \\f$h\\f$, is defined in terms of the rescaled range as follows:\n\t\t\t\t\\f$E \\left [ \\frac{R(n)}{S(n)} \\right ]=Cn^H \\textup{ as } n \\to \\infty \\f$\n * \\note 4. Where:\n\t\t\t\t-\\f$\\left [ \\frac{R(n)}{S(n)} \\right ]\\f$ is the Rescaled Range. \n\t\t\t\t-\\f$E \\left [x \\right ]\\f$ is the expected value.\n\t\t\t\t-\\f$n\\f$ is the time of the last observation (e.g. it corresponds to \\f$X_n\\f$ in the input time series data.) \n\t\t\t\t-\\f$h\\f$ is a constant. of\n * \\note 5. The Hurst exponent is a measure autocorrelation (persistence and long memory):\n\t\t\t\t-A value of \\f$0\n\t\t\t\t\\f$m=\\dfrac{1}{n} \\sum_{i=1}^{n} X_i\\f$\n\t\t\t\t2. Create a mean adjusted series:
\n\t\t\t\t\\f$Y_t=X_{t}-m \\textup{ for } t=1,2, \\dots ,n\\f$\n\t\t\t\t3. Calculate the cumulative deviate series Z:
\n\t\t\t\t\\f$Z_t= \\sum_{i=1}^{t} Y_{i} \\textup{ for } t=1,2, \\dots ,n\\f$\n\t\t\t\t4. Create a range series R:
\n\t\t\t\t\\f$R_t = max\\left (Z_1, Z_2, \\dots, Z_t \\right )- min\\left (Z_1, Z_2, \\dots, Z_t \\right ) \\textup{ for } t=1,2, \\dots, n\\f$\n\t\t\t\t5. Create a standard deviation series R:
\n\t\t\t\t\\f$S_{t}= \\sqrt{\\dfrac{1}{t} \\sum_{i=1}^{t}\\left ( X_{i} - u \\right )^{2}} \\textup{ for } t=1,2, \\dots ,n\\f$\n\t\t\t\tWhere:
\n\t\t\t\t\\f$h\\f$ is the mean for the time series values \\f$X_1,X_2, \\dots, X_t\\f$\n * \\note 8. Calculate the rescaled range series (R/S):
\n\t\t\t\t\\f$\\left ( R/S \\right )_{t} = \\frac{R_{t}}{S_{t}} \\textup{ for } t=1,2, \\dots, n\\f$\n * \\sa NDK_GINI()\n */\n int __stdcall NDK_HURST_EXPONENT( double* X, ///< [in] is the input data sample (a one dimensional array). \n size_t N, ///< [in] is the number of observations in X.\n double alpha, ///< [in] is the statistical significance level (1%, 5%, 10%). If missing, a default of 5% is assumed.\n WORD retType, ///< [in] is a number that determines the type of return value: \n /// 1 = Empirical Hurst exponent (R/S method)\n /// 2 = Anis-Lloyd/Peters corrected Hurst exponent\n /// 3 = Theoretical Hurst exponent\n /// 4 = Upper limit of the confidence interval\n /// 5 = Lower limit of the confidence interval\n double* retVal ///< [out] is the calculated value of this function.\n );\n\n /*! \n * \\brief Returns the sample Gini coefficient, a measure of statistical dispersion. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n *\t \\note 1. A low Gini coefficient indicates a more equal distribution, with 0 corresponding to complete equality. Higher Gini coefficients indicate more unequal distributions, with 1 corresponding to complete inequality. \n * \\note 2. The input data series may include missing values (NaN), but they will not be included in the calculations. \n * \\note 3. The values in the input data series must be non-negative.\n * \\note 4. The Gini coefficient is computed as follows:\n \\f$G(S)=1-\\frac{2}{n-1}\\left ( n-\\frac{\\sum_{i=1}^{n}iy_i}{\\sum_{i=1}^{n}y_i} \\right )\\f$\n * \\note 5. Where:\n\t\t\t\t- \\f$h\\f$ is the input data series (\\f$h\\f$) arranged in descending order, so that \\f$y_i\\leq y_{i+1}\\f$. \n\t\t\t\t- \\f$n\\f$ is the number of non-missing values in the input time series data sample. \n\n * \\note 6. 
The Gini coefficient value can range from 0 to 1 and is half the NDK_RMD().\n * \\note 7. \\f$G(S)\\f$ is a consistent estimator of the population Gini coefficient, but is generally unbiased (except when the population mean is known).\n * \\note 8. Developed by the Italian statistician Corrado Gini in 1912, the Gini coefficient is commonly used as a measure of comparative income or wealth. Where zero (0) corresponds to complete equality and one (1) to complete inequality.\n * \\sa NDK_HURST_EXPONENT()\n */\n int __stdcall NDK_GINI(double* x, ///< [in] is the input data sample (must be non-negative) (a one dimensional array of values). \n\t\t\t\t\t\t size_t N, ///< [in] is the number of observations in X.\n\t\t\t\t\t\t double* retVal ///< [out] is the calculated value of this function.\n );\n\n /*! \n * \\brief Calculates the cross-correlation function between two time series. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. The time series is homogeneous or equally spaced. \n * \\note 2. The two time series must be identical in size. \n * \\note 3. 
The Pearson correlation, \\f$r_{xy}\\f$, is defined as follows:\n\t\t\t\t-\\f$r_{xy}= \\frac{\\sum_{i=1}^N(x_i-\\bar{x})(y_i-\\bar{y})}{\\sqrt{\\sum_{i=1}^N(x_i-\\bar{x})^2\\times\\sum_{i=1}^N(y_i-\\bar{y})^2}}\\f$, where:\n\t\t\t\t\t-\\f$\\bar{x}\\f$ is the sample average of time series X.\n\t\t\t\t\t-\\f$\\bar{y}\\f$ is the sample average of time series Y.\n\t\t\t\t\t-\\f$x_i \\in X\\f$ is a value from the first input time series data.\n\t\t\t\t\t-\\f$y_i \\in Y\\f$ is a value from the second input time series data.\n\t\t\t\t\t-\\f$N\\f$ is the number of pairs \\f$\\left ( x_i,y_i \\right )\\f$ that do not contain a missing observation.\n * \\sa NDK_ACF(), NDK_XCF()\n */\n int __stdcall NDK_XCF(double* X, ///< [in] is the first univariate time series data (a one dimensional array). \n\t\t\t\t\t\tdouble* Y, ///< [in] is the second univariate time series data (a one dimensional array). \n\t\t\t\t\t\tsize_t N, ///< [in] is the number of observations in X.\n\t\t\t\t\t\tsize_t K, ///< [in] is the lag order (e.g. 0=no lag, 1=1st lag, etc.) to use with the second time series input (X). If missing, a default lag order of zero (i.e. no-lag) is assumed. \n\t\t\t\t\t\tWORD method, ///< [in] is the algorithm to use for calculating the correlation (see #CORRELATION_METHOD)\n\t\t\t\t\t\tWORD retType, ///< [in] is a switch to select the return output (1 = correlation value(default), 2 = std error).\n\t\t\t\t\t\tdouble* retVal ///< [out] is the calculated value of this function.\n ); \n\n /*! \n * \\brief Returns the sample root mean square (RMS).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. The input time series data may include missing values (NaN), but they will not be included in the calculations. \n * \\note 2. 
The root mean square (RMS) is defined as follows for a set of \\f$n\\f$ values \\f${x_1,x_2,...,x_n}\\f$:\n\t\t\t\t- \\f$\\textrm{RMS}=\\sqrt{\\frac{x_1^2+x_2^2+\\cdots +x_N^2}{N}} =\\sqrt{\\frac{\\sum_{i=1}^N {x_i^2}}{N}}\\f$\n * \\note 3. Where:\n\t\t\t\t- \\f$x_i\\f$ is the value of the i-th non-missing observation.\n\t\t\t\t- \\f$N\\f$ is the number of non-missing observations in the input sample data.\n * \\note 4. The root mean square (RMS) is a statistical measure of the magnitude of a varying quantity.\n * \\note 5. The root mean square (RMS) has an interesting relationship to the mean (\\f$\\bar{x}\\f$) and the population standard deviation (\\f$\\sigma\\f$), such that:\n\t\t\t\t- \\f$\\textrm{RMS}^2=\\bar{x}^2+\\sigma^2\\f$\n * \\sa NDK_MD(), NDK_RMD()\n */\n int __stdcall NDK_RMS(double* X, ///< [in] is the input data sample (a one/two dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n WORD reserved, ///< [in] This parameter is reserved and must be 1.\n double* retVal ///< [out] is the calculated value of this function.\n );\n\n /*! \n * \\brief Returns the mean difference of the input data series.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n \\note 1. The time series may include missing values (NaN), but they will not be included in the calculations. \n * \\note 2. The sample mean difference (MD) is computed as follows:\n\t\t\t\t- \\f$\\Delta = \\textup{MD} = \\frac{\\sum_{i=1}^n \\sum_{j=1}^n \\| x_i - x_j \\|}{n \\times \\left ( n-1 \\right )}\\f$\n * \\note 3. Where:\n\t\t\t\t- \\f$x_i\\f$ is the value of the i-th non-missing observation.\n\t\t\t\t- \\f$n\\f$ is the number of non-missing observations in the sample.\n * \\note 4. 
The mean difference is the product of the sample mean and the relative mean difference (RMD) and so can also be expressed in terms of the NDK_GINI:\n\t\t\t\t- \\f$\\textup{MD}= 2 \\times G \\times \\bar{x}\\f$\n * \\note 5. Where:\n\t\t\t\t- \\f$\\bar{x}\\f$ is the arithmetic sample mean. \n\t\t\t\t- \\f$G\\f$ is the NDK_GINI.\n * \\note 6. Because of its ties to the Gini coefficient, the mean difference is also called the \"Gini mean difference.\" It is also known as the \"absolute mean difference.\" \n * \\note 7. The sample mean difference is not dependent on a specific measure of central tendency like the standard deviation. \n * \\note 8. The mean difference of a sample is an unbiased and consistent estimator of the population mean difference. \n * \\sa NDK_ACF_ERROR(), NDK_XCF()\n */\n int __stdcall NDK_MD(double* pData, ///< [in] is the input data series (one/two dimensional array).\n\t\t\t\t\t size_t nSize, ///< [in] is the number of observations in pData.\n\t\t\t\t\t WORD reserved, ///< [in] This parameter is reserved and must be 1.\n\t\t\t\t\t double* retVal ///< [out] is the computed value.\n\t\t\t\t\t );\n\n /*! \n * \\brief Returns the sample relative mean difference.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. The time series may include missing values (NaN), but they will not be included in the calculations.\n * \\note 2. The relative mean difference is defined in terms of the NDK_MD as follows:\n\t\t\t\t- \\f$\\textup{RMD}= \\frac{\\textup{MD}}{\\bar{x}}\\f$\n\n * \\note 3: Where:\n\t\t\t\t-\\f$\\bar{x}\\f$ is the sample mean (average) of the time series.\n\t\t\t\t-\\f$\\textup{MD}\\f$ is the mean difference of the time series.\n * \\note 4: The RMD is also equal to twice the NDK_GINI. 
\n * \\sa NDK_ACF_ERROR(), NDK_XCF()\n */\n int __stdcall NDK_RMD(double* X, ///< [in] is the input data sample (a one/two dimensional array).\n\t\t\t\t\t\tsize_t N, ///< [in] is the number of observations in X.\n\t\t\t\t\t\tWORD reserved, ///< [in] This parameter is reserved and must be 1.\n\t\t\t\t\t\tdouble* retVal ///< [out] is the calculated value of this function.\n\t\t\t\t\t\t);\n\n /*! \n * \\brief Returns the sample median of absolute deviation (MAD).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. The input data series may include missing values (NaN), but they will not be included in the calculations.\n * \\note 2. The median of absolute deviation (MAD) is defined as follows:\n\t\t\t\t-\\f$\\operatorname{MAD} = \\operatorname{median}_{i}\\left(\\ \\left| X_{i} - \\operatorname{median}_{j} (X_{j}) \\right|\\ \\right)\\f$\n * \\note 3. In short, starting with the deviations from the data's median, the MAD is the median of their absolute values.\n * \\note 4. The median of absolute deviation (MAD) is a measure of statistical dispersion.\n * \\note 5. MAD is a more robust estimator of scale than the sample variance or standard deviation.\n * \\note 6. MAD is especially useful with distributions that have neither mean nor variance (e.g. the Cauchy distribution.)\n * \\note 7. MAD is a robust statistic because it is less sensitive to outliers in a data series than standard deviation.\n * \\sa NDK_ACF_ERROR(), NDK_XCF()\n */\n int __stdcall NDK_MAD(double* X, ///< [in] is the input data sample (a one/two dimensional array).\n\t\t\t\t\t\tsize_t N, ///< [in] is the number of observations in X.\n\t\t\t\t\t\tWORD reserved, ///< [in] This parameter is reserved and must be 1.\n\t\t\t\t\t\tdouble* retVal ///< [out] is the calculated value of this function.\n\t\t\t\t\t\t);\n\n /*! 
\n * \\brief Returns the long-run variance using a Bartlett kernel with window size k.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. The input time series data may include missing values (NaN), but they will not be included in the calculations.\n * \\note 2. The long-run variance is computed as follows:\n\t\t\t\t-\\f$\\sigma^2=\\frac{1}{T}\\sum_{t=k}^{T-k}\\sum_{i=-k}^k w_i(x_t-\\bar{x})(x_{t-i}-\\bar{x})\\f$\n * \\note 3. Where:\n\t\t\t\t-\\f$x_{t} \\in X\\f$ is a value from the input time series data.\n\t\t\t\t-\\f$\\bar{x}\\f$ is the mean of the input time series data. \n\t\t\t\t-The weight \\f$w_i\\f$ in Bartlett kernel is defined as follows:\n\t\t\t\t\t-\\f$w_i= 1- \\frac{\\left | i \\right |}{k+1}\\f$\n\t\t\t\t-\\f$k\\f$ is the input window size for the Bartlett kernel.\n * \\sa NDK_ACF_ERROR(), NDK_XCF()\n */\n int __stdcall NDK_LRVAR(double* X, ///< [in] is the input data sample (a one/two dimensional array).\n\t\t\t\t\t size_t N, ///< [in] is the number of observations in X.\n\t\t\t\t\t\t size_t w, ///< [in] is the input Bartlett kernel window size. If omitted, the default value is the cubic root of the sample data size.\n\t\t\t\t\t\t double* retVal ///< [out] is the calculated value of this function.\n\t\t\t\t\t\t );\n\n /*! \n * \\brief Calculates the sum of absolute errors (SAE) between the forecast and the eventual outcomes.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The two time series must be identical in size.\n * \\note 3. A missing value (say \\f$x_k\\f$ or \\f$\\hat x_k\\f$) in either time series will exclude the data point \\f$(x_k,\\hat x_k)\\f$ from the SSE.\n * \\note 4. 
The sum of absolute errors (SAE) or deviations (SAD), is defined as follows:\n\t\t\t\t-\\f$\\mathrm{SAE}=\\mathrm{SAD}=\\sum_{i=1}^N \\left | x_i-\\hat x_i \\right |\\f$, where:\n\t\t\t\t\t-\\f$\\{x_i\\}\\f$ is the actual observations time series.\n\t\t\t\t\t-\\f$\\{\\hat x_i\\}\\f$ is the estimated or forecasted time series.\n * \\sa NDK_ACF_ERROR(), NDK_XCF()\n */\n int __stdcall NDK_SAD(double* X, ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array). \n\t\t\t\t\t\tdouble* Y, ///< [in] is the forecast time series data (a one dimensional array). \n\t\t\t\t\t\tsize_t N, ///< [in] is the number of observations in X.\n\t\t\t\t\t\tdouble* retVal ///< [out] is the calculated value of this function.\n\t\t\t\t\t\t);\n\n /*! \n * \\brief Calculates the mean absolute error function for the forecast and the eventual outcomes. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. The mean absolute error is a common measure of forecast error in time series analysis.\n * \\note 2. The time series is homogeneous or equally spaced.\n * \\note 3. The two time series must be identical in size.\n * \\note 4. The mean absolute error is given by:\n\t\t\t\t-\\f$\\mathrm{MAE}=\\frac{\\mathrm{SAE}}{N}=\\frac{\\sum_{i=1}^N \\left | x_i - \\hat x_i \\right |}{N}\\f$, where:\n\t\t\t\t\t-\\f$\\{x_i\\}\\f$ is the actual observations time series.\n\t\t\t\t\t-\\f$\\{\\hat x_i\\}\\f$ is the estimated or forecasted time series.\n\t\t\t\t\t-\\f$\\mathrm{SAE}\\f$ is the sum of the absolute errors (or deviations).\n\t\t\t\t\t-\\f$N\\f$ is the number of non-missing data points.\n * \\sa NDK_ACF_ERROR(), NDK_XCF()\n */\n int __stdcall NDK_MAE(double* X, ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array). 
\n\t\t\t\t\t\tdouble* Y, ///< [in] is the forecast time series data (a one dimensional array). \n\t\t\t\t\t\tsize_t N, ///< [in] is the number of observations in X.\n\t\t\t\t\t\tdouble* retVal ///< [out] is the calculated value of this function.\n\t\t\t\t\t\t);\n\n\n int __stdcall NDK_MASE(double* X, ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array). \n double* Y, ///< [in] is the forecast time series data (a one dimensional array). \n size_t N, ///< [in] is the number of observations in X.\n size_t M, ///< [in] is the seasonal period (for non-seasonal time series, set M=1).\n double* retVal ///< [out] is the calculated value of this function.\n );\n\n\n\n\n /*! \n * \\brief Calculates the mean absolute percentage error (deviation) function for the forecast and the eventual outcomes.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. MAPE is also referred to as MAPD. \n * \\note 2. The time series is homogeneous or equally spaced. \n * \\note 3. For a plain MAPE calculation, in the event that an observation value (i.e. \\f$x_k\\f$) is equal to zero, the MAPE function skips that data point. \n * \\note 4. The mean absolute percentage error (MAPE), also known as mean absolute percentage deviation (MAPD), measures the accuracy of a method for constructing fitted time series values in statistics. \n * \\note 5. The two time series must be identical in size. \n * \\note 6. The mean absolute percentage error (MAPE) is defined as follows:\n\t\t\t\t-\\f$\\mathrm{MAPE}=\\frac{100}{N}\\times \\sum_{i=1}^N \\left | \\frac{x_i - \\hat x_i}{x_i} \\right |\\f$, where:\n\t\t\t\t\t-\\f$\\{x_i\\}\\f$ is the actual observations time series. \n\t\t\t\t\t-\\f$\\{\\hat x_i\\}\\f$ is the estimated or forecasted time series.\n\t\t\t\t\t-\\f$N\\f$ is the number of non-missing data points.\n * \\note 7. 
When calculating the average MAPE for a number of time series, you may encounter a problem: a few of the series that have a very high MAPE might distort a comparison between the average MAPE of a time series fitted with one method compared to the average MAPE when using another method. \n * \\note 8. In order to avoid this problem, other measures have been defined, for example the SMAPE (symmetrical MAPE), weighted absolute percentage error (WAPE), real aggregated percentage error and relative measure of accuracy (ROMA). \n * \\note 9. The symmetrical mean absolute percentage error (SMAPE) is defined as follows:\n\t\t\t\t-\\f$\\mathrm{SMAPE}=\\frac{200}{N}\\times \\sum_{i=1}^N \\left | \\frac{x_i - \\hat x_i}{x_i+\\hat x_i} \\right |\\f$\n * \\note 10. The SMAPE is easier to work with than MAPE, as it has a lower bound of 0% and an upper bound of 200%. \n * \\note 11. The SMAPE does not treat over-forecast and under-forecast equally.\n * \\note 12. For a SMAPE calculation, in the event the sum of the observation and forecast values (i.e. \\f$x_k + \\hat x_k\\f$) equals zero, the MAPE function skips that data point. \n * \\sa NDK_ACF_ERROR(), NDK_XCF()\n */\n int __stdcall NDK_MAPE(double* X, ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array). \n\t\t\t\t\t\t double* Y, ///< [in] is the forecast time series data (a one dimensional array). \n\t\t\t\t\t\t size_t N, ///< [in] is the number of observations in X.\n\t\t\t\t\t\t BOOL SMAPE, ///< [in] is a switch to select the return output (FALSE=MAPE (default), TRUE=Symmetric MAPE (SMAPI)). \n\t\t\t\t\t\t double* retVal ///< [out] is the calculated value of this function.\n\t\t\t\t\t\t );\n\n\n int __stdcall NDK_MdAPE(double* X, ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array). \n double* Y, ///< [in] is the forecast time series data (a one dimensional array). 
\n size_t N, ///< [in] is the number of observations in X.\n BOOL SMAPE, ///< [in] is a switch to select the scale to divide on: FALSE = Actual obs., TRUE= Average (Actual, Forecast)\n double* retVal ///< [out] is the calculated value of this function.\n );\n\n int __stdcall NDK_MAAPE(double* X, ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array). \n double* Y, ///< [in] is the forecast time series data (a one dimensional array). \n size_t N, ///< [in] is the number of observations in X.\n double* retVal ///< [out] is the calculated value of this function.\n );\n\n\n\n /*! \n * \\brief Calculates the root mean squared error (aka root mean squared deviation (RMSD)) function.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. The RMSE is also known as root mean squared deviation (RMSD). \n * \\note 2. Please see NDK_RMSD for definition and notes. \n * \\sa NDK_ACF_ERROR(), NDK_XCF()\n */\n int __stdcall NDK_RMSE(double* X, ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array).\n\t\t\t\t\t\t double* Y, ///< [in] is the forecast time series data (a one dimensional array). \n\t\t\t\t\t\t size_t N, ///< [In] is the number of observations in X.\n\t\t\t\t\t\t WORD retType, ///< [In] is a switch to select the return output (1=RMSE (default), 2=NRMSE, 3=CV(RMSE)).\n\t\t\t\t\t\t double* retVal ///< [out] is the calculated value of this function.\n\t\t\t\t\t\t );\n\n \n \n int __stdcall NDK_GRMSE(double* X, ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array).\n double* Y, ///< [in] is the forecast time series data (a one dimensional array). \n size_t N, ///< [In] is the number of observations in X.\n double* retVal ///< [out] is the calculated value of this function.\n );\n \n \n /*! 
\n * \\brief Calculates the sum of the squared errors of the prediction function.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n *\t \\note 1. The time series is homogeneous or equally spaced. \n *\t \\note 2. The two time series must be identical in size. \n * \\note 3. A missing value (e.g. \\f$x_k\\f$ or \\f$\\hat x_k\\f$) in either time series will exclude the data point \\f$(x_k,\\hat x_k)\\f$ from the SSE. \n * \\note 4. The sum of the squared errors, \\f$\\mathrm{SSE}\\f$, is defined as follows:\n\t\t\t\t\\f$\\mathrm{SSE}=\\sum_{i=1}^N \\left(x_i-\\hat x_i \\right )^2\\f$, where:\n\t\t\t\t\t-\\f$\\{x_i\\}\\f$ is the actual observations time series.\n\t\t\t\t\t-\\f$\\{\\hat x_i\\}\\f$ is the estimated or forecasted time series.\n * \\sa NDK_ACF_ERROR(), NDK_XCF()\n */\n int __stdcall NDK_SSE(double* X,\t\t ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array). \n\t\t\t\t\t\tdouble* Y,\t\t ///< [in] is the forecasted time series data (a one dimensional array). \n\t\t\t\t\t\tsize_t N,\t\t ///< [in] is the number of observations in X.\n\t\t\t\t\t\tdouble* retVal\t ///< [out] is the calculated sum of squared errors.\n\t\t\t\t\t\t);\n\n\n /*!\n * \\brief Calculates the mean squared errors of the prediction function.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n *\t \\note 1. The two data sets must be identical in size.\n * \\note 2. A missing value (e.g. \\f$x_k\\f$ or \\f$\\hat x_k\\f$) in either time series will exclude the data point \\f$(x_k,\\hat x_k)\\f$ from the MSE.\n * \\sa NDK_SSE()\n */\n int __stdcall NDK_MSE(double* X,\t\t ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array). 
\n double* Y,\t\t ///< [in] is the forecasted time series data (a one dimensional array). \n size_t N,\t\t ///< [in] is the number of observations in X.\n double* retVal\t///< [out] is the calculated mean of squared errors.\n );\n\n\n int __stdcall NDK_GMSE(double* X,\t\t ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array). \n double* Y,\t\t ///< [in] is the forecasted time series data (a one dimensional array). \n size_t N,\t\t ///< [in] is the number of observations in X.\n double* retVal\t///< [out] is the calculated mean of squared errors.\n );\n\n\n\n\n int __stdcall NDK_MRAE(double* X,\t\t ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array). \n double* Y,\t\t ///< [in] is the forecasted time series data (a one dimensional array). \n size_t N,\t\t ///< [in] is the number of observations in X.\n size_t period, ///< [in] is the seasonal period (for non-seasonal time series, set M=1).\n double* retVal\t ///< [out] is the calculated mean of relative absolute error\n );\n\n int __stdcall NDK_MdRAE(double* X,\t\t///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array). \n double* Y,\t\t ///< [in] is the forecasted time series data (a one dimensional array). \n size_t N,\t\t ///< [in] is the number of observations in X.\n size_t period, ///< [in] is the seasonal period (for non-seasonal time series, set M=1).\n double* retVal\t ///< [out] is the calculated median of relative absolute error\n );\n\n\n\n int __stdcall NDK_GMRAE(double* X,\t\t ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array). \n double* Y,\t\t ///< [in] is the forecasted time series data (a one dimensional array). 
\n size_t N,\t\t ///< [in] is the number of observations in X.\n size_t period, ///< [in] is the seasonal period (for non-seasonal time series, set M=1).\n double* retVal\t ///< [out] is the calculated geometric mean of relative absolute error\n );\n\n\n\n int __stdcall NDK_PB(double* X,\t\t ///< [in] is the original (eventual outcomes) time series sample data (a one dimensional array). \n double* Y,\t\t ///< [in] is the forecasted time series data (a one dimensional array). \n size_t N,\t\t ///< [in] is the number of observations in X.\n size_t period, ///< [in] is the seasonal period (for non-seasonal time series, set M=1).\n WORD basis, ///< [in] is the switch to specify the metric used for comparison: 0=absolute error, 1=MAE, 2=MSE\n double* retVal\t ///< [out] is the calculated geometric mean of relative absolute error\n );\n\n\n\n /*! \n * \\brief Calculates the sample autocorrelation function (ACF) of a stationary time series.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. The time series is homogeneous or equally spaced. \n * \\note 2. The time series may include missing values (NaN) at either end. \n * \\note 3. The lag order (k) must be less than the time series size or else an error value (#NDK_FAILED) is returned. \n * \\note 4. The ACF values are bound between -1 and 1, inclusive.\n * \\note 5. The sample autocorrelation is computed as:\n\t\t\t\t-\\f$\\hat{\\rho}(h)=\\frac{\\sum_{k=h}^T{(y_{k}-\\bar y)(y_{k-h}-\\bar y)}}{\\sum_{k=h}^T(y_{k}-\\bar y)^2}\\f$, where:\n\t\t\t\t\t-\\f$y_{t}\\f$ is the value of the time series at time t.\n\t\t\t\t\t-\\f$h\\f$ is the lag order.\n\t\t\t\t\t-\\f$T\\f$ is the number of non-missing values in the time series data.\n\t\t\t\t\t-\\f$\\bar y\\f$ is the sample average/mean of the time series.\n * \\note 6. 
Special cases:\n\t\t\t\t-By definition, \\f$\\hat{\\rho}(0) \\equiv 1.0\\f$\n * \\sa NDK_ACF_ERROR(), NDK_XCF()\n */\n int __stdcall NDK_ACF(double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n size_t K, ///< [in] is the lag order (e.g. k=0 (no lag), k=1 (1st lag), etc.).\n WORD method, ///< [in] is the method selecor (0 = sample autocorrelation, 1= periodogram-based estimate, 2= cross-correlation based estimate).\n double* retVal ///< [out] is the calculated sample autocorrelation value. \n );\n\n\n /*! \n * \\brief Calculates the standard error in the sample autocorrelation function.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_ACF(), NDK_ACFCI()\n */\n int __stdcall NDK_ACF_ERROR(double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n size_t K, ///< [in] is the lag order (e.g. k=0 (no lag), k=1 (1st lag), etc.).\n WORD method, ///< [in] is the method selecor (0 = sample autocorrelation, 1= periodogram-based estimate, 2= cross-correlation based estimate).\n double* retVal ///< [out] is the standard error in the sample autocorrelation value. \n );\n\n\n /*! \n * \\brief Calculates the confidence interval limits (upper/lower) for the autocorrelation function.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. The time series is homogeneous or equally spaced. \n * \\note 2. The time series may include missing values (NaN) at either end. \n * \\note 3. The lag order (k) must be less than the time series size, or else an error value (#NDK_FAILED) is returned. \n * \\note 4. 
The ACFCI function calculates the confidence limits as:\n\t\t\t\t-\\f$\\hat\\rho_k - Z_{\\alpha/2}\\times \\sigma_{\\rho_k} \\leq \\rho_k \\leq \\hat\\rho_k+ Z_{\\alpha/2}\\times \\sigma_{\\rho_k}\\f$, where:\n\t\t\t\t\t-\\f$\\rho_k\\f$ is the population autocorrelation function.\n\t\t\t\t\t-\\f$\\sigma_{\\rho_k}\\f$ is the standard error of the sample autocorrelation.\n\t\t\t\t\t-\\f$\\hat{\\rho_{k}}\\f$ is the sample autocorrelation function for lag k.\n\t\t\t\t\t-\\f$Z\\sim N(0,1)\\f$\n\t\t\t\t\t-\\f$P(\\left|Z\\right|\\geq Z_{\\alpha/2}) = \\alpha\\f$\n * \\note 5. For the case in which the underlying population distribution is normal, the sample autocorrelation also has a normal distribution: \n\t\t\t\t-\\f$\\hat \\rho_k \\sim N(\\rho_k,\\sigma_{\\rho_k}^2)\\f$, where:\n\t\t\t\t\t-\\f$\\hat \\rho_k\\f$ is the sample autocorrelation for lag k.\n\t\t\t\t\t-\\f$\\rho_k\\f$ is the population autocorrelation for lag k.\n\t\t\t\t\t-\\f$\\sigma_{\\rho_k}\\f$ is the standard error of the sample autocorrelation for lag k.\n * \\note 6. Bartlett proved that the variance of the sample autocorrelation of a stationary normal stochastic process (i.e. independent, identically normal distributed errors) can be formulated as:\n\t\t\t\t-\\f$\\sigma_{\\rho_k}^2 = \\frac{\\sum_{j=-\\infty}^{\\infty}\\rho_j^2+\\rho_{j+k}\\rho_{j-k}-4\\rho_j\\rho_k\\rho_{i-k}+2\\rho_j^2\\rho_k^2}{T}\\f$\n * \\note 7. Furthermore, the variance of the sample autocorrelation is reformulated: \n\t\t\t\t-\\f$\\sigma_{\\rho_k}^2 = \\frac{1+\\sum_{j=1}^{k-1}\\hat\\rho_j^2}{T}\\f$, where:\n\t\t\t\t\t-\\f$\\sigma_{\\rho_k}\\f$ is the standard error of the sample autocorrelation for lag k. \n\t\t\t\t\t-\\f$T\\f$ is the sample data size.\n\t\t\t\t\t-\\f$\\hat\\rho_j\\f$ is the sample autocorrelation function for lag j.\n\t\t\t\t\t-\\f$k\\f$ is the lag order. 
\n * \\sa NDK_ACF(), NDK_ACF_ERROR()\n */\n int __stdcall NDK_ACFCI(double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n size_t K, ///< [in] is the lag order (e.g. k=0 (no lag), k=1 (1st lag), etc.).\n WORD method, ///< [in] is the method selecor (0 = sample autocorrelation, 1= periodogram-based estimate, 2= cross-correlation based estimate).\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed. \n double* ULCI, ///< [out] is the upper limit value of the confidence interval \n double* LLCI ///< [out] is the lower limit value of the confidence interval.\n );\n\n /*! \n * \\brief Calculates the sample partial autocorrelation function (PACF). \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_ACF(), NDK_PACF_ERROR(),NDK_PACFCI() \n */\n int __stdcall NDK_PACF( double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n size_t K, ///< [in] is the lag order (e.g. k=0 (no lag), k=1 (1st lag), etc.).\n double* retVal ///< [out] is the calculated sample partial-autocorrelation value.\n );\n /*! \n * \\brief Calculates the standard error of the sample partial autocorrelation function (PACF). \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_PACF(), NDK_PACFCI()\n */\n int __stdcall NDK_PACF_ERROR( double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n size_t K, ///< [in] is the lag order (e.g. 
k=0 (no lag), k=1 (1st lag), etc.).\n double* retVal ///< [out] is the standard error in the sample partial-autocorrelation value. \n );\n\n /*! \n * \\brief Calculates the confidence interval limits (upper/lower) for the partial-autocorrelation function.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_ACF(), NDK_ACF_ERROR()\n */\n int __stdcall NDK_PACFCI( double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n size_t K, ///< [in] is the lag order (e.g. k=0 (no lag), k=1 (1st lag), etc.).\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed. \n double* ULCI, ///< [out] is the upper limit value of the confidence interval.\n double* LLCI ///< [out] is the lower limit value of the confidence interval.\n );\n\n\n\n /*! \n * \\brief Calculates the periodgram value for different lags.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_ACF(), NDK_PACF()\n */\n int __stdcall NDK_PERIODOGRAM(double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in pData. \n PERIODOGRAM_OPTION_TYPE option, ///< [in] is the pre-processing option to the time series (e.g. detrend, difference, auto, etc.)\n double alpha, ///< [in] is the statistical significance level (used in the auto-process procedure). If missing, a default of 5% is assumed.\n double* retVal, ///< [out] is the periodogram values for this series\n size_t nOutSize ///< [in] is the size of the output buffer (i.e. retVal)\n );\n\n\n /*! \n * \\brief Calculates the estimated value of the exponential-weighted volatility (EWV). 
\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\note 1. The time series is homogeneous or equally spaced. \n * \\note 2. The time series may include missing values (NaN) at either end. \n * \\note 3. The EWMA function assumes that the time series has an average equal to zero. \n * \\note 4. The exponential-weighted moving average is calculated as:\n\t\t\t\t-\\f$\\sigma_t^2=\\lambda \\sigma_{t-1}^2+(1-\\lambda)x_{t-1}^2\\f$, where:\n\t\t\t\t\t-\\f$x_t\\f$ is the value of the time series value at time t. \n\t\t\t\t\t-\\f$\\lambda\\f$ is the smoothing parameter (i.e. a non-negative constant between 0 and 1).\n * \\note 5. The size of the EWMA time series is equal to the input time series, but with the first observation (or last, if the original series is reversed) set to missing (NaN). \n * \\sa NDK_WMA(), NDK_EWXCF()\n */\n int __stdcall NDK_EWMA(double *X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n double lambda, ///< [in] is the smoothing parameter used for the exponential-weighting scheme. If missing, a default value of 0.94 is assumed \n size_t step, ///< [in] is the forecast time/horizon (expressed in terms of steps beyond the end of the time series X). If missing, a default value of 0 is assumed. \n double* retVal ///< [out] is the estimated value of the exponential-weighted volatility.\n );\n\n /*! \n * \\brief Computes the correlation factor using the exponential-weighted correlation function.\n * \\details NDK_EWXCF computes the correlation estimate using the exponential-weighted covariance (EWCOV) and volatility (EWMA/EWV) method for each time series.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. 
See \\ref SFMacros.h for more details.\n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The two time series must have identical size and time order. \n * \\note 3. The correlation is defined as:\n\t\t\t\t-\\f$\\rho^{(xy)}_t=\\frac{\\sigma_t^{(xy)}}{{_x\\sigma_t}\\times{_y\\sigma_t}}\\f$\n\t\t\t\t-\\f$\\sigma_t^{(xy)} = \\lambda\\sigma_{t-1}^{(xy)}+(1-\\lambda)x_{t-1}y_{t-1}\\f$\n\t\t\t\t-\\f$_x\\sigma_t^2=\\lambda\\times{_x\\sigma_{t-1}^2}+(1-\\lambda)x_{t-1}^2\\f$\n\t\t\t\t-\\f$_y\\sigma_t^2=\\lambda\\times{_y\\sigma_{t-1}^2}+(1-\\lambda)y_{t-1}^2\\f$, where:\n\t\t\t\t\t-\\f$\\rho^{(xy)}_t\\f$ is the sample correlation between X and Y at time t.\n\t\t\t\t\t-\\f$\\sigma_t^{(xy)}\\f$ is the sample exponential-weighted covariance between X and Y at time t.\n\t\t\t\t\t-\\f$_x\\sigma_t\\f$ is the sample exponential-weighted volatility for the time series X at time t.\n\t\t\t\t\t-\\f$_y\\sigma_t\\f$ is the sample exponential-weighted volatility for the time series Y at time t.\n\t\t\t\t\t-\\f$\\lambda\\f$ is the smoothing factor used in the exponential-weighted volatility and covariance calculations.\n * \\sa SFMacros.h, NDK_WMA(), NDK_EWMA()\n */\n int __stdcall NDK_EWXCF( double *X, ///< [in] is the first univariate time series data (a one dimensional array).\n double *Y, ///< [in] is the second univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X (or Y).\n double lambda, ///< [in] is the smoothing parameter used for the exponential-weighting scheme. If missing, a default value of 0.94 is assumed. \n size_t step, ///< [in] is the forecast time/horizon (expressed in terms of steps beyond the end of the time series X). If missing, a default value of 0 is assumed. \n double* retVal ///< [out] is the estimated value of the correlation factor.\n );\n\n\n ///@}\n\n /// \\name Statistical Distribution\n /// Statistical distribution\n /// @{\n\n /*! 
\n * \\brief Calculates the excess kurtosis of the generalized error distribution (GED).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_TDIST_XKURT(), NDK_XKURTTEST()\n */\n int __stdcall NDK_GED_XKURT(double df, ///< [in] is the shape parameter (or degrees of freedom) of the distribution (V > 1). \n double* retVal ///< [out] is the computed value\n );\n\n /*! \n * \\brief Calculates the excess kurtosis of the student's t-distribution.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_TDIST_XKURT(), NDK_XKURTTEST()\n */\n int __stdcall NDK_TDIST_XKURT(double df, ///< [in] is the degrees of freedom of the student's t-distribution (v > 4). \n double* retVal ///< [out] is the computed value.\n );\n\n /*! \n * \\brief Calculates the empirical distribution function (or empirical cdf) of the sample data.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_KERNEL_DENSITY_ESTIMATE(), NDK_HISTOGRAM()\n */\n int __stdcall NDK_EDF(double* pData, ///< [in] is the input data series (one/two dimensional array). \n size_t nSize, ///< [in] is the number of elements in pData.\n double targetVal, ///< [in] is the target value to compute the underlying cdf for. \n WORD retType, ///< [in] is a switch to select the return output (1=CDF (default), 2=Inverse CDF). \n double* retVal ///< [out] is the computed value.\n );\n\n /*! \n * \\brief Returns the number of histogram bins using a given method. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. 
See \\ref SFMacros.h for more details.\n * \\sa NDK_HIST_BIN_LIMIT(), NDK_HISTOGRAM()\n */\n int __stdcall NDK_HIST_BINS(double* pData, ///< [in] is the input data series (one/two dimensional array).\n size_t nSize, ///< [in] is the number of elements in pData.\n WORD argMethod, ///< [in] is a switch to select the calculation method (1=Sturges's formula, 2=Square-root, 3=Scott's Choice, 4=Freedman-Diaconis choice, 5=Optimal (default)). \n size_t* retVal ///< [out] is the computed value.\n );\n\n /*! \n * \\brief Returns the upper/lower limit or center value of the k-th histogram bin. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_HIST_BINS(), NDK_HISTOGRAM()\n */\n int __stdcall NDK_HIST_BIN_LIMIT( double* pData, ///< [in] is the input data series (one/two dimensional array).\n size_t nSize, ///< [in] is the number of elements in pData.\n size_t nBins, ///< [in] is the input number of bins for the histogram.\n size_t index, ///< [in] is the bin index or order; e.g. 0=1st bin (default),1=2nd bin,..., N-1. \n WORD argRetTYpe, ///< [in] is a switch to select the return output (0=lower limit (default), 1=upper limit of the bin, 2=center of the bin). \n double* retVal ///< [out] is the computed value.\n );\n\n /*! \n * \\brief Calculates the histogram or cumulative histogram function for a given bin. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_HIST_BINS(), NDK_HISTOGRAM()\n */\n int __stdcall NDK_HISTOGRAM( double* pData, ///< [in] is the input data series (one/two dimensional array).\n size_t nSize, ///< [in] is the number of elements in pData.\n size_t nBins, ///< [in] is the input number of bins for the histogram.\n size_t index, ///< [in] is the bin index or order; e.g. 
0=1st bin (default),1=2nd bin,..., N. \n WORD argRetTYpe, ///< [in] is a switch to select the return output:\n /// 0. histogram \n /// 1. cumulative histogram (default)). \n double* retVal ///< [out] is the computed value.\n );\n\n\n /*! \n * \\brief Returns the upper/lower limit or center value of the k-th histogram bin. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_HIST_BINS(), NDK_HISTOGRAM()\n */\n int __stdcall NDK_KERNEL_DENSITY_ESTIMATE(double* pData, ///< [in] is the input data series (one/two dimensional array).\n size_t nSize, ///< [in] is the number of elements in pData.\n double targetVal, ///< [in] is the target value to compute the underlying cdf for. \n double bandwidth, ///< [in] is the smoothing parameter (bandwidth) of the kernel density estimator. If missing, the KDE function calculates an optimal value.\n WORD argKernelFunc, ///< [in] is a switch to select the kernel function:\n /// 1=Gaussian (default),\n /// 2=Uniform\n /// 3=Triangular \n /// 4=Biweight (Quatric)\n /// 5=Triweight\n /// 6=Epanechnikov\n double* retVal ///< [out] is the computed value.\n );\n\n\n /*! \n * \\brief Returns a sequence of random numbers drawn from Normal distribution\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_GAUSS_FORECI()\n */\n int __stdcall NDK_GAUSS_RNG( double mean, ///< [in] is the mean of the Gaussian distribution.\n double sigma, ///< [in] is the standard deviation of the Gaussian distribution.\n UINT seed, ///< [in] is a number to initialize the psuedorandom number generator. \n double* retArray, ///< [out] are the generated random values.\n UINT nArraySize ///< [in] is the number of elements in retArray\n );\n\n\n /*! 
\n * \\brief Returns the upper & lower limit of the confidence interval for the Gaussian distribution.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_TSTUDENT_FORECI(), NDK_GED_FORECI()\n */\n int __stdcall NDK_GAUSS_FORECI( double mean, ///< [in] is the mean of the Gaussian distribution.\n double sigma, ///< [in] is the standard deviation of the Gaussian distribution.\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed. \n BOOL upper, ///< [in] is a switch to select the limit (upper/lower).\n double* retVal ///< [out] is the computed value.\n );\n\n /*! \n * \\brief Returns the upper & lower limit of the confidence interval for the student\\'s t-distribution\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_GAUSS_FORECI(), NDK_GED_FORECI()\n */\n int __stdcall NDK_TSTUDENT_FORECI(double mean, ///< [in] is the mean of the student's t-distribution.\n double sigma, ///< [in] is the standard deviation of the student's t-distribution.\n double df, ///< [in] is the degrees of freedom (nu) of the student's t-distribution.\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed. \n BOOL upper, ///< [in] is a switch to select the limit (upper/lower).\n double* retVal ///< [out] is the computed value.\n );\n\n\n\n /*! \n * \\brief Returns the upper & lower limit of the confidence interval for the Generalized Error Distribution (GED) distribution\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. 
See \\ref SFMacros.h for more details.\n * \\sa NDK_TSTUDENT_FORECI(), NDK_GAUSS_FORECI()\n */\n int __stdcall NDK_GED_FORECI( double mean, ///< [in] is the mean of the GED distribution.\n double sigma, ///< [in] is the standard deviation of the GED distribution.\n double df, ///< [in] is the degrees of freedom (nu) of the GED distribution.\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed. \n BOOL upper, ///< [in] is a switch to select the limit (upper/lower).\n double* retVal ///< [out] is the computed value.\n );\n ///@}\n\n\n /// \\name Statistical Testing\n /// Statistical/hypothesis testing is a common method of drawing inferences about a population based on statistical evidence from a sample.\n /// @{\n\n /*! \n * \\brief Calculates the p-value of the statistical test for the population autocorrelation function.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval Error code\n * \\sa SFMacros.h, NDK_WMA(), NDK_EWMA()\n */\n int __stdcall NDK_ACFTEST(double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n int K, ///< [in] is the lag order (e.g. k=0 (no lag), k=1 (1st lag), etc.).\n WORD method, ///< [in] is the type of test: parametric or non-parametric.\n double target, ///< [in] is the assumed autocorrelation function value. If missing, the default of zero is assumed.\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed.\n WORD retType, ///< [in] is a switch to select the return output: (\\ref #TEST_RETURN)\n /// 1. P-value\n /// 2. Test statistics (aka score)\n /// 3. Critical value\n double* retVal ///< [out] is the calculated test statistics.\n );\n\n /*! \n * \\brief Returns the p-value of the normality test (i.e. 
whether a data set is well-modeled by a normal distribution).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful. see \\ref SFMacros.h\n * \\sa NDK_MEANTEST(), NDK_SKEWTEST(), #NORMALTEST_METHOD, #TEST_RETURN\n */\n int __stdcall NDK_NORMALTEST( double* X, ///< [in] is the sample data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed. \n WORD method, ///< [in] is the statistical test to perform (1=Jarque-Bera, 2=Shapiro-Wilk, 3=Chi-Square (Doornik and Hansen)). \n WORD retType, ///< [in] is a switch to select the return output: (\\ref #TEST_RETURN)\n /// 1. P-value\n /// 2. Test statistics (aka score)\n /// 3. Critical value\n double* retVal ///< [out] is the calculated test statistics.\n );\n\n /*! \n * \\brief Computes the p-value of the statistical portmanteau test (i.e. whether any of a group of autocorrelations of a time series are different from zero).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful. see \\ref SFMacros.h\n * \\sa NDK_NORMALTEST(), NDK_ARCHTEST()\n */\n int __stdcall NDK_WNTEST( double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n size_t K, ///< [in] is the lag order (e.g. k=0 (no lag), k=1 (1st lag), etc.).\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed.\n WORD method, ///< [in] is the statistical test to perform (1=Ljung-Box). \n WORD retType, ///< [in] is a switch to select the return output: (\\ref #TEST_RETURN)\n /// 1. P-value\n /// 2. Test statistics (aka score)\n /// 3. Critical value\n double* retVal ///< [out] is the calculated test statistics.\n );\n\n /*! 
\n * \\brief Calculates the p-value of the ARCH effect test (i.e. the white-noise test for the squared time series).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful. see \\ref SFMacros.h\n * \\sa NDK_NORMALTEST(), NDK_ARCHTEST(), \n */\n int __stdcall NDK_ARCHTEST( double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n size_t K, ///< [in] is the lag order (e.g. k=0 (no lag), k=1 (1st lag), etc.).\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed.\n WORD method, ///< [in] is the statistical test to perform (1=Ljung-Box).\n WORD retType, ///< [in] is a switch to select the return output: (\\ref #TEST_RETURN)\n /// 1. P-value\n /// 2. Test statistics (aka score)\n /// 3. Critical value\n double* retVal ///< [out] is the calculated test statistics.\n );\n /*! \n * \\brief Calculates the p-value of the statistical test for the population mean.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful. see \\ref SFMacros.h\n * \\sa NDK_SKEWTEST(), NDK_STDEVTEST()\n */\n int __stdcall NDK_MEANTEST( double* X, ///< [in] is the sample data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n double target, ///< [in] is the assumed mean value. If missing, a default of zero is assumed.\n double alpha, ///< [in] is the statistical significance level. If missing, the default of 5% is assumed.\n WORD method, ///< [in] is the statistical test to perform (1=parametric).\n WORD retType, ///< [in] is a switch to select the return output: (\\ref #TEST_RETURN)\n /// 1. P-value\n /// 2. Test statistics (aka score)\n /// 3. Critical value\n double* retVal ///< [out] is the calculated test statistics.\n );\n /*! 
\n * \\brief Calculates the p-value of the statistical test for the population standard deviation.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful. see \\ref SFMacros.h\n * \\sa NDK_MEANTEST(), NDK_SKEWTEST(), NDK_XKURTTEST()\n */\n int __stdcall NDK_STDEVTEST(double* X, ///< [in] is the sample data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n double target, ///< [in] is the assumed standard deviation value. If missing, a default of one is assumed\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed.\n WORD method, ///< [in] is the statistical test to perform (1=parametric). \n WORD retType, ///< [in] is a switch to select the return output: (\\ref #TEST_RETURN)\n /// 1. P-value\n /// 2. Test statistics (aka score)\n /// 3. Critical value\n double* retVal ///< [out] is the calculated test statistics.\n );\n\n /*! \n * \\brief Calculates the p-value of the statistical test for the population skew (i.e. 3rd moment).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful. see \\ref SFMacros.h\n * \\sa NDK_NORMALTEST(), NDK_MEANTEST(), NDK_STDEVTEST(), NDK_XKURTTEST()\n */\n int __stdcall NDK_SKEWTEST( double* X, ///< [in] is the sample data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n double alpha, ///< [in] is the statistical significance level. If missing, the default of 5% is assumed.\n WORD method, ///< [in] is the statistical test to perform (1=parametric). \n WORD retType, ///< [in] is a switch to select the return output: (\\ref #TEST_RETURN)\n /// 1. P-value\n /// 2. Test statistics (aka score)\n /// 3. Critical value\n double* retVal ///< [out] is the calculated test statistics.\n );\n\n /*! 
\n * \\brief Calculates the p-value of the statistical test for the population excess kurtosis (4th moment).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful. see \\ref SFMacros.h\n * \\sa SFMacros.h, NDK_NORMALTEST(), NDK_MEANTEST(), NDK_STDEVTEST(), NDK_SKEWTEST()\n */\n int __stdcall NDK_XKURTTEST(double* X, ///< [in] is the sample data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed.\n WORD method, ///< [in] is the statistical test to perform (1=parametric). \n WORD retType, ///< [in] is a switch to select the return output: (\\ref #TEST_RETURN)\n /// 1. P-value\n /// 2. Test statistics (aka score)\n /// 3. Critical value\n double* retVal ///< [out] is the calculated test statistics.\n );\n /*! \n * \\brief Calculates the test stats, p-value or critical value of the correlation test.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful. see \\ref SFMacros.h\n * \\sa NDK_NORMALTEST(), NDK_MEANTEST(), NDK_STDEVTEST(), NDK_SKEWTEST()\n */\n int __stdcall NDK_XCFTEST(double* X, ///< [in] is the first univariate time series data (a one dimensional array).\n double *Y, ///< [in] is the second univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X (or Y).\n int K, ///< [in] is the lag order (e.g. k=0 (no lag), k=1 (1st lag), etc.).\n double target, ///< [in] is the assumed correlation value. If missing, a default of zero is assumed.\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed.\n WORD method, ///< [in] is the desired correlation coefficient (1=Pearson (default), 2=Spearman, 3=Kendall). 
If missing, a Pearson coefficient is assumed.\n WORD retType, ///< [in] is a switch to select the return output: (\\ref #TEST_RETURN)\n /// 1. P-value\n /// 2. Test statistics (aka score)\n /// 3. Critical value\n double* retVal ///< [out] is the calculated test statistics.\n );\n\n\n /*! \n * \\brief Returns the p-value of the Augmented Dickey-Fuller (ADF) test, which tests for a unit root in the time series sample.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful. see \\ref SFMacros.h\n * \\sa NDK_NORMALTEST(), NDK_MEANTEST(), NDK_STDEVTEST(), NDK_SKEWTEST()\n */\n int __stdcall NDK_ADFTEST(double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n WORD K, ///< [in] is the lag length of the autoregressive process. If missing, an initial value equal to the cubic root of the input data size is used.\n ADFTEST_OPTION options, ///< [in] is the model description flag for the Dickey-Fuller test variant (1=no constant, 2=contant-only, 3=trend only, 4=constant and trend, 5=const, trend and trend squared). \n BOOL testDown, ///< [in] is the mode of testing. If set to TRUE (default), ADFTest performs a series of tests. The test starts with the input length lag, but the actual length lag order used is obtained by testing down. \n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed.\n WORD method, ///< [in] is the statistical test to perform (1=ADF).\n WORD retType, ///< [in] is a switch to select the return output: (\\ref #TEST_RETURN)\n /// 1. P-value\n /// 2. Test statistics (aka score)\n /// 3. Critical value\n\n double* retVal ///< [inout] is the calculated test statistics.\n );\n int __stdcall NDK_KPSSTEST(double* pData, size_t nSize, WORD maxOrder, WORD option, BOOL testDown, WORD argMethod, WORD retType, double alpha, double* retVal);\n\n\n /*! 
\n * \\brief Returns the Johansen (cointegration) test statistics for two or more time series.\n * \\note 1. Each column in the input matrix corresponds to a separate time series variable.\n * \\note 2. The input matrix can have no more than twelve (12) columns (or variables).\n * \\note 3. Each row in the input matrix corresponds to an observation.\n * \\note 4. The number of cointegrating relationships should be no greater than the number of input variables.\n * \\note 5. The time series data are homogeneous or equally spaced.\n * \\note 6. The time series may include missing values (e.g. NaN) at either end.\n * \\note 7. There are two types of Johansen tests - with trace or with eigenvalue - and the inferences might be a bit different for each.\n * - The null hypothesis for the trace test is the number of cointegration vectors r <= ?\n * - The null hypothesis for the eigenvalue test is r = ?\n * \\note 8. The function was added in version 1.62 DEWDROP.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful. see \\ref SFMacros.h\n * \\sa NDK_ADFTEST(), NDK_MEANTEST(), NDK_STDEVTEST(), NDK_SKEWTEST()\n * \n */\n int __stdcall NDK_JOHANSENTEST(double** XX, ///< [in] is the multivariate time series matrix data (two dimensional).\n size_t N, ///< [in] is the number of observations in XX.\n size_t M, ///< [in] is the number of variables in XX.\n size_t K, ///< [in] is the number of lagged difference terms used when computing the estimator.\n short nPolyOrder, ///< [in] is the order of the polynomial: (-1=no constant, 0=contant-only (default), 1=constant and trend).\n BOOL tracetest, ///< [in] is a flag to select test: TRUE=trace, FALSE=maximal eignvalue test.\n WORD R, ///< [in] is the assumed number of cointegrating relationships between the variables (if missing, r=1). \n double alpha, ///< [in] is the statistical significance level. 
If missing, a default of 5% is assumed.\n double* retStat, ///< [out] is the calculated test statistics score.\n double *retCV ///< [out] is the calculated test critical value.\n ); /// \\example sdk_cointegration.cpp\n\n /*! \n * \\brief Returns the collinearity test statistics for a set of input variables.\n * \\note 1. Each column in the input matrix corresponds to a separate time series variable.\n * \\note 2. The input matrix can have no more than twelve (12) columns (or variables).\n * \\note 3. Each row in the input matrix corresponds to an observation.\n * \\note 4. The input data may include missing values (e.g. NaN).\n * \\note 5. In the variance inflation factor (VIF) method, a series of regressions models are constructed, where one variable is the dependent variable against the remaining predictors.\n * \\note 6. A tolerance of less than 0.20 or 0.10 and/or a VIF of 5 or 10 and above indicates a multicollinearity problem. \n * \\note 7. As a rule of thumb, a condition number (\\f$\\kappa\\f$) greater or equal to 30 indicates a severe multi-collinearity problem.\n * \\note 8. The CollinearityTest function is available starting with version 1.60 APACHE.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful. see \\ref SFMacros.h\n * \\sa NDK_CHOWTEST()\n * \n */\n int __stdcall\tNDK_COLNRTY_TEST (double** XX, ///< [in] is the input variables matrix data (two dimensional).\n size_t N, ///< [in] is the number of rows (observations) in XX.\n size_t M, ///< [in] is the number of columns (variables) in XX.\n LPBYTE mask, ///< [in] is the boolean array to select a subset of the input variables in X. If NULL, all variables in X are included.\n size_t nMaskLen, ///< [in] is the number of elements in the mask. 
Must be zero or equal to M.\n COLNRTY_TEST_TYPE nMethod, ///< [in] is the multi-colinearity measure to compute (see #COLNRTY_TEST_TYPE).\n WORD nColIndex, ///< [in] is a switch to designate the explanatory variable to examine (not required for condition number).\n double* retVal ///< [out] is the calculated statistics of collinearity.\n );\n /*! \n * \\brief Returns the p-value of the regression stability test (i.e. whether the coefficients in two linear regressions on different data sets are equal). \n * \\note 1. Each column in the input matrix corresponds to a separate time series variable.\n * \\note 2. The input matrix can have no more than twelve (12) columns (or variables). \n * \\note 3. Each row in the input matrix corresponds to an observation.\n * \\note 4. The input data may include missing values (e.g. NaN).\n * \\note 5. Observations (i.e. row) with missing values in X or Y are removed.\n * \\note 6. The number of observations of each data set must be larger than the number of explanatory variables. \n * \\note 8. The CollinearityTest function is available starting with version 1.60 APACHE.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful. 
see \\ref SFMacros.h\n * \\sa NDK_COLNRTY_TEST()\n * \n */\n int __stdcall NDK_CHOWTEST( double** XX1, ///< [in] is the independent variables data matrix of the first data set (two dimensional).\n size_t M, ///< [in] is the number of variables (columns) in XX1 and XX2.\n double* Y1, ///< [in] is the response or the dependent variable data array for the first data set (one dimensional array).\n size_t N1, ///< [in] is the number of observations (rows) in the first data set.\n double** XX2, ///< [in] is the independent variables data matrix of the second data set, such that each column represents one variable.\n double* Y2, ///< [in] is the response or the dependent variable data array of the second data set (one dimensional array).\n size_t N2, ///< [in] is the number of observations (rows) in the second data set.\n LPBYTE mask, ///< [in] is the boolean array to select a subset of the input variables in X. If NULL, all variables in X are included. \n size_t nMaskLen, ///< [in] is the number of elements in the mask, which must be zero or equal to M.\n double intercept, ///< [in] is the regression constant or the intercept value (e.g. zero). If missing, an intercept is not fixed and will be computed from the data set.\n TEST_RETURN retType, ///< [in] is a switch to select the return output (see #TEST_RETURN for more details).\n double* retVal ///< [in] is the calculated Chow test statistics.\n );\n\n\n ///@}\n\n\n /*!\n * \\name Transfom\n * @{\n */\n /*! \n * \\brief Returns an array of cells for the backward shifted, backshifted or lagged time series.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_DIFF()\n */\n int __stdcall NDK_LAG(double* X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n size_t K ///< [in] is the lag order (e.g. 
k=0 (no lag), k=1 (1st lag), etc.).\n );\n /*! \n * \\brief Returns an array of cells for the differenced time series (i.e. (1-L^S)^D).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_LAG(), NDK_INTEG\n */\n int __stdcall NDK_DIFF( double* X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n size_t S, ///< [in] is the lag order (e.g. k=0 (no lag), k=1 (1st lag), etc.).\n size_t D ///< [in] is the number of repeated differencing (e.g. d=0 (none), d=1 (difference once), 2=(difference twice), etc.).\n );\n /*! \n * \\brief Returns an array of cells for the integrated time series (inverse operator of NDK_DIFF).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_LAG(), NDK_DIFF\n */\n int __stdcall NDK_INTEG(double* X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n size_t S, ///< [in] is the lag order (e.g. k=0 (no lag), k=1 (1st lag), etc.).\n size_t D, ///< [in] is the number of repeated differencing (e.g. d=0 (none), d=1 (difference once), 2=(difference twice), etc.).\n double* X0, ///< [in,optional] is the initial (un-differenced) univariate time series data (a one dimensional array). If missing (i.e. NULL), zeros are assumed.\n size_t N0 ///< [in] is the number of observations in X0.\n );\n\n\n /*! \n * \\brief Returns an array of cells of a time series after removing all missing values. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. 
See \\ref SFMacros.h for more details.\n * \\sa NDK_LAG(), NDK_DIFF\n */\n int __stdcall NDK_RMNA( double *X, ///< [inout] is the univariate sample data (a one dimensional array).\n size_t* N ///< [inout] is the number of observations in X.\n );\n\n /*! \n * \\brief Returns the time-reversed order time series (i.e. the first observation is swapped with the last observation, etc.): both missing and non-missing values.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_LAG(), NDK_DIFF\n */\n int __stdcall NDK_REVERSE(double *X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N ///< [in] is the number of observations in X.\n );\n\n /*! \n * \\brief Returns an array of cells for the scaled time series. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_ADD(), NDK_SUB()\n */\n int __stdcall NDK_SCALE(double *X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n double K ///< [in] is the scalar/multiplier value.\n );\n\n\n /*! \n * \\brief Returns an array of the difference between two time series. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_ADD(), NDK_SCALE()\n */\n int __stdcall NDK_SUB(double *X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N1, ///< [in] is the number of observations in X. \n const double *Y, ///< [in] is the second univariate time series data (a one dimensional array).\n size_t N2 ///< [in] is the number of observations in Y. \n );\n\n /*! 
\n * \\brief Returns an array of cells for the sum of two time series. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_SUB(), NDK_SCALE()\n */\n int __stdcall NDK_ADD(double *X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N1, ///< [in] is the number of observations in X. \n const double *Y, ///< [in] is the second univariate time series data (a one dimensional array).\n size_t N2 ///< [in] is the number of observations in Y. \n ); \n\n\n\n /*! \n * \\brief Computes the complementary log-log transformation, including its inverse.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_HodrickPrescotFilter(), NDK_DFT(), NDK_IDFT()\n */\n int __stdcall NDK_CLOGLOG(double *X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n WORD retTYpe ///< [in] is a number that determines the type of return value: 1 (or missing)=C-log-log , 2=inverse C-log-log.\n );\n\n /*! \n * \\brief Computes the probit transformation, including its inverse.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_HodrickPrescotFilter(), NDK_DFT(), NDK_IDFT()\n */\n int __stdcall NDK_PROBIT(double *X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n WORD retTYpe ///< [in] is a number that determines the type of return value: 1 (or missing)=probit , 2=inverse probit.\n );\n\n /*! 
\n * \\brief Computes the complementary log-log transformation, including its inverse.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_PROBIT(), NDK_BOXCOX(), NDK_CLOGLOG()\n */\n int __stdcall NDK_LOGIT(double *X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n WORD retTYpe ///< [in] is a number that determines the type of return value: 1 (or missing)=logit, 2=inverse logit.\n );\n\n\n /*! \n * \\brief Computes the complementary log-log transformation, including its inverse.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_LOGIT(), NDK_PROBIT(), NDK_CLOGLOG()\n */\n int __stdcall NDK_BOXCOX( double *X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n double* lambda, ///< [in] is the input power parameter of the transformation, on a scale from 1 to 0. If omitted, a default value of 0 is assumed. \n double* alpha, ///< [in] is the input shift parameter for X. If omitted, the default value is 0. \n int retTYpe, ///< [in] is a number that determines the type of return value: 1 (or missing)=Box-Cox, 2=inverse Box-Cox, 3= LLF of Box-Cox.\n double *retVal ///< [out] is the calculated log-likelihood value of the transform (retType=3).\n );\n\n /*! \n * \\brief Detrends a time series using a regression of y against a polynomial time trend of order p.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. 
See \\ref SFMacros.h for more details.\n * \\sa NDK_BOXCOX(), NDK_RMSEASONAL(), NDK_DIFF()\n */\n int __stdcall NDK_DETREND(double *X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n WORD polyOrder ///< [in] is the order of the polynomial time trend:\n /// 0. subtracts mean (default)\n /// 1. constant plus trend model\n /// 2. constant plus trend and squared trend model\n );\n\n /*! \n * \\brief Returns an array of the deseasonalized time series, assuming a linear model.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_DETREND(), NDK_DIFF()\n */\n int __stdcall NDK_RMSEASONAL( double *X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X.\n size_t period ///< [in] is the number of observations(i.e. points) in one season.\n );\n\n\n\n /*! \n * \\brief Returns an array of a time series after substituting all missing values with the mean/median.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_RMNA(), NDK_INTERPOLATE()\n */\n int __stdcall\tNDK_INTERP_NAN( double* X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n WORD nMethod, ///< [in] is an identifier for the method used to generate values for any missing data:\n /// 1. Mean (default)\n /// 2. Median\n /// 3. Constant\n /// 4. Forward flat \n /// 5. Backward flat\n /// 6. Linear\n /// 7. Cubic spline\n /// 8. Weighted moving average\n /// 9. Exponential smoothing\n /// 10. 
Brownian bridge\n double plug ///< [in] is the data argument related to the selected treatment method (if applicable). For instance, if the method is constant, then the value would be the actual value.\n );\n\n /*! \n * \\brief Examine whether the given array has one or more missing values.\n * \\return status code of the operation\n * \\retval #NDK_TRUE One or more missing value are detected.\n * \\retval #NDK_FALSE No missing value is found.\n * \\retval #NDK_FAILED Operation unsuccessful. See \\ref SFMacros.h for more details.\n * \\sa NDK_RMNA(), NDK_INTERP_NAN()\n */\n int __stdcall\tNDK_HASNA(const double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n BOOL intermediate ///< [in] is a switch to tune the search for missng values:\n /// - TRUE = Only search for intermediate missing values.\n /// - FALSE = Search for all missing values in X.\n );\n\n\n /// \\name Resampling\n /// resampling API functions calls\n /// @{\n\n /*!\n * \\brief Returns the resampled time series.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SESMTH(), NDK_EWMA(), NDK_DESMTH(), NDK_TESMTH, NDK_LESMTH()\n */\n int __stdcall NDK_RESAMPLE(double* pData, size_t nSize, BOOL isStock, double relSampling, IMPUTATION_METHOD method, double* pOutData, size_t *newSize);\n\n int __stdcall\tNDK_INTERP_BROWN(double* pData , size_t nSize);\n ///@}\n\n\n /// \\name Smoothing\n /// Smoothing API functions calls\n /// @{\n\n /*! 
\n * \\brief Returns the weighted moving (rolling/running) average using the previous m data points.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SESMTH(), NDK_EWMA(), NDK_DESMTH(), NDK_TESMTH, NDK_LESMTH()\n */\n int __stdcall NDK_WMA(double *pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of elements in pData.\n BOOL bAscending, ///< [in] is the time order in the data series (i.e. the first data point's corresponding date (earliest date=1 (default), latest date=0)). \n double* weights, ///< [in] is the size of the equal-weighted window or an array of multiplying factors (i.e. weights) of the moving/rolling window. \n size_t nwSize, ///< [in] is the number of elements in the weights array.\n int nHorizon, ///< [in] is the forecast time/horizon beyond the end of X. If missing, a default value of 0 (Latest or end of X) is assumed.\n double* retVal ///< [out] is the calculated value of the weighted moving average.\n );\n\n\n /*! \n * \\brief Returns the (Brown's) simple exponential (EMA) smoothing estimate of the value of X at time t+m (based on the raw data up to time t).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_WMA(), NDK_EWMA(), NDK_DESMTH(), NDK_TESMTH, NDK_LESMTH()\n */\n int __stdcall NDK_SESMTH(double *pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of elements in pData.\n BOOL bAscending, ///< [in] is the time order in the data series (i.e. the first data point's corresponding date (earliest date=1 (default), latest date=0)).\n double* alpha, ///< [inout] is the smoothing factor (alpha should be between zero and one (exclusive)). 
If missing or omitted, a value of 0.333 is used.\n int nHorizon, ///< [in] is the forecast time horizon beyond the end of X. If missing, a default value of 0 (latest or end of X) is assumed.\n BOOL bOptimize, ///< [in] is a flag (True/False) for searching and using the optimal value of the smoothing factor. If missing or omitted, optimize is assumed false.\n double* internals, ///< [out,opt] is an array of the intermediate forecast calculation.\n size_t nInternalsSize, ///< [inout,opt] size of the output buffer, and number or values to return.\n double* retVal ///< [out] is the calculated value of this function.\n );\n\n /*! \n * \\brief Returns the (Holt-Winter's) double exponential smoothing estimate of the value of X at time T+m.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_WMA(), NDK_EWMA(), NDK_SESMTH(), NDK_TESMTH, NDK_LESMTH()\n */\n int __stdcall NDK_DESMTH(double *pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of elements in pData.\n BOOL bAscending, ///< [in] is the time order in the data series (i.e. the first data point's corresponding date (earliest date=1 (default), latest date=0)).\n double *alpha, ///< [in] is the data smoothing factor (alpha should be between zero and one (exclusive)).\n double *beta, ///< [in] is the trend smoothing factor (beta should be between zero and one (exclusive)).\n int xlHorizon, ///< [in] is the forecast time horizon beyond the end of X. If missing, a default value of 0 (latest or end of X) is assumed.\n BOOL bOptimize, ///< [in] is a flag (True/False) for searching and using the optimal value of the smoothing factor. If missing or omitted, optimize is assumed false. 
\n double* internals, ///< [out,opt] is an array of the intermediate forecast calculation.\n size_t nInternalsSize, ///< [in,opt] size of the output buffer, and number or values to return.\n WORD wInternalSeries, ///< [in, opt] a switch to select the series to return in internals ( 0 = Smoothing forecast, 1=level, 2=trend)\n double* retVal ///< [out] is the calculated value of this function.\n );\n\n /*! \n * \\brief Returns the (Brown's) linear exponential smoothing estimate of the value of X at time T+m (based on the raw data up to time t).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_WMA(), NDK_EWMA(), NDK_SESMTH(), NDK_TESMTH, NDK_DESMTH()\n */\n int __stdcall NDK_LESMTH( double *pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of elements in pData.\n BOOL bAscending, ///< [in] is the time order in the data series (i.e. the first data point's corresponding date (earliest date=1 (default), latest date=0)).\n double *alpha, ///< [in] is the smoothing factor (alpha should be between zero and one (exclusive)). If missing or omitted, a value of 0.333 is used.\n int xlHorizon, ///< [in] is the forecast time horizon beyond the end of X. If missing, a default value of 0 (latest or end of X) is assumed.\n BOOL bOptimize, ///< [in] is a flag (True/False) for searching and using the optimal value of the smoothing factor. If missing or omitted, optimize is assumed false. \n double* internals, ///< [out,opt] is an array of the intermediate forecast calculation.\n size_t nInternalsSize, ///< [in,opt] size of the output buffer, and number or values to return.\n WORD wInternalSeries, ///< [in, opt] a switch to select the series to return in internals ( 0 = Smoothing forecast, 1=level, 2=trend)\n double* retVal ///< [out] is the calculated value of this function.\n );\n\n /*! 
\n * \\brief Returns the (Winters's) triple exponential smoothing estimate of the value of X at time T+m.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_WMA(), NDK_EWMA(), NDK_SESMTH(), NDK_LESMTH, NDK_DESMTH()\n */\n int __stdcall NDK_TESMTH(double *pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of elements in pData.\n BOOL bAscending, ///< [in] is the time order in the data series (i.e. the first data point's corresponding date (earliest date=1 (default), latest date=0)).\n double *alpha, ///< [in] is the data smoothing factor (alpha should be between zero and one (exclusive)).\n double *beta, ///< [in] is the trend smoothing factor (beta should be between zero and one (exclusive)).\n double *gamma, ///< [in] is the seasonal change smoothing factor (Gamma should be between zero and one (exclusive)).\n int L, ///< [in] is the season length.\n int nHorizon, ///< [in] is the forecast time horizon beyond the end of X. If missing, a default value of 0 (latest or end of X) is assumed.\n BOOL bOptimize, ///< [in] is a flag (True/False) for searching and using optimal value of the smoothing factor. If missing or omitted, optimize is assumed false. 
\n double* internals, ///< [out,opt] is an array of the intermediate forecast calculation.\n size_t nInternalsSize, ///< [in,opt] size of the output buffer, and number or values to return.\n WORD wInternalSeries, ///< [in, opt] a switch to select the series to return in internals ( 0 = Smoothing forecast, 1=level, 2=trend)\n double* retVal ///< [out] is the calculated value of this function.\n );\n\n\n /*!\n * \\brief Returns the (Winters's) triple exponential smoothing estimate of the value of X at time T+m.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_WMA(), NDK_EWMA(), NDK_SESMTH(), NDK_LESMTH, NDK_DESMTH()\n */\n int __stdcall NDK_GESMTH(double *pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of elements in pData.\n BOOL bAscending, ///< [in] is the time order in the data series (i.e. the first data point's corresponding date (earliest date=1 (default), latest date=0)).\n double *alpha, ///< [in] is the data smoothing factor (alpha should be between zero and one (exclusive)).\n double *beta, ///< [in] is the trend smoothing factor (beta should be between zero and one (exclusive)).\n double *gamma, ///< [in] is the seasonal change smoothing factor (Gamma should be between zero and one (exclusive)).\n double *phi, ///< [in] is the damping coefficient for the trend.\n double *lambda, ///< [in] is the coefficient value for the autocorrelation adjustment\n WORD TrendType, ///< [in] is the type of trend in the model (0=none, 1=additive, 2- damped additive, 3=multiplicative, 4=damped multiplicative)\n WORD SeasonalityType, ///< [in] is the type of seasonality in the modem (0=none, 1=additive, 2=multiplicative)\n int seasonLength, ///< [in] is the season length.\n int nHorizon, ///< [in] is the forecast time horizon beyond the end of X. 
If missing, a default value of 0 (latest or end of X) is assumed.\n BOOL bOptimize, ///< [in] is a flag (True/False) for searching and using optimal value of the smoothing factor. If missing or omitted, optimize is assumed false. \n BOOL bAutoCorrelationAdj, ///< [in] is a flag (True/False) for adding a correction term for the 1st ourder autocorrelation in the\n BOOL bLogTransform, ///< [in] is a flag (True/False) for applying natural log transform to the input data prior to smoothing.\n double* internals, ///< [out,opt] is an array of the intermediate forecast calculation.\n size_t nInternalsSize, ///< [in,opt] size of the output buffer, and number or values to return.\n WORD wInternalSeries, ///< [in, opt] a switch to select the series to return in internals ( 0 = one-step forecasting, 1=level, 2=trend, 3=seasonality)\n double* retVal ///< [out] is the calculated value of this function.\n );\n\n /*! \n * \\brief Returns values along a trend curve (e.g. linear, quadratic, exponential, etc.) at time T+m.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_WMA(), NDK_EWMA(), NDK_SESMTH(), NDK_LESMTH, NDK_DESMTH(), NDK_TESMTH\n */\n int __stdcall NDK_TREND(double *pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of elements in pData.\n BOOL bAscending, ///< [in] is the time order in the data series (i.e. the first data point's corresponding date (earliest date=1 (default), latest date=0)).\n WORD nTrendType, ///< [in] is the model description flag for the trend function:\n /// 1. Linear\n /// 2. Polynomial\n /// 3. Exponential\n /// 4. Logarithmic\n /// 5. Power\n\n WORD argPolyOrder, ///< [in] is the polynomial order. This is only relevant for a polynomial trend type and is ignored for all others. If missing, POrder = 1. 
\n BOOL AllowIntercep, ///< [in] is a switch to include or exclude an intercept in the regression.\n double InterceptVal, ///< [in] is the constant or the intercept value to fix (e.g. zero). If missing (i.e. NaN), an intercept will not be fixed and is computed normally.\n int nHorizon, ///< [in] is the forecast time horizon beyond the end of X. If missing, a default value of 0 (latest or end of X) is assumed.\n WORD retType, ///< [in] is a switch to select the return output: \n /// 1. Forecast value\n /// 2. Upper limit of the confidence interval\n /// 3. Lower limit of the confidence interval\n /// 4. R-Squared\n double argAlpha, ///< [in] is the statistical significance or confidence level (i.e. alpha). If missing or omitted, an alpha value of 5% is assumed.\n double* retVal ///< [out] is the calculated value of this function.\n );\n ///@}\n\n\n\n\n /// \\name Multiple Linear Regression (MLR)\n /// @{\n int __stdcall\tNDK_SLR_PARAM (double* pXData, size_t nXSize, \n double* pYData, size_t nYSize,\n double intercept,\n double alpha,\n WORD nRetType,\n WORD ParamIndex,\n double* retVal);\n int __stdcall\tNDK_SLR_FORE (double* pXData, size_t nXSize, \n double* pYData, size_t nYSize,\n double intercept,\n double target,\n double alpha,\n WORD nRetType,\n double* retVal);\n int __stdcall\tNDK_SLR_FITTED (double* pXData, size_t nXSize, \n double* pYData, size_t nYSize,\n double intercept,\n WORD nRetType);\n int __stdcall\tNDK_SLR_ANOVA ( double* pXData, size_t nXSize, \n double* pYData, size_t nYSize,\n double intercept,\n WORD nRetType,\n double* retVal);\n int __stdcall\tNDK_SLR_GOF ( double* pXData, size_t nXSize, \n double* pYData, size_t nYSize,\n double intercept,\n WORD nRetType,\n double* retVal);\n \n /*! 
\n * \\brief Calculates the OLS regression coefficients values.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_MLR_FORE(), NDK_MLR_FITTED(), NDK_MLR_ANOVA(), NDK_MLR_GOF, NDK_MLR_PRFTest, NDK_MLR_STEPWISE\n */\n int __stdcall\tNDK_MLR_PARAM (double** X, ///< [in] is the independent (explanatory) variables data matrix, such that each column represents one variable.\n size_t nXSize, ///< [in] is the number of observations (rows) in X.\n size_t nXVars, ///< [in] is the number of independent (explanatory) variables (columns) in X.\n LPBYTE mask, ///< [in] is the boolean array to choose the explanatory variables in the model. If missing, all variables in X are included.\n size_t nMaskLen, ///< [in] is the number of elements in the \"mask.\"\n double* Y, ///< [in] is the response or the dependent variable data array (one dimensional array of cells).\n size_t nYSize, ///< [in] is the number of observations in Y.\n double intercept, ///< [in] is the constant or intercept value to fix (e.g. zero). If missing (i.e. NaN), an intercept will not be fixed and is computed normally.\n double alpha, ///< [in] is the statistical significance of the test (i.e. alpha). If missing or omitted, an alpha value of 5% is assumed.\n WORD nRetType, ///< [in] is a switch to select the return output (1=value (default), 2=std. error, 3=t-stat, 4=P-value, 5=upper limit (CI), 6=lower limit (CI)):\n /// 1. Value (mean)\n /// 2. Std error\n /// 3. Test score\n /// 4. P-value\n /// 5. Upper limit of the confidence interval\n /// 6. Lower limit of the confidence interval\n WORD nParamIndex, ///< [in] is a switch to designate the target parameter (0=intercept (default), 1=first variable, 2=2nd variable, etc.).\n double* retVal ///< [out] is the computed statistics of the regression coefficient.\n );\n\n /*! 
\n * \\brief Calculates the forecast mean, error and confidence interval. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_MLR_PARAM(), NDK_MLR_FITTED(), NDK_MLR_ANOVA(), NDK_MLR_GOF, NDK_MLR_PRFTest, NDK_MLR_STEPWISE\n */\n int __stdcall\tNDK_MLR_FORE (double** X, ///< [in] is the independent (explanatory) variables data matrix, such that each column represents one variable.\n size_t nXSize, ///< [in] is the number of observations (rows) in X.\n size_t nXVars, ///< [in] is the number of independent (explanatory) variables (columns) in X.\n LPBYTE mask, ///< [in] is the boolean array to choose the explanatory variables in the model. If missing, all variables in X are included.\n size_t nMaskLen, ///< [in] is the number of elements in the \"mask.\"\n double* Y, ///< [in] is the response or the dependent variable data array (one dimensional array of cells).\n size_t nYSize, ///< [in] is the number of observations in Y.\n double intercept, ///< [in] is the constant or intercept value to fix (e.g. zero). If missing (i.e. NaN), an intercept will not be fixed and is computed normally.\n double* target, ///< [in] is the value of the explanatory variables (a one dimensional array).\n double alpha, ///< [in] is the statistical significance of the test (i.e. alpha). If missing or omitted, an alpha value of 5% is assumed.\n WORD nRetType, ///< [in] is a switch to select the return output (1=forecast (default), 2=error, 3=upper limit, 4=lower limit):\n /// 1. Forecast (mean)\n /// 2. Std error\n /// 3. Upper limit of the confidence interval\n /// 4. Lower limit of the conficence interval\n double* retVal ///< [out] is the computed forecast statistics.\n\n );\n\n /*! 
\n * \\brief Returns the fitted values of the conditional mean, residuals or leverage measures.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_MLR_FORE(), NDK_MLR_PARAM(), NDK_MLR_ANOVA(), NDK_MLR_GOF, NDK_MLR_PRFTest, NDK_MLR_STEPWISE\n */\n int __stdcall\tNDK_MLR_FITTED (double** X, ///< [in] is the independent (explanatory) variables data matrix, such that each column represents one variable.\n size_t nXSize, ///< [in] is the number of observations (rows) in X.\n size_t nXVars, ///< [in] is the number of independent (explanatory) variables (columns) in X.\n LPBYTE mask, ///< [in] is the boolean array to choose the explanatory variables in the model. If missing, all variables in X are included.\n size_t nMaskLen, ///< [in] is the number of elements in the \"mask.\"\n double* Y, ///< [in] is the response or dependent variable data array (one dimensional array of cells).\n size_t nYSize, ///< [in] is the number of observations in Y.\n double intercept, ///< [in] is the constant or intercept value to fix (e.g. zero). If missing (i.e. NaN), an intercept will not be fixed and is computed normally.\n WORD nRetType ///< [in] is a switch to select the return output (1=fitted values (default), 2=residuals, 3=standardized residuals, 4=leverage, 5=Cook's distance).\n /// 1. Fitted/conditional mean\n /// 2. Residuals\n /// 3. Standardized residuals\n /// 4. Leverage factor (H)\n /// 5. Cook's distance (D)\n );\n\n /*! \n * \\brief Calculates the regression model analysis of the variance (ANOVA) values. 
\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_MLR_FORE(), NDK_MLR_PARAM(), NDK_MLR_FITTED(), NDK_MLR_GOF, NDK_MLR_PRFTest, NDK_MLR_STEPWISE\n */\n int __stdcall\tNDK_MLR_ANOVA (double** pXData, ///< [in] is the independent (explanatory) variables data matrix, such that each column represents one variable.\n size_t nXSize, ///< [in] is the number of observations (rows) in X\n size_t nXVars, ///< [in] is the number of independent (explanatory) variables (columns) in X.\n LPBYTE mask, ///< [in] is the boolean array to choose the explanatory variables in the model. If missing, all variables in X are included.\n size_t nMaskLen, ///< [in] is the number of elements in the \"mask.\"\n double* Y, ///< [in] is the response or dependent variable data array (one dimensional array of cells).\n size_t nYSize, ///< [in] is the number of observations in Y.\n double intercept, ///< [in] is the constant or intercept value to fix (e.g. zero). If missing (i.e. NaN), an intercept will not be fixed and is computed normally.\n WORD nRetType, ///< [in] is a switch to select the output (1=SSR (default), 2=SSE, 3=SST, 4=MSR, 5=MSE, 6=F-stat, 7=P-value):\n /// 1. SSR (sum of squares of the regression)\n /// 2. SSE (sum of squares of the residuals)\n /// 3. SST (sum of squares of the dependent variable)\n /// 4. MSR (mean squares of the regression)\n /// 5. MSE (mean squares error or residuals)\n /// 6. F-stat (test score)\n /// 7. Significance F (P-value of the test)\n double* retVal ///< [out] is the calculated statistics ANOVA output.\n );\n\n /*! \n * \\brief Calculates a measure for the goodness of fit (e.g. R^2). 
\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_MLR_FORE(), NDK_MLR_PARAM(), NDK_MLR_FITTED(), NDK_MLR_GOF, NDK_MLR_PRFTest, NDK_MLR_STEPWISE\n */\n int __stdcall\tNDK_MLR_GOF ( double** X, ///< [in] is the independent (explanatory) variables data matrix, such that each column represents one variable.\n size_t nXSize, ///< [in] is the number of observations (rows) in X.\n size_t nXVars, ///< [in] is the number of independent (explanatory) variables (columns) in X.\n LPBYTE mask, ///< [in] is the boolean array to choose the explanatory variables in the model. If missing, all variables in X are included.\n size_t nMaskLen, ///< [in] is the number of elements in the \"mask.\"\n double* Y, ///< [in] is the response or dependent variable data array (one dimensional array of cells).\n size_t nYSize, ///< [in] is the number of observations in Y.\n double intercept, ///< [in] is the constant or intercept value to fix (e.g. zero). If missing (i.e. NaN), an intercept will not be fixed and is computed normally.\n WORD nRetType, ///< [in] is a switch to select a fitness measure (1=R-square (default), 2=adjusted R-square, 3=RMSE, 4=LLF, 5=AIC, 6=BIC/SIC):\n /// 1. R-square (coefficient of determination)\n /// 2. Adjusted R-square\n /// 3. Regression Error (RMSE)\n /// 4. Log-likelihood (LLF)\n /// 5. Akaike information criterion (AIC)\n /// 6. Schwartz/Bayesian information criterion (SIC/BIC)\n double* retVal ///< [out] is the calculated goodness-of-fit statistics.\n );\n /*! 
\n * \\brief Calculates the p-value and related statistics of the partial f-test (used for testing the inclusion/exclusion variables).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_MLR_FORE(), NDK_MLR_PARAM(), NDK_MLR_ANOVA(), NDK_MLR_GOF, NDK_MLR_FITTED, NDK_MLR_STEPWISE\n */\n int __stdcall\tNDK_MLR_PRFTest ( double** X, ///< [in] is the independent (explanatory) variables data matrix, such that each column represents one variable.\n size_t nXSize, ///< [in] is the number of observations (rows) in X.\n size_t nXVars, ///< [in] is the number of independent (explanatory) variables (columns) in X.\n double* Y, ///< [in] is the response or dependent variable data array (one dimensional array of cells).\n size_t nYSize, ///< [in] is the number of observations in Y.\n double intercept, ///< [in] is the constant or intercept value to fix (e.g. zero). If missing (i.e. NaN), an intercept will not be fixed and is computed normally.\n LPBYTE mask1, ///< [in] is the boolean array to choose the explanatory variables in model 1. If missing, all variables in X are included.\n size_t nMaskLen1, ///< [in] is the number of elements in \"mask1.\"\n LPBYTE mask2, ///< [in] is the boolean array to choose the explanatory variables in model 2. If missing, all variables in X are included.\n size_t nMaskLen2, ///< [in] is the number of elements in \"mask2.\"\n double alpha, ///< [in] is the statistical significance of the test (i.e. alpha). If missing or omitted, an alpha value of 5% is assumed.\n WORD nRetType, ///< [in] is a switch to select the return output (1 = P-Value (default), 2 = Test Stats, 3 = Critical Value.) \n double* retVal ///< [out] is the calculated test statistics/\n );\n\n /*! 
\n * \\brief Returns a list of the selected variables after performing the stepwise regression.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_MLR_FORE(), NDK_MLR_PARAM(), NDK_MLR_ANOVA(), NDK_MLR_GOF, NDK_MLR_PRFTest, NDK_MLR_STEPWISE\n */\n int __stdcall\tNDK_MLR_STEPWISE (double** X, ///< [in] is the independent (explanatory) variables data matrix, such that each column represents one variable.\n size_t nXSize, ///< [in] is the number of observations (rows) in X.\n size_t nXVars, ///< [in] is the number of independent (explanatory) variables (columns) in X.\n LPBYTE mask, ///< [inout] is the boolean array to choose the explanatory variables in the model. If missing, all variables in X are included.\n size_t nMaskLen, ///< [in] is the number of elements in the \"mask.\"\n double* Y, ///< [in] is the response or dependent variable data array (one dimensional array of cells).\n size_t nYSize, ///< [in] is the number of observations in Y.\n double intercept, ///< [in] is the constant or intercept value to fix (e.g. zero). If missing (i.e. NaN), an intercept will not be fixed and is computed normally.\n double alpha, ///< [in] is the statistical significance of the test (i.e. alpha). If missing or omitted, an alpha value of 5% is assumed.\n WORD nMode ///< [in] is a switch to select the variable's inclusion/exclusion approach (1=forward selection (default), 2=backward elimination , 3=bi-directional elimination):\n /// 1. Forward selection\n /// 2. Bacward elemination\n /// 3. Bi-directional elemination\n );\n ///@}\n\n\n /// \\name Principal Component Analysis (PCA) \n /// @{\n /*! 
\n * \\brief Returns an array of cells for the i-th principal component (or residuals).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_PCA_VAR(), NDK_PCR_PARAM(), NDK_PCR_FORE(), NDK_PCR_FITTED(), NDK_PCR_ANOVA(), NDK_PCR_GOF(), NDK_PCR_PRFTest(), NDK_PCR_STEPWISE()\n */\n int __stdcall\tNDK_PCA_COMP (double** X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nXSize, ///< [in] is the number of observations (i.e. rows) in X\n size_t nXVars, ///< [in] is the number of variables (i.e. columns) in X\n LPBYTE mask, ///< [in] is the boolean array to select a subset of the input variables in X. If missing (i.e. NULL), all variables in X are included. \n size_t nMaskLen, ///< [in] is the number of elements in \n WORD standardize, ///< [in] is a flag or switch to standardize the input variables prior to the analysis:\n /// 1. standardize ((subtract mean and divide by standard deviation)\n /// 2. subtract mean. \n WORD nCompIndex, ///< [in] is the component number to return.\n WORD retType, ///< [in] is a switch to select the return output \n /// 1. proportion of variance, \n /// 2. variance, \n /// 3. eigenvalue, \n /// 4. loadings, \n /// 5. Principal Component (PC) data. \n double* retVal, ///< [out] is the calculated value or data\n size_t nOutSize ///< [in] is the size of retVal\n );\n\n /*! 
\n * \\brief Returns an array of cells for the fitted values of the i-th input variable.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_PCA_VAR(), NDK_PCR_PARAM(), NDK_PCR_FORE(), NDK_PCR_FITTED(), NDK_PCR_ANOVA(), NDK_PCR_GOF(), NDK_PCR_PRFTest(), NDK_PCR_STEPWISE()\n */\n int __stdcall\tNDK_PCA_VAR ( double** X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nXSize, ///< [in] is the number of observations (i.e. rows) in X\n size_t nXVars, ///< [in] is the number of variables (i.e. columns) in X\n LPBYTE varMask, ///< [in] is the boolean array to select a subset of the input variables in X. If missing (i.e. NULL), all variables in X are included. \n size_t nMaskLen, ///< [in] is the number of elements in mask\n WORD standardize, ///< [in] is a flag or switch to standardize the input variables prior to the analysis:\n /// 1. standardize ((subtract mean and divide by standard deviation)\n /// 2. subtract mean. \n WORD nVarIndex, ///< [in] is the input variable number\n WORD wMacPC, ///< [in] is the number of principal components (PC) to include\n WORD retType, ///< [in] is a switch to select the return output:\n /// 1. final communality \n /// 2. loading/weights\n /// 3. fitted values \n /// 4. residuals\n double* retVal, ///< [out] is the calculated value or data\n size_t nOutSize ///< [in] is the size of retVal\n );\n /*! 
\n * \\brief Calculates the regression coefficients values for a given input variable.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_PCA_VAR(), NDK_PCR_PARAM(), NDK_PCR_FORE(), NDK_PCR_FITTED(), NDK_PCR_ANOVA(), NDK_PCR_GOF(), NDK_PCR_PRFTest(), NDK_PCR_STEPWISE()\n */\n int __stdcall\tNDK_PCR_PARAM ( double** X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nXSize, ///< [in] is the number of observations (i.e. rows) in X\n size_t nXVars, ///< [in] is the number of variables (i.e. columns) in X\n LPBYTE mask, ///< [in] is the boolean array to select a subset of the input variables in X. If missing (i.e. NULL), all variables in X are included. \n size_t nMaskLen, ///< [in] is the number of elements in mask\n double* Y, ///< [in] is the response or the dependent variable data array (one dimensional array)\n size_t nYSize, ///< [in] is the number of elements in Y\n double intercept, ///< [in] is the constant or the intercept value to fix (e.g. zero). If missing (NaN), an intercept will not be fixed and is computed normally\n double alpha, ///< [in] is the statistical significance of the test (i.e. alpha)\n WORD nRetType, ///< [in] is a switch to select the return output:\n /// 1. Value (default), \n /// 2. Std. Error \n /// 3. t-stat \n /// 4. P-Value\n /// 5. Upper Limit (CI)\n /// 6. Lower Limit (CI)) \n WORD nParamIndex, ///< [in] is a switch to designate the target parameter (0 = intercept (default), 1 = first variable, 2 = 2nd variable, etc.). \n double* retVal ///< [out] is the calculated parameter value or statistics.\n );\n\n /*! \n * \\brief Calculates the model's estimated values, std. 
errors and related statistics.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_PCA_VAR(), NDK_PCR_PARAM(), NDK_PCR_FORE(), NDK_PCR_FITTED(), NDK_PCR_ANOVA(), NDK_PCR_GOF(), NDK_PCR_PRFTest(), NDK_PCR_STEPWISE()\n */\n int __stdcall\tNDK_PCR_FORE (double** X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nXSize, ///< [in] is the number of observations (i.e. rows) in X\n size_t nXVars, ///< [in] is the number of variables (i.e. columns) in X\n LPBYTE mask, ///< [in] is the boolean array to select a subset of the input variables in X. If missing (i.e. NULL), all variables in X are included. \n size_t nMaskLen, ///< [in] is the number of elements in mask\n double* Y, ///< [in] is the response or the dependent variable data array (one dimensional array)\n size_t nYSize, ///< [in] is the number of elements in Y \n double intercept, ///< [in] is the constant or the intercept value to fix (e.g. zero). If missing (NaN), an intercept will not be fixed and is computed normally\n double* target, ///< [in] is the value of the explanatory variables (a one dimensional array) \n double alpha, ///< [in] is the statistical significance of the test (i.e. alpha)\n WORD nRetType, ///< [in] is a switch to select the return output (1 = forecast (default), 2 = error, 3 = upper limit, 4 = lower limit). \n double* retVal ///< [out] is the calculated forecast value or statistics. \n );\n\n /*! 
\n * \\brief Returns an array of cells for the i-th principal component (or residuals).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_PCA_VAR(), NDK_PCR_PARAM(), NDK_PCR_FORE(), NDK_PCR_FITTED(), NDK_PCR_ANOVA(), NDK_PCR_GOF(), NDK_PCR_PRFTest(), NDK_PCR_STEPWISE()\n */\n int __stdcall\tNDK_PCR_FITTED (double** X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nXSize, ///< [in] is the number of observations (i.e. rows) in X\n size_t nXVars, ///< [in] is the number of variables (i.e. columns) in X\n LPBYTE mask, ///< [in] is the boolean array to select a subset of the input variables in X. If missing (i.e. NULL), all variables in X are included. \n size_t nMaskLen, ///< [in] is the number of elements in mask\n double* Y, ///< [inout] is the response or the dependent variable data array (one dimensional array)\n size_t nYSize, ///< [in] is the number of elements in Y \n double intercept, ///< [in] is the constant or the intercept value to fix (e.g. zero). If missing (NaN), an intercept will not be fixed and is computed normally\n WORD nRetType ///< [in] is a switch to select the return output \n /// 1. fitted values (default), \n /// 2. residuals, \n /// 3. standardized residuals, \n /// 4. leverage (H), \n /// 5. Cook's distance. \n );\n\n /*! 
\n * \\brief Returns an array of cells for the i-th principal component (or residuals).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_PCA_VAR(), NDK_PCR_PARAM(), NDK_PCR_FORE(), NDK_PCR_FITTED(), NDK_PCR_ANOVA(), NDK_PCR_GOF(), NDK_PCR_PRFTest(), NDK_PCR_STEPWISE()\n */\n int __stdcall\tNDK_PCR_ANOVA ( double** X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nXSize, ///< [in] is the number of observations (i.e. rows) in X\n size_t nXVars, ///< [in] is the number of variables (i.e. columns) in X\n LPBYTE mask, ///< [in] is the boolean array to select a subset of the input variables in X. If missing (i.e. NULL), all variables in X are included. \n size_t nMaskLen, ///< [in] is the number of elements in mask\n double* Y, ///< [in] is the response or the dependent variable data array (one dimensional array)\n size_t nYSize, ///< [in] is the number of elements in Y \n double intercept, ///< [in] is the constant or the intercept value to fix (e.g. zero). If missing (NaN), an intercept will not be fixed and is computed normally\n WORD nRetType, ///< [in] is a switch to select the return output:\n /// 1. SSR (sum of squares of the regression)\n /// 2. SSE (sum of squares of the residuals)\n /// 3. SST (sum of squares of the dependent variable)\n /// 4. MSR (mean squares of the regression)\n /// 5. MSE (mean squares error or residuals)\n /// 6. F-stat (test score)\n /// 7. Significance F (P-value of the test)\n double* retVal ///< [out] is the calculated statistics ANOVA output.\n );\n\n /*! 
\n * \\brief Returns an array of cells for the i-th principal component (or residuals).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_PCA_VAR(), NDK_PCR_PARAM(), NDK_PCR_FORE(), NDK_PCR_FITTED(), NDK_PCR_ANOVA(), NDK_PCR_GOF(), NDK_PCR_PRFTest(), NDK_PCR_STEPWISE()\n */\n int __stdcall\tNDK_PCR_GOF ( double** X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nXSize, ///< [in] is the number of observations (i.e. rows) in X\n size_t nXVars, ///< [in] is the number of variables (i.e. columns) in X\n LPBYTE mask, ///< [in] is the boolean array to select a subset of the input variables in X. If missing (i.e. NULL), all variables in X are included. \n size_t nMaskLen, ///< [in] is the number of elements in mask\n double* Y, ///< [in] is the response or the dependent variable data array (one dimensional array)\n size_t nYSize, ///< [in] is the number of elements in Y \n double intercept, ///< [in] is the constant or the intercept value to fix (e.g. zero). If missing (NaN), an intercept will not be fixed and is computed normally\n WORD nRetType, ///< [in] is a switch to select a fitness measure (1 = R-Square (default), 2 = Adjusted R Square, 3 = RMSE, 4 = LLF, 5 = AIC, 6 = BIC/SIC ). \n /// 1. R-square (coefficient of determination)\n /// 2. Adjusted R-square\n /// 3. Regression Error (RMSE)\n /// 4. Log-likelihood (LLF)\n /// 5. Akaike information criterion (AIC)\n /// 6. Schwartz/Bayesian information criterion (SIC/BIC)\n double* retVal ///< [out] is the calculated goodness of fit measure\n );\n /*! 
\n * \\brief Returns an array of cells for the i-th principal component (or residuals).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_PCA_VAR(), NDK_PCR_PARAM(), NDK_PCR_FORE(), NDK_PCR_FITTED(), NDK_PCR_ANOVA(), NDK_PCR_GOF(), NDK_PCR_PRFTest(), NDK_PCR_STEPWISE()\n */\n int __stdcall\tNDK_PCR_PRFTest ( double** X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nXSize, ///< [in] is the number of observations (i.e. rows) in X\n size_t nXVars, ///< [in] is the number of variables (i.e. columns) in X\n double* Y, ///< [in] is the response or the dependent variable data array (one dimensional array)\n size_t nYSize, ///< [in] is the number of elements in Y \n double intercept, ///< [in] is the constant or the intercept value to fix (e.g. zero). If missing (NaN), an intercept will not be fixed and is computed normally\n LPBYTE mask1, ///< [in] is the boolean array to select a subset of the input variables in X. If missing (i.e. NULL), all variables in X are included. \n size_t nMaskLen1, ///< [in] is the number of elements in mask1\n LPBYTE mask2, ///< [in] is the boolean array to select a subset of the input variables in X. If missing (i.e. NULL), all variables in X are included. \n size_t nMaskLen2, ///< [in] is the number of elements in mask2\n double alpha, ///< [in] is the statistical significance of the test (i.e. alpha)\n WORD nRetType, ///< [in] is a switch to select the return output (1 = P-Value (default), 2 = Test Stats, 3 = Critical Value.) \n double* retVal ///< [out] is the calculated test statistics/\n );\n\n /*! 
\n * \\brief Returns an array of cells for the i-th principal component (or residuals).\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_PCA_VAR(), NDK_PCR_PARAM(), NDK_PCR_FORE(), NDK_PCR_FITTED(), NDK_PCR_ANOVA(), NDK_PCR_GOF(), NDK_PCR_PRFTest(), NDK_PCR_STEPWISE()\n */\n int __stdcall\tNDK_PCR_STEPWISE (double** X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nXSize, ///< [in] is the number of observations (i.e. rows) in X\n size_t nXVars, ///< [in] is the number of variables (i.e. columns) in X\n LPBYTE mask, ///< [in] is the boolean array to select a subset of the input variables in X. If missing (i.e. NULL), all variables in X are included. \n size_t nMaskLen, ///< [in] is the number of elements in mask\n double* Y, ///< [in] is the response or the dependent variable data array (one dimensional array)\n size_t nYSize, ///< [in] is the number of elements in Y \n double intercept, ///< [in] is the constant or the intercept value to fix (e.g. zero). If missing (NaN), an intercept will not be fixed and is computed normally\n double alpha, ///< [in] is the statistical significance of the test (i.e. alpha)\n WORD nMode ///< [in] is a switch to select the variable's inclusion/exclusion approach (1=forward selection (default), 2=backward elimination , 3=bi-directional elimination):\n /// 1. Forward selection\n /// 2. Bacward elemination\n /// 3. Bi-directional elemination\n );\n ///@}\n\n\n\n\n\n /// \\name GLM\n /// Gneralized Linear Model Functions\n /// @{\n \n\n /*! \n * \\brief Examines the model's parameters for constraints (e.g. positive variance, etc.).\n * \\details\n * \\htmlonly\n

Notes

\n
    \n
  1. The number of betas must be equal to the number of explanatory variables (i.e. X) plus one (intercept).
  2. \n
  3. \n For GLM with Poisson distribution:\n
      \n
    • The values of the response variables must be non-negative integers.
    • \n
    • The value of the dispersion factor (Phi) value must be either missing or equal to one.
    • \n
    \n
  4. \n
  5. \n For GLM with Binomial distribution,\n
      \n
    • The values of the response variable must be non-negative fractions between zero and one, inclusive.
    • \n
    • The value of the dispersion factor (Phi) must be a positive fraction (greater than zero, and less than one).
    • \n
    \n
  6. \n
  7. For GLM with Guassian distribution, the dispersion factor (Phi) value must be positive.
  8. \n
\n * \\endhtmlonly\n * \\return status code of the operation\n * \\retval #NDK_TRUE GLM model is valid\n * \\retval #NDK_FALSE GLM model in invalid. For other return values, see \\ref SFMacros.h\n * \\sa NDK_GLM_FITTED(), NDK_GLM_RESID(), NDK_GLM_PARAM(), NDK_GLM_FORE\n */\n int __stdcall NDK_GLM_VALIDATE(double* betas, ///< [in] are the coefficients of the GLM model (a one dimensional array)\n size_t nBetas, ///< [in] is the number of the coefficients in betas. Note that nBetas must be equal to nVars+1\n double phi, ///< [in] is the GLM dispersion paramter. Phi is only meaningful for Binomial (1/batch or trial size) and for Guassian (variance). \n /// - Binomial : phi = Reciprocal of the batch/trial size.\n /// - Gaussion : phi = variance.\n /// - Poisson : phi = 1.0\n WORD Lvk ///< [in] is the link function that describes how the mean depends on the linear predictor (see #GLM_LINK_FUNC). \n /// 1. Identity (default)\n /// 2. Log\n /// 3. Logit\n /// 4. Probit\n /// 5. Complementary log-log\n );\n\n\n\n /*! \n * \\brief Computes the log-likelihood ((LLF), Akaike Information Criterion (AIC) or other goodness of fit function of the GLM model. \n * \n \\htmlonly\n

Notes

\n
    \n
  1. Missng values (i.e. #N/A!) are not allowed in the either response(Y) or the explanatory input arrays.
  2. \n
  3. The number of rows in response variable (Y) must be equal to number of rows of the explanatory variables (X).
  4. \n
  5. The number of betas must equal to the number of explanatory variables (i.e. X) plus one (intercept).
  6. \n
  7. \n For GLM with Poisson distribution,\n
      \n
    • The values of response variable must be non-negative integers.
    • \n
    • The value of the dispersion factor (Phi) value must be either missing or equal to one.
    • \n
    \n
  8. \n
  9. \n For GLM with Binomial distribution,\n
      \n
    • The values of the response variable must be non-negative fractions between zero and one, inclusive.
    • \n
    • The value of the dispersion factor (Phi) must be a positive fraction (greater than zero, and less than one).
    • \n
    \n
  10. \n
  11. For GLM with Guassian distribution, the dispersion factor (Phi) value must be positive.
  12. \n
\n \\endhtmlonly\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GLM_FITTED(), NDK_GLM_RESID(), NDK_GLM_PARAM(), NDK_GLM_FORE\n */\n int __stdcall NDK_GLM_GOF( double* Y, ///< [in] is the response or the dependent variable data array (one dimensional array)\n size_t nSize, ///< [in] is the number of observations \n double** X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nVars, ///< [in] is the number of independent variables (or columns in X)\n double* betas, ///< [in] are the coefficients of the GLM model (a one dimensional array)\n size_t nBetas, ///< [in] is the number of the coefficients in betas. Note that nBetas must be equal to nVars+1\n double phi, ///< [in] is the GLM dispersion paramter. Phi is only meaningful for Binomial (1/batch or trial size) and for Guassian (variance). \n /// - Binomial : phi = Reciprocal of the batch/trial size.\n /// - Gaussion : phi = variance.\n /// - Poisson : phi = 1.0\n WORD Lvk, ///< [in] is the link function that describes how the mean depends on the linear predictor (see #GLM_LINK_FUNC). \n /// 1. Identity (default)\n /// 2. Log\n /// 3. Logit\n /// 4. Probit\n /// 5. Complementary log-log\n WORD retType, ///< [in] is a switch to select a fitness measure ( see \\ref #GOODNESS_OF_FIT_FUNC)\n double* retVal ///< [out] is the calculated goodness of fit measure.\n );\n\n /*! \n * \\brief Returns the standardized residuals/errors of a given GLM. \n * \n \\htmlonly\n

Notes

\n
    \n
  1. Missng values (i.e. #N/A!) are not allowed in the either response(Y) or the explanatory input arrays.
  2. \n
  3. The number of rows in response variable (Y) must be equal to number of rows of the explanatory variables (X).
  4. \n
  5. The number of betas must equal to the number of explanatory variables (i.e. X) plus one (intercept).
  6. \n
  7. \n For GLM with Poisson distribution,\n
      \n
    • The values of response variable must be non-negative integers.
    • \n
    • The value of the dispersion factor (Phi) value must be either missing or equal to one.
    • \n
    \n
  8. \n
  9. \n For GLM with Binomial distribution,\n
      \n
    • The values of the response variable must be non-negative fractions between zero and one, inclusive.
    • \n
    • The value of the dispersion factor (Phi) must be a positive fraction (greater than zero, and less than one).
    • \n
    \n
  10. \n
  11. For GLM with Guassian distribution, the dispersion factor (Phi) value must be positive.
  12. \n
\n \\endhtmlonly\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GLM_FITTED(), NDK_GLM_RESID(), NDK_GLM_PARAM(), NDK_GLM_FORE\n */\n int __stdcall NDK_GLM_RESID( double* Y, ///< [in] is the response or the dependent variable data array (one dimensional array)\n size_t nSize, ///< [in] is the number of observations \n double** X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nVars, ///< [in] is the number of independent variables (or columns in X)\n double* betas, ///< [in] are the coefficients of the GLM model (a one dimensional array)\n size_t nBetas, ///< [in] is the number of the coefficients in betas. Note that nBetas must be equal to nVars+1\n double phi, ///< [in] is the GLM dispersion paramter. Phi is only meaningful for Binomial (1/batch or trial size) and for Guassian (variance). \n /// - Binomial : phi = Reciprocal of the batch/trial size.\n /// - Gaussion : phi = variance.\n /// - Poisson : phi = 1.0\n WORD Lvk, ///< [in] is the link function that describes how the mean depends on the linear predictor (see #GLM_LINK_FUNC). \n /// 1. Identity (default)\n /// 2. Log\n /// 3. Logit\n /// 4. Probit\n /// 5. Complementary log-log\n WORD retType ///< [in] is a switch to select a residuals-type:raw or standardized. see \\ref #RESID_RETVAL_FUNC\n );\n\n /*! \n * \\brief Returns an array of cells for the initial (non-optimal), optimal or standard errors of the model's parameters \n * \n \\htmlonly\n

Notes

\n
    \n
  1. Missng values (i.e. #N/A!) are not allowed in the either response(Y) or the explanatory input arrays.
  2. \n
  3. The number of rows in response variable (Y) must be equal to number of rows of the explanatory variables (X).
  4. \n
  5. The number of betas must equal to the number of explanatory variables (i.e. X) plus one (intercept).
  6. \n
  7. \n For GLM with Poisson distribution,\n
      \n
    • The values of response variable must be non-negative integers.
    • \n
    • The value of the dispersion factor (Phi) value must be either missing or equal to one.
    • \n
    \n
  8. \n
  9. \n For GLM with Binomial distribution,\n
      \n
    • The values of the response variable must be non-negative fractions between zero and one, inclusive.
    • \n
    • The value of the dispersion factor (Phi) must be a positive fraction (greater than zero, and less than one).
    • \n
    \n
  10. \n
  11. For GLM with Guassian distribution, the dispersion factor (Phi) value must be positive.
  12. \n
\n \\endhtmlonly\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GLM_FITTED(), NDK_GLM_RESID(), NDK_GLM_GOF(), NDK_GLM_FORE\n */\n int __stdcall NDK_GLM_PARAM( double* Y, ///< [in] is the response or the dependent variable data array (one dimensional array)\n size_t nSize, ///< [in] is the number of observations \n double** X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nVars, ///< [in] is the number of independent variables (or columns in X)\n double* betas, ///< [inout] are the coefficients of the GLM model (a one dimensional array)\n size_t nBetas, ///< [in] is the number of the coefficients in betas. Note that nBetas must be equal to nVars+1\n double* phi, ///< [inout] is the GLM dispersion paramter. Phi is only meaningful for Binomial (1/batch or trial size) and for Guassian (variance). \n /// - Binomial : phi = Reciprocal of the batch/trial size.\n /// - Gaussion : phi = variance.\n /// - Poisson : phi = 1.0\n WORD Lvk, ///< [in] is the link function that describes how the mean depends on the linear predictor (see #GLM_LINK_FUNC). \n /// 1. Identity (default)\n /// 2. Log\n /// 3. Logit\n /// 4. Probit\n /// 5. Complementary log-log\n WORD retType, ///< [in] is a switch to select the type of value returned: 1= Quick Guess, 2=Calibrated, 3= Std. Errors ( see \\ref #MODEL_RETVAL_FUNC)\n size_t maxIter ///< [in] is the maximum number of iterations used to calibrate the model. If missing, the default maximum of 100 is assumed. \n );\n \n\n /*! \n * \\brief calculates the expected response (i.e. mean) value; given the GLM model and the values of the explanatory variables. \n *\n * \\htmlonly\n

Notes

\n
    \n
  1. Missng values (i.e. #N/A!) are not allowed in the either response(Y) or the explanatory input arrays.
  2. \n
  3. The number of rows in response variable (Y) must be equal to number of rows of the explanatory variables (X).
  4. \n
  5. The number of betas must equal to the number of explanatory variables (i.e. X) plus one (intercept).
  6. \n
  7. For GLM with Poisson distribution,\n
      \n
    • The values of response variable must be non-negative integers.
    • \n
    • The value of the dispersion factor (Phi) value must be either missing or equal to one.
    • \n
    \n
  8. \n
  9. For GLM with Binomial distribution,\n
      \n
    • The values of the response variable must be non-negative fractions between zero and one, inclusive.
    • \n
    • The value of the dispersion factor (Phi) must be a positive fraction (greater than zero, and less than one).
    • \n
    \n
  10. \n
  11. For GLM with Guassian distribution, the dispersion factor (Phi) value must be positive.
  12. \n
\n \\endhtmlonly\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GLM_FITTED(), NDK_GLM_RESID(), NDK_GLM_GOF(), NDK_GLM_FORE\n */\n int __stdcall NDK_GLM_FORE( double* X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nVars, ///< [in] is the number of independent variables (or columns in X)\n double* betas, ///< [inout] are the coefficients of the GLM model (a one dimensional array)\n size_t nBetas, ///< [in] is the number of the coefficients in betas. Note that nBetas must be equal to nVars+1\n double phi, ///< [inout] is the GLM dispersion paramter. Phi is only meaningful for Binomial (1/batch or trial size) and for Guassian (variance). \n /// - Binomial : phi = Reciprocal of the batch/trial size.\n /// - Gaussion : phi = variance.\n /// - Poisson : phi = 1.0\n WORD Lvk, ///< [in] is the link function that describes how the mean depends on the linear predictor (see #GLM_LINK_FUNC). \n /// 1. Identity (default)\n /// 2. Log\n /// 3. Logit\n /// 4. Probit\n /// 5. Complementary log-log\n WORD retType, ///< [in] is a switch to select the type of value returned: 1= Quick Guess, 2=Calibrated, 3= Std. Errors ( see \\ref # FORECAST_RETVAL_FUNC)\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed. 
\n double* retval ///< [out] is the calculated forecast value\n );\n\n\n int __stdcall NDK_GLM_FITTED( double* Y, ///< [inout] is the response or the dependent variable data array (one dimensional array)\n size_t nSize, ///< [in] is the number of observations \n double** X, ///< [in] is the independent variables data matrix, such that each column represents one variable\n size_t nVars, ///< [in] is the number of independent variables (or columns in X)\n double* betas, ///< [in] are the coefficients of the GLM model (a one dimensional array)\n size_t nBetas, ///< [in] is the number of the coefficients in betas. Note that nBetas must be equal to nVars+1\n double phi, ///< [in] is the GLM dispersion paramter. Phi is only meaningful for Binomial (1/batch or trial size) and for Guassian (variance). \n /// - Binomial : phi = Reciprocal of the batch/trial size.\n /// - Gaussion : phi = variance.\n /// - Poisson : phi = 1.0\n WORD Lvk, ///< [in] is the link function that describes how the mean depends on the linear predictor (see #GLM_LINK_FUNC). \n /// 1. Identity (default)\n /// 2. Log\n /// 3. Logit\n /// 4. Probit\n /// 5. Complementary log-log\n WORD retType ///< [in] is a switch to select a output type ( see \\ref #FIT_RETVAL_FUNC)\n );\n ///@}\n\n /// \\name ARMA\n /// The ARMA model is a tool for understanding and forecasting future values in a given time series. The model consists of two parts: an autoregressive component, i.e. AR(p), and a moving average component, i.e. MA(q), and it is referred to as ARMA(p,q). \n /// @{\n \n /*! \n * \\brief Computes the log-likelihood (LLF), Akaike Information Criterion (AIC) or other goodness of fit functions of the ARMA model.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The long-run mean can take any value or be omitted, in which case a zero value is assumed.\n * \\note 4. 
The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_ARMA_PARAM(), NDK_ARMA_VALIDATE(), NDK_ARMA_FORE(), NDK_ARMA_RESID()\n */\n int __stdcall NDK_ARMA_GOF( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the ARMA model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n double* phis, ///< [in] are the parameters of the AR(p) component model (starting with the lowest lag).\n size_t p, ///< [in] is the number of elements in phis (order of AR component).\n double* thetas, ///< [in] are the parameters of the MA(q) component model (starting with the lowest lag).\n size_t q, ///< [in] is the number of elements in thetas (order of MA component).\n WORD retType, ///< [in] is a switch to select a fitness measure ( see \\ref #GOODNESS_OF_FIT_FUNC).\n double* retVal ///< [out] is the calculated goodness of fit value.\n );\n\n /*! \n * \\brief Returns the standardized residuals of a given ARMA model \n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. 
The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\deprecated this function is being replaced by NDK_ARMA_FITTED()\n * \\sa NDK_ARMA_PARAM(), NDK_ARMA_VALIDATE(), NDK_ARMA_FORE(), NDK_ARMA_GOF()\n */\n int __stdcall NDK_ARMA_RESID( double* pData, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double mean, ///< [in] is the ARMA model mean (i.e. mu). \n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations. \n double* phis, ///< [in] are the parameters of the AR(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in phis (order of AR component)\n double* thetas, ///< [in] are the parameters of the MA(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in thetas (order of MA component)\n WORD retType ///< [in] is a switch to select a residuals-type:raw or standardized. see \\ref #RESID_RETVAL_FUNC\n );\n\n\n /*! \n * \\brief Returns the initial (non-optimal), optimal or standard errors of the model's parameters.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. 
NaN) at either end.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_ARMA_GOF(), NDK_ARMA_VALIDATE(), NDK_ARMA_FORE(), NDK_ARMA_RESID()\n */\n int __stdcall NDK_ARMA_PARAM( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double* mean, ///< [inout] is the ARMA model mean (i.e. mu).\n double* sigma, ///< [inout] is the standard deviation of the model's residuals/innovations.\n double* phis, ///< [inout] are the parameters of the AR(p) component model (starting with the lowest lag).\n size_t p, ///< [in] is the number of elements in phis (order of AR component).\n double* thetas, ///< [inout] are the parameters of the MA(q) component model (starting with the lowest lag).\n size_t q, ///< [in] is the number of elements in thetas (order of MA component).\n MODEL_RETVAL_FUNC retType, ///< [in] is a switch to select the type of value returned: 1= Quick Guess, 2=Calibrated, 3= Std. Errors ( see \\ref #MODEL_RETVAL_FUNC).\n size_t maxIter ///< [in] is the maximum number of iterations used to calibrate the model. If missing or less than 100, the default maximum of 100 is assumed. \n );\n\n\n /*! \n * \\brief Calculates the out-of-sample forecast statistics.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The long-run mean can take any value or be omitted, in which case a zero value is assumed.\n * \\note 4. 
The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_ARMA_PARAM(), NDK_ARMA_VALIDATE(), NDK_ARMA_GOF(), NDK_ARMA_RESID()\n */\n int __stdcall NDK_ARMA_FORE( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the ARMA model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n double* phis, ///< [in] are the parameters of the AR(p) component model (starting with the lowest lag).\n size_t p, ///< [in] is the number of elements in phis (order of AR component).\n double* thetas, ///< [in] are the parameters of the MA(q) component model (starting with the lowest lag).\n size_t q, ///< [in] is the number of elements in thetas (order of MA component).\n size_t nStep, ///< [in] is the forecast time/horizon (expressed in terms of steps beyond end of the time series).\n FORECAST_RETVAL_FUNC retType, ///< [in] is a switch to select the type of value returned (FORECAST_MEAN, FORECAST_STDEV , ..)\n /// (see \\ref #FORECAST_RETVAL_FUNC).\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed.\n double* retVal ///< [out] is the calculated forecast value.\n );\n\n\n /*! \n * \\brief Returns the fitted values (i.e. mean, volatility and residuals).\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The long-run mean can take any value or be omitted, in which case a zero value is assumed.\n * \\note 4. 
The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_ARMA_PARAM(), NDK_ARMA_VALIDATE(), NDK_ARMA_GOF(), NDK_ARMA_RESID(), NDK_ARMA_GOF()\n */\n int __stdcall NDK_ARMA_FITTED( double* pData, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the ARMA model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n double* phis, ///< [in] are the parameters of the AR(p) component model (starting with the lowest lag).\n size_t p, ///< [in] is the number of elements in phis (order of AR component).\n double* thetas, ///< [in] are the parameters of the MA(q) component model (starting with the lowest lag).\n size_t q, ///< [in] is the number of elements in thetas (order of MA component).\n FIT_RETVAL_FUNC retType ///< [in] is a switch to select a output type ( see \\ref #FIT_RETVAL_FUNC).\n );\n\n /*! \n * \\brief Examines the model's parameters for stability constraints (e.g. stationarity, invertibility, causality, etc.). \n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The long-run mean can take any value or be omitted, in which case a zero value is assumed.\n * \\note 4. 
The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero\n *\n * \\return status code of the operation\n * \\retval #NDK_TRUE model is stable\n * \\retval #NDK_FALSE model is instable\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_ARMA_PARAM(), NDK_ARMA_VALIDATE(), NDK_ARMA_GOF(), NDK_ARMA_RESID(), NDK_ARMA_GOF()\n */\n int __stdcall NDK_ARMA_VALIDATE(double mean, ///< [in] is the ARMA model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n double* phis, ///< [in] are the parameters of the AR(p) component model (starting with the lowest lag).\n size_t p, ///< [in] is the number of elements in phis (order of AR component).\n double* thetas, ///< [in] are the parameters of the MA(q) component model (starting with the lowest lag).\n size_t q ///< [in] is the number of elements in thetas (order of MA component).\n );\n\n /*! \n * \\brief Returns the simulated values.\n * \n * \\note 1. ARMA_SIM returns an array of one simulation path starting from the end of the input data.\n * \\note 2. The input data argument (i.e. latest observations) is optional. If omitted, an array of zeroes is assumed.\n * \\note 3. The time series is homogeneous or equally spaced.\n * \\note 4. The time series may include missing values (e.g. NaN) at either end.\n * \\note 5. The long-run mean can take any value or be omitted, in which case a zero value is assumed.\n * \\note 6. The residuals/innovations standard deviation (sigma) must be greater than zero.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_ARMA_PARAM(), NDK_ARMA_VALIDATE(), NDK_ARMA_GOF(), NDK_ARMA_RESID(), NDK_ARMA_GOF()\n */\n int __stdcall NDK_ARMA_SIM(double mean, ///< [in] is the ARMA model long-run mean (i.e. 
mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n double* phis, ///< [in] are the parameters of the AR(p) component model (starting with the lowest lag).\n size_t p, ///< [in] is the number of elements in phis (order of AR component).\n double* thetas, ///< [in] are the parameters of the MA(q) component model (starting with the lowest lag).\n size_t q, ///< [in] is the number of elements in thetas (order of MA component).\n double* pData, ///< [in] are the values of the latest (most recent) observations.\n size_t nSize, ///< [in] is the number elements in pData.\n UINT nSeed, ///< [in] is an unsigned integer to initialize the psuedorandom number generator.\n double* retArray, ///< [out] is the output array to hold nSteps future simulations.\n size_t nSteps ///< [in] is the number of future steps to simulate for.\n );\n\n ///@}\n\n /// \\name ARIMA\n /// ARIMA model functions\n /// @{\n\n /*! \n * \\brief Examines the model's parameters for stability constraints (e.g. stationarity, invertibility, causality, etc.).\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The integration order argument (d) must be a positive integer.\n * \\note 3. The time series may include missing values (e.g. NaN) at either end.\n * \\note 4. The long-run mean can take any value or may be omitted, in which case a zero value is assumed.\n * \\note 5. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_ARIMA_GOF(), NDK_ARIMA_PARAM(), NDK_ARIMA_FORE(), NDK_ARIMA_FITTED(), NDK_ARIMA_SIM()\n */\n\n int __stdcall NDK_ARIMA_VALIDATE( double mean, ///< [in] is the ARMA model mean (i.e. 
mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the integration order.\n double* phis, ///< [in] are the parameters of the AR(p) component model (starting with the lowest lag).\n size_t p, ///< [in] is the number of elements in phis (order of AR component).\n double* thetas, ///< [in] are the parameters of the MA(q) component model (starting with the lowest lag).\n size_t q ///< [in] is the number of elements in thetas (order of MA component).\n );\n /*! \n * \\brief Computes the log-likelihood ((LLF), Akaike Information Criterion (AIC) or other goodness of fit functions of the ARIMA model.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The long-run mean can take any value or be omitted, in which case a zero value is assumed.\n * \\note 4. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_ARIMA_VALIDATE(), NDK_ARIMA_PARAM(), NDK_ARIMA_FORE(), NDK_ARIMA_FITTED(), NDK_ARIMA_SIM()\n */\n int __stdcall NDK_ARIMA_GOF( double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the ARMA model mean (i.e. 
mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the model's integration order.\n double* phis, ///< [in] are the parameters of the AR(p) component model (starting with the lowest lag).\n size_t p, ///< [in] is the number of elements in phis (order of AR component).\n double* thetas, ///< [in] are the parameters of the MA(q) component model (starting with the lowest lag).\n size_t q, ///< [in] is the number of elements in thetas (order of MA component).\n GOODNESS_OF_FIT_FUNC retType, ///< [in] is a switch to select a fitness measure ( see \\ref #GOODNESS_OF_FIT_FUNC).\n double* retVal ///< [out] is the calculated GOF return value.\n );\n /*! \n * \\brief Returns the quick guess, optimal (calibrated) or std. errors of the values of the model's parameters.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The integration order argument (d) must be a positive integer.\n * \\note 4. The long-run mean can take any value or may be omitted, in which case a zero value is assumed.\n * \\note 5. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_ARIMA_VALIDATE(), NDK_ARIMA_GOF(), NDK_ARIMA_FORE(), NDK_ARIMA_FITTED(), NDK_ARIMA_SIM()\n */\n int __stdcall NDK_ARIMA_PARAM( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double* mean, ///< [inout] is the ARMA model mean (i.e. 
mu).\n double* sigma, ///< [inout] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the model's integration order.\n double* phis, ///< [inout] are the parameters of the AR(p) component model (starting with the lowest lag).\n size_t p, ///< [in] is the number of elements in phis (order of AR component).\n double* thetas, ///< [inout] are the parameters of the MA(q) component model (starting with the lowest lag).\n size_t q, ///< [in] is the number of elements in thetas (order of MA component).\n MODEL_RETVAL_FUNC retType, ///< [in] is a switch to select the type of value returned: 1= Quick Guess, 2=Calibrated, 3= Std. Errors ( see \\ref #MODEL_RETVAL_FUNC).\n size_t maxIter ///< [in] is the maximum number of iterations used to calibrate the model. If missing or less than 100, the default maximum of 100 is assumed.\n );\n\n\n /*! \n * \\brief Calculates the out-of-sample simulated values. \n * \n * \\note 1. The input data argument (i.e. latest observations) is optional. If omitted, an array of zeroes is assumed. \n * \\note 2. The time series is homogeneous or equally spaced.\n * \\note 3. The time series may include missing values (e.g. NaN) at either end.\n * \\note 4. The input data argument (i.e. latest observations) is optional. If omitted, an array of zeroes is assumed. \n * \\note 5. The residuals/innovations standard deviation (sigma) must be greater than zero.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_ARIMA_VALIDATE(), NDK_ARIMA_GOF(), NDK_ARIMA_FORE(), NDK_ARIMA_FITTED(), NDK_ARIMA_PARAM()\n */\n int __stdcall NDK_ARIMA_SIM( double mean, ///< [in] is the ARMA model mean (i.e. 
mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the model's integration order.\n double* phis, ///< [in] are the parameters of the AR(p) component model (starting with the lowest lag).\n size_t p, ///< [in] is the number of elements in phis (order of AR component).\n double* thetas, ///< [in] are the parameters of the MA(q) component model (starting with the lowest lag).\n size_t q, ///< [in] is the number of elements in thetas (order of MA component).\n double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n UINT nSeed, ///< [in] is an unsigned integer for setting up the random number generators.\n double* retVal, ///< [out] is the calculated simulation value.\n size_t nSteps ///< [in] is the number of future steps to simulate for.\n );\n /*! \n * \\brief Calculates the out-of-sample conditional forecast (i.e. mean, error, and confidence interval).\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The integration order argument (d) must be a positive integer.\n * \\note 4. The long-run mean can take any value or may be omitted, in which case a zero value is assumed.\n * \\note 5. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_ARIMA_VALIDATE(), NDK_ARIMA_GOF(), NDK_ARIMA_SIM(), NDK_ARIMA_FITTED(), NDK_ARIMA_PARAM()\n */\n int __stdcall NDK_ARIMA_FORE( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the ARMA model mean (i.e. 
mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the model's integration order.\n double* phis, ///< [in] are the parameters of the AR(p) component model (starting with the lowest lag).\n size_t p, ///< [in] is the number of elements in phis (order of AR component).\n double* thetas, ///< [in] are the parameters of the MA(q) component model (starting with the lowest lag).\n size_t q, ///< [in] is the number of elements in thetas (order of MA component).\n size_t nStep, ///< [in] is the forecast time/horizon (expressed in terms of steps beyond end of the time series).\n FORECAST_RETVAL_FUNC retType, ///< [in] is a switch to select the type of value returned (see \\ref #FORECAST_RETVAL_FUNC).\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed.\n double* retVal ///< [out] is the calculated forecast value.\n );\n /*! \n * \\brief Returns the in-sample model fitted values of the conditional mean, volatility or residuals.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The integration order argument (d) must be a positive integer.\n * \\note 4. The long-run mean can take any value or may be omitted, in which case a zero value is assumed.\n * \\note 5. 
The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_ARIMA_VALIDATE(), NDK_ARIMA_GOF(), NDK_ARIMA_SIM(), NDK_ARIMA_FORE(), NDK_ARIMA_PARAM()\n */\n int __stdcall NDK_ARIMA_FITTED( double* pData, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the ARMA model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the model's integration order.\n double* phis, ///< [in] are the parameters of the AR(p) component model (starting with the lowest lag).\n size_t p, ///< [in] is the number of elements in phis (order of AR component).\n double* thetas, ///< [in] are the parameters of the MA(q) component model (starting with the lowest lag).\n size_t q, ///< [in] is the number of elements in thetas (order of MA component).\n FIT_RETVAL_FUNC retType ///< [in] is a switch to select a output type ( see \\ref #FIT_RETVAL_FUNC).\n );\n ///@}\n\n\n /// \\name FARIMA\n /// Fractional ARIMA model functions\n /// @{\n /*! \n * \\brief Computes the log-likelihood ((LLF), Akaike Information Criterion (AIC) or other goodness of fit function of the FARIMA model. \n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. 
The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_AIRLINE_RESID(), NDK_AIRLINE_PARAM(), NDK_AIRLINE_FORE(), NDK_AIRLINE_FITTED(), NDK_AIRLINE_VALIDATE()\n */\n int __stdcall NDK_FARIMA_GOF( double* pData, size_t nSize, double mean, double sigma, double nIntegral, double* phis, size_t p, double* thetas, size_t q, WORD retType, double* retVal);\n /*! \n * \\brief Returns the standardized residuals of a given FARIMA model \n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_AIRLINE_GOF(), NDK_AIRLINE_PARAM(), NDK_AIRLINE_FORE(), NDK_AIRLINE_FITTED(), NDK_AIRLINE_VALIDATE()\n */\n int __stdcall NDK_FARIMA_RESID( double* pData/*IN-OUT*/, size_t nSize, double mean, double sigma, double nIntegral, double* phis, size_t p, double* thetas, size_t q, WORD retType);\n /*! \n * \\brief Returns the initial (non-optimal), optimal or standard errors of the model's parameters.\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. 
The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_AIRLINE_GOF(), NDK_AIRLINE_RESID(), NDK_AIRLINE_FORE(), NDK_AIRLINE_FITTED(), NDK_AIRLINE_VALIDATE()\n */\n int __stdcall NDK_FARIMA_PARAM( double* pData, size_t nSize, double* mean, double* sigma, double nIntegral, double* phis, size_t p, double* thetas, size_t q, WORD retType, size_t maxIter);\n\n /*! \n * \\brief Returns a simulated data series the underlying FARIMA process.\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_FARIMA_GOF(), NDK_AIRLINE_RESID(), NDK_AIRLINE_FORE(), NDK_AIRLINE_FITTED(), NDK_AIRLINE_VALIDATE()\n */\n int __stdcall NDK_FARIMA_SIM( double* pData, size_t nSize, double mean, double sigma, double nIntegral, double* phis, size_t p, double* thetas, size_t q, size_t nStep , size_t nSeed, double* retVal);\n\n /*! \n * \\brief Calculates the out-of-sample forecast statistics.\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. 
The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_FARIMA_GOF(), NDK_FARIMA_RESID(), NDK_FARIMA_PARAM(), NDK_FARIMA_FITTED(), NDK_FARIMA_VALIDATE()\n */\n int __stdcall NDK_FARIMA_FORE( double* pData, size_t nSize, double mean, double sigma, double nIntegral, double* phis, size_t p, double* thetas, size_t q, size_t nStep , WORD retType, double* retVal);\n /*! \n * \\brief Returns an array of cells for the fitted values (i.e. mean, volatility and residuals)\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_FARIMA_GOF(), NDK_FARIMA_RESID(), NDK_FARIMA_PARAM(), NDK_FARIMA_FORE(), NDK_FARIMA_VALIDATE()\n */\n int __stdcall NDK_FARIMA_FITTED( double* pData, size_t nSize, double mean, double sigma, double nIntegral, double* phis, size_t p, double* thetas, size_t q, WORD retType);\n ///@}\n\n\n /// \\name SARIMA\n /// Seasonal ARIMA model functions\n /// @{\n\n /*! \n * \\brief Computes the log-likelihood ((LLF), Akaike Information Criterion (AIC) or other goodness of fit function of the SARIMA model.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 4. 
The maximum likelihood estimation (MLE) is a statistical method for fitting a model to the data and provides estimates for the model's parameters.\n * \\note 5. The long-run mean argument (mean) can take any value or be omitted, in which case a zero value is assumed.\n * \\note 6. The non-seasonal integration order - d - is optional and can be omitted, in which case d is assumed to be zero.\n * \\note 7. The seasonal integration order - sD - is optional and can be omitted, in which case sD is assumed to be zero.\n * \\note 8. The season length - s - is optional and can be omitted, in which case s is assumed to be zero (i.e. plain ARIMA).\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SARIMA_RESID(), NDK_SARIMA_PARAM(), NDK_SARIMA_FORE(), NDK_SARIMA_FITTED(), NDK_SARIMA_VALIDATE()\n */\n int __stdcall NDK_SARIMA_GOF( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the non-seasonal difference order.\n double* phis, ///< [in] are the coefficients's values of the non-seasonal AR component.\n size_t p, ///< [in] is the order of the non-seasonal AR component.\n double* thetas, ///< [in] are the coefficients's values of the non-seasonal MA component.\n size_t q, ///< [in] is the order of the non-seasonal MA component.\n WORD nSIntegral, ///< [in] is the seasonal difference.\n WORD nSPeriod, ///< [in] is the number of observations per one period (e.g. 
12=Annual, 4=Quarter).\n double* sPhis, ///< [in] are the coefficients's values of the seasonal AR component.\n size_t sP, ///< [in] is the order of the seasonal AR component.\n double* sThetas, ///< [in] are the coefficients's values of the seasonal MA component.\n size_t sQ, ///< [in] is the order of the seasonal MA component.\n GOODNESS_OF_FIT_FUNC retType, ///< [in] is a switch to select a fitness measure ( see \\ref #GOODNESS_OF_FIT_FUNC).\n double* retVal ///< [out] is the calculated goodness of fit value.\n );\n /*! \n * \\brief Returns the quick guess, optimal (calibrated) or std. errors of the values of model's parameters.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The long-run mean argument (mean) can take any value or be omitted, in which case a zero value is assumed.\n * \\note 4. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 5. The non-seasonal integration order - d - is optional and can be omitted, in which case d is assumed to be zero.\n * \\note 6. The seasonal integration order - sD - is optional and can be omitted, in which case sD is assumed to be zero.\n * \\note 7. The season length - s - is optional and can be omitted, in which case s is assumed to be zero (i.e. 
plain ARIMA).\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SARIMA_GOF(), NDK_SARIMA_RESID(), NDK_SARIMA_FORE(), NDK_SARIMA_FITTED(), NDK_SARIMA_VALIDATE()\n */\n int __stdcall NDK_SARIMA_PARAM( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double* mean, ///< [inout] is the mean of the ARMA process.\n double* sigma, ///< [inout] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the non-seasonal difference order.\n double* phis, ///< [inout] are the coefficients's values of the non-seasonal AR component.\n size_t p, ///< [in] is the order of the non-seasonal AR component.\n double* thetas, ///< [inout] are the coefficients's values of the non-seasonal MA component.\n size_t q, ///< [in] is the order of the non-seasonal MA component.\n WORD nSIntegral, ///< [in] is the seasonal difference.\n WORD nSPeriod, ///< [in] is the number of observations per one period (e.g. 12=Annual, 4=Quarter).\n double* sPhis, ///< [inout] are the coefficients's values of the seasonal AR component.\n size_t sP, ///< [in] is the order of the seasonal AR component.\n double* sThetas, ///< [inout] are the coefficients's values of the seasonal MA component.\n size_t sQ, ///< [in] is the order of the seasonal MA component.\n MODEL_RETVAL_FUNC retType, ///< [in] is a switch to select the type of value returned: 1= Quick Guess, 2=Calibrated, 3= Std. Errors ( see \\ref #MODEL_RETVAL_FUNC).\n size_t maxIter ///< [in] is the maximum number of iterations used to calibrate the model. If missing or less than 100, the default maximum of 100 is assumed.\n );\n\n\n /*! \n * \\brief Returns the initial (non-optimal), optimal or standard errors of the model's parameters.\n * \n * \\note 1. 
The time series is homogeneous or equally spaced.\n * \\note 2. SARIMA_SIM returns an array of one simulation path starting from the end of the input data.\n * \\note 3. The time series may include missing values (e.g. NaN) at either end.\n * \\note 4. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 5. The input data argument (i.e. latest observations) is optional. If omitted, an array of zeroes is assumed.\n * \\note 6. The long-run mean argument (mean) can take any value or be omitted, in which case a zero value is assumed.\n * \\note 7. The non-seasonal integration order - d - is optional and can be omitted, in which case d is assumed to be zero.\n * \\note 8. The seasonal integration order - sD - is optional and can be omitted, in which case sD is assumed to be zero.\n * \\note 9. The season length - s - is optional and can be omitted, in which case s is assumed to be zero (i.e. Plain ARIMA).\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SARIMA_GOF(), NDK_SARIMA_RESID(), NDK_SARIMA_FORE(), NDK_SARIMA_FITTED(), NDK_SARIMA_VALIDATE()\n */\n int __stdcall NDK_SARIMA_SIM( double mean, ///< [in] is the model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the non-seasonal difference order.\n double* phis, ///< [in] are the coefficients's values of the non-seasonal AR component.\n size_t p, ///< [in] is the order of the non-seasonal AR component.\n double* thetas, ///< [in] are the coefficients's values of the non-seasonal MA component.\n size_t q, ///< [in] is the order of the non-seasonal MA component.\n WORD nSIntegral, ///< [in] is the seasonal difference.\n WORD nSPeriod, ///< [in] is the number of observations per one period (e.g. 
12=Annual, 4=Quarter).\n double* sPhis, ///< [in] are the coefficients's values of the seasonal AR component.\n size_t sP, ///< [in] is the order of the seasonal AR component.\n double* sThetas, ///< [in] are the coefficients's values of the seasonal MA component.\n size_t sQ, ///< [in] is the order of the seasonal MA component.\n double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n size_t nSeed, ///< [in] is an unsigned integer for setting up the random number generators.\n double* retVal, ///< [out] is the simulated value.\n size_t nStep ///< [in] is the simulation time/horizon (expressed in terms of steps beyond end of the time series).\n );\n /*! \n * \\brief Calculates the out-of-sample conditional forecast (i.e. mean, error, and confidence interval).\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The long-run mean argument (mean) can take any value or be omitted, in which case a zero value is assumed.\n * \\note 4. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 5. The non-seasonal integration order - d - is optional and can be omitted, in which case d is assumed to be zero.\n * \\note 6. The seasonal integration order - sD - is optional and can be omitted, in which case sD is assumed to be zero.\n * \\note 7. The season length - s - is optional and can be omitted, in which case s is assumed to be zero (i.e. plain ARIMA). 
\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SARIMA_GOF(), NDK_SARIMA_RESID(), NDK_SARIMA_PARAM(), NDK_SARIMA_FITTED(), NDK_SARIMA_VALIDATE()\n */\n int __stdcall NDK_SARIMA_FORE(double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the non-seasonal difference order.\n double* phis, ///< [in] are the coefficients's values of the non-seasonal AR component.\n size_t p, ///< [in] is the order of the non-seasonal AR component.\n double* thetas, ///< [in] are the coefficients's values of the non-seasonal MA component.\n size_t q, ///< [in] is the order of the non-seasonal MA component.\n WORD nSIntegral, ///< [in] is the seasonal difference.\n WORD nSPeriod, ///< [in] is the number of observations per one period (e.g. 12=Annual, 4=Quarter).\n double* sPhis, ///< [in] are the coefficients's values of the seasonal AR component.\n size_t sP, ///< [in] is the order of the seasonal AR component.\n double* sThetas, ///< [in] are the coefficients's values of the seasonal MA component.\n size_t sQ, ///< [in] is the order of the seasonal MA component.\n size_t nStep, ///< [in] is the forecast time/horizon (expressed in terms of steps beyond end of the time series).\n FORECAST_RETVAL_FUNC retType, ///< [in] is a switch to select the type of value returned (see \\ref #FORECAST_RETVAL_FUNC).\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed.\n double* retVal ///< [out] is the calculated forecast value.\n );\n /*! 
\n * \\brief Returns the in-sample model fitted values of the conditional mean, volatility or residuals.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The long-run mean argument (mean) can take any value or be omitted, in which case a zero value is assumed.\n * \\note 4. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 5. The non-seasonal integration order - d - is optional and can be omitted, in which case d is assumed to be zero.\n * \\note 6. The seasonal integration order - sD - is optional and can be omitted, in which case sD is assumed to be zero.\n * \\note 7. The season length - s - is optional and can be omitted, in which case s is assumed to be zero (i.e. plain ARIMA).\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SARIMA_GOF(), NDK_SARIMA_RESID(), NDK_SARIMA_PARAM(), NDK_SARIMA_FORE(), NDK_SARIMA_VALIDATE()\n */\n int __stdcall NDK_SARIMA_FITTED(double* pData, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the non-seasonal difference order.\n double* phis, ///< [in] are the coefficients's values of the non-seasonal AR component.\n size_t p, ///< [in] is the order of the non-seasonal AR component.\n double* thetas, ///< [in] are the coefficients's values of the non-seasonal MA component.\n size_t q, ///< [in] is the order of the non-seasonal MA component.\n WORD nSIntegral, ///< [in] is the seasonal difference.\n WORD nSPeriod, ///< [in] is the number of observations per one period (e.g. 
12=Annual, 4=Quarter).\n double* sPhis, ///< [in] are the coefficients's values of the seasonal AR component.\n size_t sP, ///< [in] is the order of the seasonal AR component.\n double* sThetas, ///< [in] are the coefficients's values of the seasonal MA component.\n size_t sQ, ///< [in] is the order of the seasonal MA component.\n FIT_RETVAL_FUNC retType ///< [in] is a switch to select a output type ( see \\ref #FIT_RETVAL_FUNC).\n );\n /*! \n * \\brief Examines the model's parameters for stability constraints (e.g. stationarity, invertibility, causality, etc.).\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 4. The long-run mean argument (mean) can take any value or be omitted, in which case a zero value is assumed.\n * \\note 5. The non-seasonal integration order - d - is optional and can be omitted, in which case d is assumed to be zero.\n * \\note 6. The seasonal integration order - sD - is optional and can be omitted, in which case sD is assumed to be zero.\n * \\note 7. The season length - s - is optional and can be omitted, in which case s is assumed to be zero (i.e. plain ARIMA).\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SARIMA_GOF(), NDK_SARIMA_RESID(), NDK_SARIMA_PARAM(), NDK_SARIMA_FORE(), NDK_SARIMA_FITTED()\n */\n int __stdcall NDK_SARIMA_VALIDATE(double mean, ///< [in] is the model mean (i.e. 
mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the non-seasonal difference order.\n double* phis, ///< [in] are the coefficients's values of the non-seasonal AR component.\n size_t p, ///< [in] is the order of the non-seasonal AR component.\n double* thetas, ///< [in] are the coefficients's values of the non-seasonal MA component.\n size_t q, ///< [in] is the order of the non-seasonal MA component.\n WORD nSIntegral, ///< [in] is the seasonal difference.\n WORD nSPeriod, ///< [in] is the number of observations per one period (e.g. 12=Annual, 4=Quarter).\n double* sPhis, ///< [in] are the coefficients's values of the seasonal AR component.\n size_t sP, ///< [in] is the order of the seasonal AR component.\n double* sThetas, ///< [in] are the coefficients's values of the seasonal MA component.\n size_t sQ ///< [in] is the order of the seasonal MA component.\n );\n ///@}\n\n /// \\name AirLine\n ///AirLine model functions\n /// @{\n\n /*! \n * \\brief Computes the log-likelihood ((LLF), Akaike Information Criterion (AIC) or other goodness of fit functions of the AirLine model.\n * \n * \\note 1. The Airline model is a special case of multiplicative seasonal ARIMA model, and it assumes independent and normally distributed residuals with constant variance.\n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. 
The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_AIRLINE_RESID(), NDK_AIRLINE_PARAM(), NDK_AIRLINE_FORE(), NDK_AIRLINE_FITTED(), NDK_AIRLINE_VALIDATE()\n */\n int __stdcall NDK_AIRLINE_GOF(double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the model mean (i.e. \\f$\\mu\\f$).\n double sigma, ///< [in] is the standard deviation (\\f$\\sigma\\f$) of the model's residuals/innovations.\n WORD S, ///< [in] is the length of seasonality (expressed in terms of lags, where s > 1).\n double theta, ///< [in] is the coefficient of first-lagged innovation (\\f$\\theta\\f$)(see model description).\n double theta2, ///< [in] is the coefficient of s-lagged innovation (\\f$\\Theta\\f$) (see model description).\n GOODNESS_OF_FIT_FUNC retType, ///< [in] is a switch to select a fitness measure ( see \\ref #GOODNESS_OF_FIT_FUNC).\n double* retVal ///< [out] is the calculated value of the goodness of fit.\n );\n\n /*! \n * \\brief Returns an array of cells for the standardized residuals of a given AirLine model.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. 
The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\deprecated this function is being replaced by NDK_AIRLINE_FITTED()\n * \\sa NDK_AIRLINE_GOF(), NDK_AIRLINE_PARAM(), NDK_AIRLINE_FORE(), NDK_AIRLINE_FITTED(), NDK_AIRLINE_VALIDATE()\n */\n int __stdcall NDK_AIRLINE_RESID( double* pData, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD S, ///< [in] is the length of seasonality (expressed in terms of lags, where s > 1).\n double theta, ///< [in] is the coefficient of first-lagged innovation (see model description).\n double theta2, ///< [in] is the coefficient of s-lagged innovation (see model description).\n RESID_RETVAL_FUNC retType ///< [in] is a switch to select a residuals-type:raw or standardized. see \\ref #RESID_RETVAL_FUNC.\n );\n\n\n /*! \n * \\brief Returns the initial/quick guess of the model's parameters.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. 
The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_AIRLINE_GOF(), NDK_AIRLINE_RESID(), NDK_AIRLINE_FORE(), NDK_AIRLINE_FITTED(), NDK_AIRLINE_VALIDATE()\n */\n int __stdcall NDK_AIRLINE_PARAM( double* pData, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double* mean, ///< [inout] is the model mean (i.e. mu).\n double* sigma, ///< [inout] is the standard deviation of the model's residuals/innovations.\n WORD S, ///< [in] is the length of seasonality (expressed in terms of lags, where s > 1).\n double* theta, ///< [inout] is the coefficient of first-lagged innovation (see model description).\n double* theta2, ///< [inout] is the coefficient of s-lagged innovation (see model description.\n MODEL_RETVAL_FUNC retType, ///< [in] is a switch to select the type of value returned: 1= Quick Guess, 2=Calibrated, 3= Std. Errors ( see \\ref #MODEL_RETVAL_FUNC).\n size_t maxIter ///< [in] is the maximum number of iterations used to calibrate the model. If missing or less than 100, the default maximum of 100 is assumed. \n );\n\n /*! \n * \\brief Calculates the out-of-sample forecast statistics.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The long-run mean argument (mean) can take any value or be omitted, in which case a zero value is assumed.\n * \\note 4. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 5. The season length must be greater than one.\n * \\note 6. 
The input argument for the non-seasonal MA parameter - theta - is optional and can be omitted, in which case no non-seasonal MA component is included.\n * \\note 7. The input argument for the seasonal MA parameter - theta2 - is optional and can be omitted, in which case no seasonal MA component is included.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_AIRLINE_GOF(), NDK_AIRLINE_RESID(), NDK_AIRLINE_PARAM(), NDK_AIRLINE_FITTED(), NDK_AIRLINE_VALIDATE()\n */\n int __stdcall NDK_AIRLINE_FORE( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD S, ///< [in] is the length of seasonality (expressed in terms of lags, where s > 1).\n double theta, ///< [in] is the coefficient of first-lagged innovation (see model description).\n double theta2, ///< [in] is the coefficient of s-lagged innovation (see model description).\n size_t nStep, ///< [in] is the forecast time/horizon (expressed in terms of steps beyond end of the time series).\n FORECAST_RETVAL_FUNC retType, ///< [in] is a switch to select the type of value returned (see \\ref #FORECAST_RETVAL_FUNC).\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed.\n double* retVal ///< [out] is the calculated forecast value.\n );\n\n /*! \n * \\brief Calculates the out-of-sample conditional mean forecast.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The input data argument (i.e. latest observations) is optional. If omitted, a value of zero is assumed.\n * \\note 3. The time series may include missing values (e.g. NaN) at either end.\n * \\note 4. 
The \\f$\\epsilon\\f$ are normally distributed with mean zero and unit standard deviation.\n * \\note 5. The long-run mean argument (mean) can take any value or be omitted, in which case a zero value is assumed.\n * \\note 6. The value of the residuals/innovations standard deviation (sigma) must be positive.\n * \\note 7. The season length must be greater than one.\n * \\note 8. The input argument for the non-seasonal MA parameter - theta - is optional and can be omitted, in which case no non-seasonal MA component is included.\n * \\note 9. The input argument for the seasonal MA parameter - theta2 - is optional and can be omitted, in which case no seasonal MA component is included. \n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_AIRLINE_VALIDATE(), NDK_AIRLINE_GOF(), NDK_AIRLINE_FORE(), NDK_AIRLINE_FITTED(), NDK_AIRLINE_PARAM()\n */\n int __stdcall NDK_AIRLINE_SIM( double* pData, ///< [in] is a univariate time series of the initial values (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD S, ///< [in] is the length of seasonality (expressed in terms of lags, where s > 1).\n double theta, ///< [in] is the coefficient of first-lagged innovation (see model description).\n double theta2, ///< [in] is the coefficient of s-lagged innovation (see model description).\n UINT nSeed, ///< [in] is an unsigned integer for setting up the random number generators.\n double* retArray, ///< [out] is the calculated simulation value.\n size_t nSteps ///< [in] is the number of future steps to simulate for.\n );\n\n\n\n /*! \n * \\brief Returns the fitted values of the conditional mean.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. 
The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The long-run mean argument (mean) can take any value or be omitted, in which case a zero value is assumed.\n * \\note 4. The season length must be greater than one.\n * \\note 5. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 6. The input argument for the non-seasonal MA parameter - theta - is optional and can be omitted, in which case no non-seasonal MA component is included.\n * \\note 7. The input argument for the seasonal MA parameter - theta2 - is optional and can be omitted, in which case no seasonal MA component is included.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_AIRLINE_GOF(), NDK_AIRLINE_RESID(), NDK_AIRLINE_PARAM(), NDK_AIRLINE_FORE(), NDK_AIRLINE_VALIDATE()\n */\n int __stdcall NDK_AIRLINE_FITTED( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double mean, ///< [in] is the model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD S, ///< [in] is the length of seasonality (expressed in terms of lags, where s > 1).\n double theta, ///< [in] is the coefficient of first-lagged innovation (see model description).\n double theta2, ///< [in] is the coefficient of s-lagged innovation (see model description).\n FIT_RETVAL_FUNC retType ///< [in] is a switch to select a output type ( see \\ref #FIT_RETVAL_FUNC).\n );\n\n /*! \n * \\brief Examines the model's parameters for stability constraints (e.g. stationarity, etc.).\n * \n * \\note 1. The Airline model is a special case of multiplicative seasonal ARIMA model, and it assumes independent and normally distributed residuals with constant variance.\n * \\note 2. 
The time series is homogeneous or equally spaced.\n * \\note 3. The time series may include missing values (e.g. NaN) at either end.\n * \\note 4. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_AIRLINE_GOF(), NDK_AIRLINE_RESID(), NDK_AIRLINE_PARAM(), NDK_AIRLINE_FORE(), NDK_AIRLINE_FITTED()\n */\n int __stdcall NDK_AIRLINE_VALIDATE( double mean, ///< [in] is the model mean (i.e. mu).\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD S, ///< [in] is the length of seasonality (expressed in terms of lags, where s > 1).\n double theta, ///< [in] is the coefficient of first-lagged innovation (see model description).\n double theta2 ///< [in] is the coefficient of s-lagged innovation (see model description).\n );\n ///@}\n\n /// \\name X12-ARIMA\n /// Seasonal adjustments using X12-ARIMA API function calls\n /// @{\n\n /*! \n * \\brief Initialize the filesystem environment on the local machine for the current user \n * \n * \\note 1. This function creates a subfolder under the current user local profile for X12ARIMA models, and copies all the scripts needed to run the x12a program.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X12_ENV_CLEANUP(), NDK_X12_SCEN_INIT(), NDK_X12_SCEN_CLEAUP(), NDK_X12_DATA_FILE(), NDK_X12_SPC_FILE(), NDK_X12_RUN_BATCH(), NDK_X12_RUN_SCENARIO(), NDK_X12_RUN_STAT(), NDK_X12_OUT_FILE(), NDK_X12_OUT_SERIES(), NDK_X12_FORE_SERIES()\n */\n int __stdcall NDK_X12_ENV_INIT(BOOL override ///< [in] is a boolean flag to wipe out existing files and copy new ones.\n );\n /*! 
\n * \\brief Finalize the X12A environment and release any resources allocated \n * \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X12_ENV_INIT(), NDK_X12_SCEN_INIT(), NDK_X12_SCEN_CLEAUP(), NDK_X12_DATA_FILE(), NDK_X12_SPC_FILE(), NDK_X12_RUN_BATCH(), NDK_X12_RUN_SCENARIO(), NDK_X12_RUN_STAT(), NDK_X12_OUT_FILE(), NDK_X12_OUT_SERIES(), NDK_X12_FORE_SERIES()\n */\n int __stdcall NDK_X12_ENV_CLEANUP(void);\n\n /*! \n * \\brief Initialize the required files for the given scenario/model\n * \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X12_ENV_INIT(), NDK_X12_ENV_CLEANUP(), NDK_X12_SCEN_CLEAUP(), NDK_X12_DATA_FILE(), NDK_X12_SPC_FILE(), NDK_X12_RUN_BATCH(), NDK_X12_RUN_SCENARIO(), NDK_X12_RUN_STAT(), NDK_X12_OUT_FILE(), NDK_X12_OUT_SERIES(), NDK_X12_FORE_SERIES()\n */\n int __stdcall NDK_X12_SCEN_INIT(LPCTSTR szScenarioName, ///< [in] is the scenario name, must be unique\n LPVOID X12Options ///< [in] (optional) is an instance of #X12ARIMA_OPTIONS structure with all X12 model options.\n );\n /*! \n * \\brief Finalize the given scenario/model and free allocated resources\n * \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X12_ENV_INIT(), NDK_X12_ENV_CLEANUP(), NDK_X12_SCEN_INIT(), NDK_X12_DATA_FILE(), NDK_X12_SPC_FILE(), NDK_X12_RUN_BATCH(), NDK_X12_RUN_SCENARIO(), NDK_X12_RUN_STAT(), NDK_X12_OUT_FILE(), NDK_X12_OUT_SERIES(), NDK_X12_FORE_SERIES()\n */\n int __stdcall NDK_X12_SCEN_CLEAUP(LPCTSTR szScenarioName /*!< [in] is the scenario name or the model unique identifier */);\n\n\n /*! 
\n * \\brief Write the given data into an X12a formatted data file\n * \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X12_ENV_INIT(), NDK_X12_ENV_CLEANUP(), NDK_X12_SCEN_INIT(), NDK_X12_SCEN_CLEAUP(), NDK_X12_SPC_FILE(), NDK_X12_RUN_BATCH(), NDK_X12_RUN_SCENARIO(), NDK_X12_RUN_STAT(), NDK_X12_OUT_FILE(), NDK_X12_OUT_SERIES(), NDK_X12_FORE_SERIES()\n */\n int __stdcall NDK_X12_DATA_FILE( LPCTSTR szScenarioName, \n double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nLen, ///< [in] is the number of observations in X\n BOOL monthly, ///< [in] is a boolean flag for whether the data is monthly/quartelry sampled.\n LONG startDate, ///< [in] is the serial date number of the 1st observation in the series\n WORD reserved ///< [in] is a reserved argument for future releases. must be set to 1\n );\n /*! \n * \\brief Create or updates the x12a specification file using the options selected \n * \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X12_ENV_INIT(), NDK_X12_ENV_CLEANUP(), NDK_X12_SCEN_INIT(), NDK_X12_SCEN_CLEAUP(), NDK_X12_DATA_FILE(), NDK_X12_RUN_BATCH(), NDK_X12_RUN_SCENARIO(), NDK_X12_RUN_STAT(), NDK_X12_OUT_FILE(), NDK_X12_OUT_SERIES(), NDK_X12_FORE_SERIES()\n */\n int __stdcall NDK_X12_SPC_FILE(LPCTSTR szScenarioName, LPVOID X12Options);\n\n /*! 
\n * \\brief Run a batch file in x12a environment\n * \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X12_ENV_INIT(), NDK_X12_ENV_CLEANUP(), NDK_X12_SCEN_INIT(), NDK_X12_SCEN_CLEAUP(), NDK_X12_DATA_FILE(), NDK_X12_SPC_FILE(), NDK_X12_RUN_SCENARIO(), NDK_X12_RUN_STAT(), NDK_X12_OUT_FILE(), NDK_X12_OUT_SERIES(), NDK_X12_FORE_SERIES()\n */\n int __stdcall NDK_X12_RUN_BATCH(LPCTSTR szScenarioName, LPCTSTR szBatchFile, LPWORD status);\n\n /*! \n * \\brief Run an x12a program for the given model or scenario\n * \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X12_ENV_INIT(), NDK_X12_ENV_CLEANUP(), NDK_X12_SCEN_INIT(), NDK_X12_SCEN_CLEAUP(), NDK_X12_DATA_FILE(), NDK_X12_SPC_FILE(), NDK_X12_RUN_BATCH(), NDK_X12_RUN_STAT(), NDK_X12_OUT_FILE(), NDK_X12_OUT_SERIES(), NDK_X12_FORE_SERIES()\n */\n int __stdcall NDK_X12_RUN_SCENARIO(LPCTSTR szScenarioName, LPWORD status);\n\n /*! \n * \\brief Read the status file generated by x12a program\n * \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X12_ENV_INIT(), NDK_X12_ENV_CLEANUP(), NDK_X12_SCEN_INIT(), NDK_X12_SCEN_CLEAUP(), NDK_X12_DATA_FILE(), NDK_X12_SPC_FILE(), NDK_X12_RUN_BATCH(), NDK_X12_RUN_SCENARIO(), NDK_X12_OUT_FILE(), NDK_X12_OUT_SERIES(), NDK_X12_FORE_SERIES()\n */\n int __stdcall NDK_X12_RUN_STAT(LPCTSTR szScenarioName, LPWORD status, LPTSTR szMsg, size_t* nLen);\n\n /*! 
\n * \\brief Return the full path of the output file generated by x12a program\n * \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X12_ENV_INIT(), NDK_X12_ENV_CLEANUP(), NDK_X12_SCEN_INIT(), NDK_X12_SCEN_CLEAUP(), NDK_X12_DATA_FILE(), NDK_X12_SPC_FILE(), NDK_X12_RUN_BATCH(), NDK_X12_RUN_SCENARIO(), NDK_X12_RUN_STAT(), NDK_X12_OUT_SERIES(), NDK_X12_FORE_SERIES()\n */\n int __stdcall NDK_X12_OUT_FILE(LPCTSTR szScenarioName, ///< [in] is the scenaio.model name\n WORD retType, ///< [in] is a switch to designate the desired specific output file.\n /// 0. The X12 specification file (*.spc)\n /// 1. The X12 log file\n /// 2. The output file\n /// 3. The error file\n LPTSTR szOutFile, ///< [out] is a buffer to hold the return full path\n size_t* nLen, ///< [inout] is the length of the szOutFile. Upon return, this argument stores the actual number of bytes used.\n BOOL OpenFileFlag ///< [in] is a switch to instruct the functiona whether it should open the file using system default editor (e.g. notepad)\n );\n /*! \n * \\brief Read the output time series (e.g. seasonal adjusted data) generated by x12a program\n * \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X12_ENV_INIT(), NDK_X12_ENV_CLEANUP(), NDK_X12_SCEN_INIT(), NDK_X12_SCEN_CLEAUP(), NDK_X12_DATA_FILE(), NDK_X12_SPC_FILE(), NDK_X12_RUN_BATCH(), NDK_X12_RUN_SCENARIO(), NDK_X12_RUN_STAT(), NDK_X12_OUT_FILE(), NDK_X12_FORE_SERIES()\n */\n int __stdcall NDK_X12_OUT_SERIES(LPCTSTR szScenarioName, ///< [in] is the given scenario/model\n WORD nComponent, ///< [in] is the desired output of the X12a output\n /// 1. Final seasonal factors (d11)\n /// 2. final trend-cycle (d12)\n /// 3. final irregular component (d13)\n /// 4. final seasonal factors (d10)\n /// 5. 
combined holiday and trading day factors (d18)\n /// 6. combined seasonal and trading day factors (d16)\n double* pData, ///< [out] is the output buffer to hold the data series\n size_t* nLen ///< [inout] is the original size of the output buffer. Upon return, nLen will have the actual number of data copied.\n );\n\n /*! \n * \\brief Read the output forecast series generated by x12a program\n * \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X12_ENV_INIT(), NDK_X12_ENV_CLEANUP(), NDK_X12_SCEN_INIT(), NDK_X12_SCEN_CLEAUP(), NDK_X12_DATA_FILE(), NDK_X12_SPC_FILE(), NDK_X12_RUN_BATCH(), NDK_X12_RUN_SCENARIO(), NDK_X12_RUN_STAT(), NDK_X12_OUT_FILE(), NDK_X12_OUT_SERIES()\n */\n int __stdcall NDK_X12_FORE_SERIES( LPCTSTR szScenarioName, ///< [in] is the given X12-ARIMA scenario/model identifier\n size_t nStep, ///< [in] is the forecast horizon\n WORD retType, ///< [in] is the switch to designate desired output\n /// 1. Mean\n /// 2. Lower limit value of the confidence interval\n /// 3. Upper limit value of the confidence interval\n double* pData ///< [out] is the forecast output value\n );\n ///@}\n\n\n /// \\name X13ARIMA-SEATS\n /// X13ARIMA-SEATS model functions\n /// @{\n\n /*!\n * \\brief Initialize the filesystem environment on the local machine for the current user\n *\n * \\note 1. 
This function creates a subfolder under the current user local profile for X13ARIMA models, and copy all the scripts needed to run the x13as program.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X13_ENV_CLEANUP()\n */\n int __stdcall NDK_X13_ENV_INIT(BOOL override ///< [in] is a boolean flag to wipe our existing files and copy new ones.\n );\n\n /*!\n * \\brief Finalize the X13AS environment and release any resources allocated\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X13_ENV_INIT()\n */\n int __stdcall NDK_X13_ENV_CLEANUP(void);\n\n /*!\n * \\brief Initialize the required files for the given scenario/model\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X13_ENV_INIT(), NDK_X13_ENV_CLEANUP(), NDK_X13_SCEN_CLEAUP()\n */\n int __stdcall NDK_X13_SCEN_INIT(LPCTSTR szScenarioName, ///< [in] is the scenario name, must be unique\n LPVOID X13Options ///< [in] (optional) is an instance of #X13ARIMA_OPTIONS structure with all X13 model options.\n );\n\n\n /*!\n * \\brief reconstruct the different (input/intermediate/output) files\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X13_ENV_INIT(), NDK_X13_ENV_CLEANUP(), NDK_X13_SCEN_CLEAUP()\n */\n int __stdcall NDK_X13_SCEN_REFRESH(LPCTSTR szScenarioName);\n\n /*!\n * \\brief Finalize the given scenario/model and free allocated resources\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref 
SFMacros.h)\n * \\sa NDK_X12_ENV_INIT(), NDK_X12_ENV_CLEANUP(), NDK_X12_SCEN_INIT(), NDK_X12_DATA_FILE(), NDK_X12_SPC_FILE(), NDK_X12_RUN_BATCH(), NDK_X12_RUN_SCENARIO(), NDK_X12_RUN_STAT(), NDK_X12_OUT_FILE(), NDK_X12_OUT_SERIES(), NDK_X12_FORE_SERIES()\n */\n int __stdcall NDK_X13_SCEN_CLEAUP(LPCTSTR szScenarioName /*!< [in] is the scenario name or the model unique identifier */\n );\n\n\n /*!\n * \\brief Write the given data into an X13as formatted data file\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X13_ENV_INIT(), NDK_X13_ENV_CLEANUP(), NDK_X13_SCEN_INIT(), NDK_X13_SCEN_CLEAUP()\n */\n int __stdcall NDK_X13_DATA_FILE(LPCTSTR szScenarioName, LPCTSTR szOutputFile,\n double* X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nLen, ///< [in] is the number of observations in X\n BOOL monthly, ///< [in] is a boolean flag for whether the data is monthly/quartelry sampled.\n LONG startDate, ///< [in] is the serial date number of the 1st observation in the series\n WORD reserved ///< [in] is a reserved argument for future releases. 
must be set to 1\n );\n\n /*!\n * \\brief Write the actual holiday dates into a genhol formatted data file\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X13_ENV_INIT(), NDK_X13_ENV_CLEANUP(), NDK_X13_SCEN_INIT(), NDK_X13_SCEN_CLEAUP()\n */\n int __stdcall NDK_X13_HOLIDAY_FILE( LPCTSTR szScenarioName, ///< [in] is the scenario name or the model unique identifier\n LPCTSTR szHoliday, ///< [in] is the holiday code (unique identifier) to get dates for.\n LONG startDate, ///< [in] is the serial date number of the beginning of the search interval\n LONG endDate ///< [in] is the serial date number of the end of the search interval\n );\n\n\n /*!\n * \\brief Write the (user) holiday dates into a genhol formatted data file\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_X13_HOLIDAY_FILE(), NDK_X13_SCEN_INIT(), NDK_X13_SCEN_CLEAUP()\n */\n int __stdcall NDK_X13_USER_EVENT_FILE(LPCTSTR szScenarioName, ///< [in] is the scenario name or the model unique identifier\n LPCTSTR szName, ///< [in] is the user-defined name (unique identifier) of the event.\n PLONG holidays, ///< [in] is an array of date serial numbers.\n size_t nLen ///< [in] is the number of elements in the [holidays] array.\n );\n\n\n int __stdcall NDK_X13_ADD_EVENT_FACTOR( LPCTSTR szScenarioName, ///< [in] is the scenario name or the model unique identifier\n LPCTSTR szName, ///< [in] is the user-defined name (unique identifier) of the event.\n PLONG holidays, ///< [in] (optional, user-defined only) is an array of date serial numbers.\n size_t nLen, ///< [in] (optional, user-defined only) is the number of elements in the [holidays] array.\n double begbefore, ///< [in] Denotes the position relative to the holiday of the beginning of the window used to 
generate the before-holiday regressor. This value should be negative, and less than or equal to the value for the endbefore argument. The minimum value that can be specified is -42.\n double endbefore, ///< [in] Denotes the position relative to the holiday of the end of the window used to generate the before-holiday regressor. This value should be negative.\n double begAfter, ///< [in] Denotes the position relative to the holiday of the beginning of the window used to generate the after-holiday regressor. Since this effect occurs after the holiday, the value should be non-negative.\n double endAfter, ///< [in] Denotes the position relative to the holiday of the end of the window used to generate the after-holiday regressor. This value should be positive, and greater than or equal to the value for the begafter argument. The maximum value that can be specified is 49\n double zeroBefore, ///< [in] Defines the year before which all values in the regressor are set to be zero. If this argument is set, first < zerobefore <= last, and if zeroafter is set, then zerobefore < zeroafter.\n double zeroAfter, ///< [in] Defines the year on or after which all values in the regressor are set to be zero. 
If this argument is set, first < zeroafter <= last, and if zeroafter is set, then zerobefore < zeroafter.\n WORD wCenter ///< [in] Specifies the removal of the (sample) mean or the seasonal means from the user-defined regression variables.\n ///< 0 = None, 1=mean, 2=calendar (only with ratio type of data)\n );\n\n\n int __stdcall NDK_X13_REGRESSORS_SETTING( LPCTSTR szScenarioName, ///< [in] is the scenario name or the model unique identifier\n double dwFirstYear,\n double dwLastYear,\n double dwFirstMeanYear,\n double dwLastMeanYear,\n DWORD dwPeriod,\n BOOL bRatio,\n double dwStockDay\n );\n \n int __stdcall NDK_X13_RUN_GENHOL(LPCTSTR szScenarioName);\n int __stdcall NDK_X13_RUN_BATCH(LPCTSTR szScenarioName, LPCTSTR szBatchFile, LPWORD status);\n int __stdcall NDK_X13_SPC_SERIES_SETTING(LPCTSTR szScenarioName, LPCTSTR szSeriesName, double* pData, size_t nLen, BOOL stock, BOOL monthly, LONG startDate, WORD fileType);\n int __stdcall NDK_X13_SPC_TRANSFORM_SETTING(LPCTSTR szScenarioName, X13TRANSFORM_METHOD zTransform, double zPower);\n int __stdcall NDK_X13_SPC_PRIOR_ADJUST_SETTING( LPCTSTR szScenarioName, BOOL lom, BOOL loq, BOOL leapYear,\n double* pTempData, size_t nTempLen, LONG zTempStartDate, X13PRIORADJUST_TYPE nTempDataType,\n double* pPermData, size_t nPermLen, LONG zPermStartDate, X13PRIORADJUST_TYPE nPermDataType);\n int __stdcall NDK_X13_SPC_X11_SETTING(LPCTSTR szScenarioName, BOOL enable, X11_MODE_TYPE mode, X11_SEASONALMA_TYPE seasonalma, int trendma, double sigmaLL, double sigmaUL);\n int __stdcall NDK_X13_SPC_SEATS_SETTING(LPCTSTR szScenarioName, BOOL enable, BOOL hpCycle, BOOL infiniteFilter, BOOL bAdmissableCompositionApprox, BOOL bAcceptSeasonStationary, double maxLBQStat);\n int __stdcall NDK_X13_WRITE_SPC_FILE(LPCTSTR szScenarioName);\n int __stdcall NDK_X13_RUN_SPC_FILE(LPCTSTR szScenarioName);\n int __stdcall NDK_X13AS_OUT_FILE(LPCTSTR szScenarioName, WORD retType, LPTSTR szOutFile, size_t* nLen, BOOL OpenFileFlag);\n int __stdcall 
NDK_X13AS_OUT_SERIES(LPCTSTR szScenarioName, LPCTSTR szComponent, double* pData, size_t* nLen);\n\n ///@}\n\n /// \\name SARIMAX\n /// Seasonal ARIMA-X model functions\n /// @{\n\n /*! \n * \\brief Computes the log-likelihood ((LLF), Akaike Information Criterion (AIC) or other goodness of fit functions of the SARIMA-X model.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 4. The maximum likelihood estimation (MLE) is a statistical method for fitting a model to the data and provides estimates for the model's parameters.\n * \\note 5. The intercept or the regression constant term input argument is optional. If omitted, a zero value is assumed. \n * \\note 6. The long-run mean argumen (mean) of the differenced regression residuals can take any value. If omitted, a zero value is assumed.\n * \\note 7. The non-seasonal integration order - d - is optional and can be omitted, in which case d is assumed zero.\n * \\note 8. The seasonal integration order - sD - is optional and can be omitted, in which case sD is assumed zero.\n * \\note 9. The season length - s - is optional and can be omitted, in which case s is assumed zero (i.e. 
Plain ARIMA).\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SARIMAX_FITTED(), NDK_SARIMAX_PARAM(), NDK_SARIMAX_FORE(), NDK_SARIMAX_FORE(), NDK_SARIMAX_VALIDATE()\n */\n int __stdcall NDK_SARIMAX_GOF(double* pData, ///< [in] is the response univariate time series data (a one dimensional array).\n double** pFactors, ///< [in] is the exogneous factors time series data (each column is a separate factor, and each row is an observation).\n size_t nSize, ///< [in] is the number of observations.\n size_t nFactors, ///< [in] is the number of exognous factors.\n double* fBetas, ///< [in] is the weights or loading of the exogneous factors.\n double mean, ///< [in] is the ARIMA/SARIMA model's long-run mean/trend (i.e. mu). If missing (i.e. NaN), then it is assumed zero.\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the non-seasonal difference order.\n double* phis, ///< [in] are the coefficients's values of the non-seasonal AR component.\n size_t p, ///< [in] is the order of the non-seasonal AR component.\n double* thetas, ///< [in] are the coefficients's values of the non-seasonal MA component.\n size_t q, ///< [in] is the order of the non-seasonal MA component.\n WORD nSIntegral, ///< [in] is the seasonal difference.\n WORD nSPeriod, ///< [in] is the number of observations per one period (e.g. 
12=Annual, 4=Quarter).\n double* sPhis, ///< [in] are the coefficients's values of the seasonal AR component.\n size_t sP, ///< [in] is the order of the seasonal AR component.\n double* sThetas, ///< [in] are the coefficients's values of the seasonal MA component.\n size_t sQ, ///< [in] is the order of the seasonal MA component.\n GOODNESS_OF_FIT_FUNC retType, ///< [in] is a switch to select a fitness measure ( see \\ref #GOODNESS_OF_FIT_FUNC).\n double* retVal ///< [out] is the calculated goodness of fit value.\n );\n\n\n /*! \n * \\brief Examines the model's parameters for stability constraints (e.g. causality, invertability, stationary, etc.).\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The intercept or the regression constant term input argument is optional. If omitted, a zero value is assumed.\n * \\note 4. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 5. The non-seasonal integration order - d - is optional and can be omitted, in which case d is assumed zero.\n * \\note 6. The seasonal integration order - sD - is optional and can be omitted, in which case sD is assumed zero.\n * \\note 7. The season length - s - is optional and can be omitted, in which case s is assumed zero (i.e. Plain ARIMA).\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SARIMA_GOF(), NDK_SARIMA_RESID(), NDK_SARIMA_PARAM(), NDK_SARIMA_FORE(), NDK_SARIMA_FITTED()\n */\n int __stdcall NDK_SARIMAX_VALIDATE(double mean, ///< [in] is the model mean (i.e. 
mu) for the differenced series.\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the non-seasonal difference order.\n double* phis, ///< [in] are the coefficients's values of the non-seasonal AR component.\n size_t p, ///< [in] is the order of the non-seasonal AR component.\n double* thetas, ///< [in] are the coefficients's values of the non-seasonal MA component.\n size_t q, ///< [in] is the order of the non-seasonal MA component.\n WORD nSIntegral, ///< [in] is the seasonal difference.\n WORD nSPeriod, ///< [in] is the number of observations per one period (e.g. 12=Annual, 4=Quarter).\n double* sPhis, ///< [in] are the coefficients's values of the seasonal AR component.\n size_t sP, ///< [in] is the order of the seasonal AR component.\n double* sThetas, ///< [in] are the coefficients's values of the seasonal MA component.\n size_t sQ ///< [in] is the order of the seasonal MA component.\n );\n\n /*! \n * \\brief Returns the in-sample model fitted values of the conditional mean, volatility or residuals.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The intercept or the regression constant term input argument is optional. If omitted, a zero value is assumed.\n * \\note 4. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 5. The long-run mean argument (mean) of the differenced regression residuals can take any value. If omitted, a zero value is assumed. \n * \\note 6. The non-seasonal integration order - d - is optional and can be omitted, in which case d is assumed zero.\n * \\note 7. The seasonal integration order - sD - is optional and can be omitted, in which case sD is assumed zero.\n * \\note 8. The season length - s - is optional and can be omitted, in which case s is assumed zero (i.e. 
Plain ARIMA).\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SARIMAX_GOF(), NDK_SARIMAX_RESID(), NDK_SARIMAX_PARAM(), NDK_SARIMAX_FORE(), NDK_SARIMAX_VALIDATE()\n */\n int __stdcall NDK_SARIMAX_FITTED( double* pData, ///< [inout] is the univariate time series data (a one dimensional array).\n double** pFactors, ///< [in] is the exogneous factors time series data (each column is a separate factor, and each row is an observation).\n size_t nSize, ///< [in] is the number of observations.\n size_t nFactors, ///< [in] is the number of exognous factors.\n double* fBetas, ///< [in] is the weights or loading of the exogneous factors.\n double mean, ///< [in] is the ARIMA/SARIMA model's long-run mean/trend (i.e. mu). If missing (i.e. NaN), then it is assumed zero.\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the non-seasonal difference order.\n double* phis, ///< [in] are the coefficients's values of the non-seasonal AR component.\n size_t p, ///< [in] is the order of the non-seasonal AR component.\n double* thetas, ///< [in] are the coefficients's values of the non-seasonal MA component.\n size_t q, ///< [in] is the order of the non-seasonal MA component.\n WORD nSIntegral, ///< [in] is the seasonal difference.\n WORD nSPeriod, ///< [in] is the number of observations per one period (e.g. 12=Annual, 4=Quarter).\n double* sPhis, ///< [in] are the coefficients's values of the seasonal AR component.\n size_t sP, ///< [in] is the order of the seasonal AR component.\n double* sThetas, ///< [in] are the coefficients's values of the seasonal MA component.\n size_t sQ, ///< [in] is the order of the seasonal MA component.\n FIT_RETVAL_FUNC retType ///< [in] is a switch to select a output type ( see \\ref #FIT_RETVAL_FUNC).\n );\n\n\n /*! 
\n * \\brief Returns the quick guess, optimal (calibrated) or std. errors of the values of model's parameters.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The intercept or the regression constant term input argument is optional. If omitted, a zero value is assumed.\n * \\note 4. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 5. The long-run mean argument (mean) of the differenced regression residuals can take any value. If omitted, a zero value is assumed.\n * \\note 6. The non-seasonal integration order - d - is optional and can be omitted, in which case d is assumed zero.\n * \\note 7. The seasonal integration order - sD - is optional and can be omitted, in which case sD is assumed zero.\n * \\note 8. The season length - s - is optional and can be omitted, in which case s is assumed zero (i.e. Plain ARIMA).\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SARIMAX_GOF(), NDK_SARIMAX_RESID(), NDK_SARIMAX_FORE(), NDK_SARIMAX_FITTED(), NDK_SARIMAX_VALIDATE()\n */\n int __stdcall NDK_SARIMAX_PARAM( double* pData, ///< [inout] is the univariate time series data (a one dimensional array).\n double** pFactors, ///< [in] is the exogneous factors time series data (each column is a separate factor, and each row is an observation).\n size_t nSize, ///< [in] is the number of observations.\n size_t nFactors, ///< [in] is the number of exognous factors.\n double* fBetas, ///< [inout] is the weights or loading of the exogneous factors.\n double* mean, ///< [inout] is the mean of the differenced time series process.\n double* sigma, ///< [inout] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the non-seasonal difference 
order.\n double* phis, ///< [inout] are the coefficients's values of the non-seasonal AR component.\n size_t p, ///< [in] is the order of the non-seasonal AR component.\n double* thetas, ///< [inout] are the coefficients's values of the non-seasonal MA component.\n size_t q, ///< [in] is the order of the non-seasonal MA component.\n WORD nSIntegral, ///< [in] is the seasonal difference.\n WORD nSPeriod, ///< [in] is the number of observations per one period (e.g. 12=Annual, 4=Quarter).\n double* sPhis, ///< [inout] are the coefficients's values of the seasonal AR component.\n size_t sP, ///< [in] is the order of the seasonal AR component.\n double* sThetas, ///< [inout] are the coefficients's values of the seasonal MA component.\n size_t sQ, ///< [in] is the order of the seasonal MA component.\n MODEL_RETVAL_FUNC retType, ///< [in] is a switch to select the type of value returned: 1= Quick Guess, 2=Calibrated, 3= Std. Errors ( see \\ref #MODEL_RETVAL_FUNC).\n size_t maxIter ///< [in] is the maximum number of iterations used to calibrate the model. If missing or less than 100, the default maximum of 100 is assumed.\n );\n\n\n /*! \n * \\brief Calculates the out-of-sample forecast statistics.\n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 4. The exogneous factors input are expected to have at least n-more observations than the reponse variable.\n * \\note 5. The intercept or the regression constant term input argument is optional. If omitted, a zero value is assumed.\n * \\note 6. The long-run mean argument (mean) of the differenced regression residuals can take any value. If omitted, a zero value is assumed.\n * \\note 7. The non-seasonal integration order - d - is optional and can be omitted, in which case d is assumed zero.\n * \\note 8. 
The seasonal integration order - sD - is optional and can be omitted, in which case sD is assumed zero.\n * \\note 9. The season length - s - is optional and can be omitted, in which case s is assumed zero (i.e. Plain ARIMA).\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SARIMA_GOF(), NDK_SARIMA_RESID(), NDK_SARIMA_PARAM(), NDK_SARIMA_FITTED(), NDK_SARIMA_VALIDATE()\n */\n int __stdcall NDK_SARIMAX_FORE(double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n double** pFactors, ///< [in] is the exogneous factors time series data (each column is a separate factor, and each row is an observation).\n size_t nSize, ///< [in] is the number of observations.\n size_t nFactors, ///< [in] is the number of exognous factors.\n double* fBetas, ///< [inout] is the weights or loading of the exogneous factors.\n double mean, ///< [inout] is the mean of the ARMA process.\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the non-seasonal difference order.\n double* phis, ///< [in] are the coefficients's values of the non-seasonal AR component.\n size_t p, ///< [in] is the order of the non-seasonal AR component.\n double* thetas, ///< [in] are the coefficients's values of the non-seasonal MA component.\n size_t q, ///< [in] is the order of the non-seasonal MA component.\n WORD nSIntegral, ///< [in] is the seasonal difference.\n WORD nSPeriod, ///< [in] is the number of observations per one period (e.g. 
12=Annual, 4=Quarter).\n double* sPhis, ///< [in] are the coefficients's values of the seasonal AR component.\n size_t sP, ///< [in] is the order of the seasonal AR component.\n double* sThetas, ///< [in] are the coefficients's values of the seasonal MA component.\n size_t sQ, ///< [in] is the order of the seasonal MA component.\n size_t nStep, ///< [in] is the forecast time/horizon (expressed in terms of steps beyond end of the time series).\n FORECAST_RETVAL_FUNC retType, ///< [in] is a switch to select the type of value returned (see \\ref #FORECAST_RETVAL_FUNC).\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed.\n double* retVal ///< [out] is the calculated forecast value.\n );\n\n\n /*! \n * \\brief Calculates the out-of-sample simulated values. \n * \n * \\note 1. The time series is homogeneous or equally spaced.\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The residuals/innovations standard deviation (i.e.\\f$\\sigma\\f$) should be greater than zero.\n * \\note 4. The intercept or the regression constant term input argument is optional. If omitted, a zero value is assumed.\n * \\note 5. The exogenous factors input are expected to have at least n-more observations than the reponse variable.\n * \\note 6. The long-run mean argument (mean) of the differenced regression residuals can take any value. If omitted, a zero value is assumed.\n * \\note 7. The non-seasonal integration order - d - is optional and can be omitted, in which case d is assumed zero.\n * \\note 8. The seasonal integration order - sD - is optional and can be omitted, in which case sD is assumed zero.\n * \\note 9. The season length - s - is optional and can be omitted, in which case s is assumed zero (i.e. 
Plain ARIMA).\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_SARIMA_GOF(), NDK_SARIMA_RESID(), NDK_SARIMA_FORE(), NDK_SARIMA_FITTED(), NDK_SARIMA_VALIDATE()\n */\n int __stdcall NDK_SARIMAX_SIM(double* fBetas, ///< [inout] is the weights or loading of the exogneous factors.\n size_t nFactors, ///< [in] is the number of exognous factors.\n double mean, ///< [inout] is the mean of the ARMA process.\n double sigma, ///< [in] is the standard deviation of the model's residuals/innovations.\n WORD nIntegral, ///< [in] is the non-seasonal difference order.\n double* phis, ///< [in] are the coefficients's values of the non-seasonal AR component.\n size_t p, ///< [in] is the order of the non-seasonal AR component.\n double* thetas, ///< [in] are the coefficients's values of the non-seasonal MA component.\n size_t q, ///< [in] is the order of the non-seasonal MA component.\n WORD nSIntegral, ///< [in] is the seasonal difference.\n WORD nSPeriod, ///< [in] is the number of observations per one period (e.g. 12=Annual, 4=Quarter).\n double* sPhis, ///< [in] are the coefficients's values of the seasonal AR component.\n size_t sP, ///< [in] is the order of the seasonal AR component.\n double* sThetas, ///< [in] are the coefficients's values of the seasonal MA component.\n size_t sQ, ///< [in] is the order of the seasonal MA component.\n double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n double** pFactors, ///< [in] is the past exogneous factors time series data (each column is a separate factor, and each row is an observation).\n size_t nSize, ///< [in] is the number of observations in X. 
\n UINT nSeed, ///< [in] is an unsigned integer for setting up the random number generators.\n size_t nStep, ///< [in] is the simulation time/horizon (expressed in terms of steps beyond end of the time series).\n double* retVal ///< [out] is the simulated value.\n );\n\n\n ///@}\n\n\n /// \\name GARCH\n ///GARCH Functions\n /// @{\n\n /*! \n * \\brief Computes the log-likelihood ((LLF), Akaike Information Criterion (AIC) or other goodness of fit function of the GARCH model. \n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCH_RESID(), NDK_GARCH_PARAM(), NDK_GARCH_FORE(), NDK_GARCH_FITTED(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_GARCH_GOF(double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). 
\n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n WORD retType, ///< [in] is a switch to select a fitness measure ( see \\ref #GOODNESS_OF_FIT_FUNC)\n double* retVal ///< [out] is the calculated goodness of fit value. \n );\n /*! \n * \\brief Returns an array of cells for the standardized residuals of a given GARCH model \n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\deprecated this function is being replaced by NDK_GARCH_FITTED()\n * \\sa NDK_GARCH_GOF(), NDK_GARCH_PARAM(), NDK_GARCH_FORE(), NDK_GARCH_FITTED(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_GARCH_RESID(double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). 
\n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n WORD retType ///< [in] is a switch to select a residuals-type:raw or standardized. see \\ref #RESID_RETVAL_FUNC\n );\n /*! \n * \\brief Returns an array of cells for the initial (non-optimal), optimal or standard errors of the model's parameters.\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCH_GOF(), NDK_GARCH_RESID(), NDK_GARCH_FORE(), NDK_GARCH_FITTED(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_GARCH_PARAM(double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double* mu, ///< [inout] is the GARCH model conditional mean (i.e. mu). \n double* Alphas, ///< [inout] are the parameters of the ARCH(p) component model (starting with the lowest lag). 
\n size_t p, ///< [in] is the number of elements in Alphas array\n double* Betas, ///< [inout] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double* nu, ///< [inout] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n WORD retType, ///< [in] is a switch to select the type of value returned: 1= Quick Guess, 2=Calibrated, 3= Std. Errors ( see \\ref #MODEL_RETVAL_FUNC)\n size_t maxIter ///< [in] is the maximum number of iterations used to calibrate the model. If missing or less than 100, the default maximum of 100 is assumed. \n );\n /*! \n * \\brief Returns a simulated data series the underlying GARCH process.\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCH_RESID(), NDK_GARCH_PARAM(), NDK_GARCH_FORE(), NDK_GARCH_FITTED(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_GARCH_SIM(double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). 
\n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n double* pData, ///< [in] is the univariate time series of the latest observations (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double* sigmas, ///< [in] is the univariate time series of the latest observations (a one dimensional array of cells (e.g. rows or columns)) of the last q realized volatilities. \n size_t nSigmaSize, ///< [in] is the number of elements in sigmas. Only the latest q observations are used.\n UINT nSeed, ///< [in] is an unsigned integer for setting up the random number generators\n double* retArray, ///< [out] is the calculated simulation value\n size_t nSteps ///< [in] is the number of future steps to simulate for. \n );\n /*! \n * \\brief Calculates the out-of-sample forecast statistics.\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. 
The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCH_GOF(), NDK_GARCH_RESID(), NDK_GARCH_PARAM(), NDK_GARCH_FITTED(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_GARCH_FORE( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double* sigmas, ///< [in] is the univariate time series data (a one dimensional array of cells (e.g. rows or columns)) of the last q realized volatilities. \n size_t nSigmaSize, ///< [in] is the number of elements in sigmas. Only the latest q observations are used.\n double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian or Normal Distribution \n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n size_t nStep , ///< [in] is the forecast time/horizon (expressed in terms of steps beyond end of the time series). \n WORD retType, ///< [in] is a switch to select the type of value returned\n /// 1. Mean forecast \n /// 2. Forecast Error\n /// 3. Volatility term structure\n /// 4. 
Confidence interval lower limit\n /// 5. Confidence interval upper limit \n /// (see \\ref #FORECAST_RETVAL_FUNC)\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed. \n double* retVal ///< [out] is the calculated forecast value\n );\n /*! \n * \\brief Returns an array of cells for the fitted values (i.e. mean, volatility and residuals)\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCH_GOF(), NDK_GARCH_RESID(), NDK_GARCH_PARAM(), NDK_GARCH_FORE(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_GARCH_FITTED( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). 
\n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n WORD retType ///< [in] is a switch to select a output type ( see \\ref #FIT_RETVAL_FUNC)\n );\n\n /*! \n * \\brief Calculates the long-run average volatility for the given GARCH model\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The long-run variance is not affected by our choice of shock/innovation distribution\n * \\note 4. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 5. The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCH_GOF(), NDK_GARCH_RESID(), NDK_GARCH_PARAM(), NDK_GARCH_FORE(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_GARCH_LRVAR( double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). 
\n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n double* retVal ///< [out] is the calculated long run value\n );\n /*! \n * \\brief Examines the model's parameters for stability constraints (e.g. variance stationary, positive variance, etc.). \n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_TRUE Model is stable (i.e. variance process is stationary and yield positive values)\n * \\retval #NDK_FALSE Model is unstable.\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_AIRLINE_GOF(), NDK_AIRLINE_RESID(), NDK_AIRLINE_PARAM(), NDK_AIRLINE_FORE(), NDK_AIRLINE_FITTED()\n */\n int __stdcall NDK_GARCH_VALIDATE(double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). 
\n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n );\n ///@}\n\n /// \\name EGARCH\n ///EGARCH Functions\n /// @{\n\n /*! \n * \\brief Computes the log-likelihood ((LLF), Akaike Information Criterion (AIC) or other goodness of fit function of the GARCH model. \n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. The number of gamma-coefficients must match the number of alpha-coefficients minus one.\n * \\note 5. The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_EGARCH_RESID(), NDK_EGARCH_PARAM(), NDK_EGARCH_FORE(), NDK_EGARCH_FITTED(), NDK_EGARCH_VALIDATE()\n */\n int __stdcall NDK_EGARCH_GOF( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double mu, ///< [in] is the EGARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). 
\n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Gammas, ///< [in] are the leverage parameters (starting with the lowest lag).\n size_t g, ///< [in] is the number of elements in Gammas. Must be equal to (p-1).\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n WORD retType, ///< [in] is a switch to select a fitness measure ( see \\ref #GOODNESS_OF_FIT_FUNC)\n double* retVal ///< [out] is the calculated goodness of fit value. \n );\n /*! \n * \\brief Returns an array of cells for the standardized residuals of a given GARCH model \n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. The number of gamma-coefficients must match the number of alpha-coefficients minus one.\n * \\note 5. 
The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\deprecated this function is being replaced by NDK_EGARCH_FITTED()\n * \\sa NDK_GARCH_GOF(), NDK_GARCH_PARAM(), NDK_GARCH_FORE(), NDK_GARCH_FITTED(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_EGARCH_RESID( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double mu, ///< [in] is the EGARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Gammas, ///< [in] are the leverage parameters (starting with the lowest lag).\n size_t g, ///< [in] is the number of elements in Gammas. Must be equal to (p-1).\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n WORD retType ///< [in] is a switch to select a residuals-type:raw or standardized. see \\ref #RESID_RETVAL_FUNC\n );\n /*! \n * \\brief Returns an array of cells for the initial (non-optimal), optimal or standard errors of the model's parameters.\n * \n * \\note 1. 
The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. The number of gamma-coefficients must match the number of alpha-coefficients minus one.\n * \\note 5. The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_EGARCH_GOF(), NDK_EGARCH_RESID(), NDK_EGARCH_FORE(), NDK_EGARCH_FITTED(), NDK_EGARCH_VALIDATE()\n */\n int __stdcall NDK_EGARCH_PARAM( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double* mu, ///< [inout] is the EGARCH model conditional mean (i.e. mu). \n double* Alphas, ///< [inout] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n double* Gammas, ///< [inout] are the leverage parameters (starting with the lowest lag).\n size_t g, ///< [in] is the number of elements in Gammas. Must be equal to (p-1).\n double* Betas, ///< [inout] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double* nu, ///< [inout] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. 
\n WORD retType, ///< [in] is a switch to select the type of value returned: 1= Quick Guess, 2=Calibrated, 3= Std. Errors ( see \\ref #MODEL_RETVAL_FUNC)\n size_t maxIter ///< [in] is the maximum number of iterations used to calibrate the model. If missing or less than 100, the default maximum of 100 is assumed. \n );\n /*! \n * \\brief Returns a simulated data series the underlying EGARCH process.\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. The number of gamma-coefficients must match the number of alpha-coefficients minus one.\n * \\note 5. The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_EGARCH_RESID(), NDK_EGARCH_PARAM(), NDK_EGARCH_FORE(), NDK_EGARCH_FITTED(), NDK_EGARCH_VALIDATE()\n */\n int __stdcall NDK_EGARCH_SIM( double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Gammas, ///< [in] are the leverage parameters (starting with the lowest lag).\n size_t g, ///< [in] is the number of elements in Gammas. Must be equal to (p-1).\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). 
\n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double* sigmas, ///< [in] is the univariate time series data (a one dimensional array of cells (e.g. rows or columns)) of the last q realized volatilities. \n size_t nSigmaSize, ///< [in] is the number of elements in sigmas. Only the latest q observations are used.\n UINT nSeed, ///< [in] is an unsigned integer for setting up the random number generators\n double* retArray, ///< [out] is the calculated simulation value\n size_t nSteps ///< [in] is the number of future steps to simulate for. \n );\n /*! \n * \\brief Calculates the out-of-sample forecast statistics.\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. The number of gamma-coefficients must match the number of alpha-coefficients minus one.\n * \\note 5. 
The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_EGARCH_GOF(), NDK_EGARCH_RESID(), NDK_EGARCH_PARAM(), NDK_EGARCH_FITTED(), NDK_EGARCH_VALIDATE()\n */\n int __stdcall NDK_EGARCH_FORE(double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double* sigmas, ///< [in] is the univariate time series data (a one dimensional array of cells (e.g. rows or columns)) of the last q realized volatilities. \n size_t nSigmaSize, ///< [in] is the number of elements in sigmas. Only the latest q observations are used.\n double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Gammas, ///< [inout] are the leverage parameters (starting with the lowest lag).\n size_t g, ///< [in] is the number of elements in Gammas. Must be equal to (p-1).\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n size_t nStep , ///< [in] is the forecast time/horizon (expressed in terms of steps beyond end of the time series). 
\n WORD retType, ///< [in] is a switch to select the type of value returned\n /// 1. Mean forecast \n /// 2. Forecast Error\n /// 3. Volatility term structure\n /// 4. Confidence interval lower limit\n /// 5. Confidence interval upper limit \n /// (see \\ref #FORECAST_RETVAL_FUNC)\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed. \n double* retVal ///< [out] is the simulated value for the GARCH process.\n );\n /*! \n * \\brief Returns an array of cells for the fitted values (i.e. mean, volatility and residuals)\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. The number of gamma-coefficients must match the number of alpha-coefficients minus one.\n * \\note 5. The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCH_GOF(), NDK_GARCH_RESID(), NDK_GARCH_PARAM(), NDK_GARCH_FORE(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_EGARCH_FITTED( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Gammas, ///< [in] are the leverage parameters (starting with the lowest lag).\n size_t g, ///< [in] is the number of elements in Gammas. 
Must be equal to (p-1).\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n WORD retType ///< [in] is a switch to select a output type ( see \\ref #FIT_RETVAL_FUNC)\n );\n\n\n /*! \n * \\brief Calculates the long-run average volatility for a given E-GARCH model.\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The long-run variance is not affected by our choice of shock/innovation distribution\n * \\note 4. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 5. The number of gamma-coefficients must match the number of alpha-coefficients minus one.\n * \\note 6. The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_EGARCH_GOF(), NDK_EGARCH_RESID(), NDK_EGARCH_PARAM(), NDK_EGARCH_FORE(), NDK_EGARCH_VALIDATE()\n */\n int __stdcall NDK_EGARCH_LRVAR( double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). 
\n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Gammas, ///< [inout] are the leverage parameters (starting with the lowest lag).\n size_t g, ///< [in] is the number of elements in Gammas. Must be equal to (p-1).\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n double* retVal ///< [out] is the calculated Long run volatility.\n );\n /*! \n * \\brief Examines the model's parameters for stability constraints (e.g. stationary, positive variance, etc.). \n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model.\n * \\note 4. The number of gamma-coefficients must match the number of alpha-coefficients minus one.\n * \\note 5. The number of parameters in the input argument - beta - determines the order of the GARCH component model.\n *\n * \\return status code of the operation\n * \\retval #NDK_TRUE Model is stable (i.e. 
variance process is stationary and yield positive values)\n * \\retval #NDK_FALSE Model is unstable.\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_AIRLINE_GOF(), NDK_AIRLINE_RESID(), NDK_AIRLINE_PARAM(), NDK_AIRLINE_FORE(), NDK_AIRLINE_FITTED()\n */\n int __stdcall NDK_EGARCH_VALIDATE( double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Gammas, ///< [inout] are the leverage parameters (starting with the lowest lag).\n size_t g, ///< [in] is the number of elements in Gammas. Must be equal to (p-1).\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n );\n ///@}\n\n\n /// \\name GARCH-M\n ///GARCH-M Functions\n /// @{\n\n /*! \n * \\brief Computes the log-likelihood ((LLF), Akaike Information Criterion (AIC) or other goodness of fit function of the GARCH model. \n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model. \n * \\note 4. 
The number of parameters in the input argument - beta - determines the order of the GARCH component model\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCHM_RESID(), NDK_GARCHM_PARAM(), NDK_GARCHM_FORE(), NDK_GARCHM_FITTED(), NDK_GARCHM_VALIDATE()\n */\n int __stdcall NDK_GARCHM_GOF(double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n double flambda, ///< [in] is the volatility coefficient for the mean. In finance, lambda is referenced as the risk premium.\n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n WORD retType, ///< [in] is a switch to select a fitness measure ( see \\ref #GOODNESS_OF_FIT_FUNC)\n double* retVal ///< [out] is the calculated goodness of fit value. \n );\n /*! \n * \\brief Returns an array of cells for the standardized residuals of a given GARCH model \n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. 
The number of parameters in the input argument - alpha - determines the order of the ARCH component model. \n * \\note 4. The number of parameters in the input argument - beta - determines the order of the GARCH component model\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\deprecated this function is being replaced by NDK_GARCHM_FITTED()\n * \\sa NDK_GARCHM_GOF(), NDK_GARCH_PARAM(), NDK_GARCH_FORE(), NDK_GARCH_FITTED(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_GARCHM_RESID( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n double flambda, ///< [in] is the volatility coefficient for the mean. In finance, lambda is referenced as the risk premium.\n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n WORD retType ///< [in] is a switch to select a residuals-type:raw or standardized. see \\ref #RESID_RETVAL_FUNC\n );\n /*! 
\n * \\brief Returns an array of cells for the initial (non-optimal), optimal or standard errors of the model's parameters.\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model. \n * \\note 4. The number of parameters in the input argument - beta - determines the order of the GARCH component model\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCH_GOF(), NDK_GARCH_RESID(), NDK_GARCH_FORE(), NDK_GARCH_FITTED(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_GARCHM_PARAM( double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double* mu, ///< [inout] is the GARCH model conditional mean (i.e. mu). \n double* flambda, ///< [inout] is the volatility coefficient for the mean. In finance, lambda is referenced as the risk premium.\n double* Alphas, ///< [inout] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n double* Betas, ///< [inout] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double* nu, ///< [inout] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. 
\n WORD retType, ///< [in] is a switch to select the type of value returned: 1= Quick Guess, 2=Calibrated, 3= Std. Errors ( see \\ref #MODEL_RETVAL_FUNC)\n size_t maxIter ///< [in] is the maximum number of iterations used to calibrate the model. If missing or less than 100, the default maximum of 100 is assumed. \n );\n\n /*! \n * \\brief Returns a simulated data series the underlying GARCH process.\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model. \n * \\note 4. The number of parameters in the input argument - beta - determines the order of the GARCH component model\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCH_RESID(), NDK_GARCH_PARAM(), NDK_GARCH_FORE(), NDK_GARCH_FITTED(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_GARCHM_SIM( double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n double flambda, ///< [in] is the volatility coefficient for the mean. In finance, lambda is referenced as the risk premium.\n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. 
\n double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double* sigmas, ///< [in] is the univariate time series data (a one dimensional array of cells (e.g. rows or columns)) of the last q realized volatilities. \n size_t nSigmaSize, ///< [in] is the number of elements in sigmas. Only the latest q observations are used.\n UINT nSeed, ///< [in] is an unsigned integer for setting up the random number generators\n double* retArray, ///< [out] is the calculated simulation value\n size_t nSteps ///< [in] is the number of future steps to simulate for. \n );\n /*! \n * \\brief Calculates the out-of-sample forecast statistics.\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model. \n * \\note 4. The number of parameters in the input argument - beta - determines the order of the GARCH component model\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCH_GOF(), NDK_GARCH_RESID(), NDK_GARCH_PARAM(), NDK_GARCH_FITTED(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_GARCHM_FORE(double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X.\n double* sigmas, ///< [in] is the univariate time series data (a one dimensional array of cells (e.g. rows or columns)) of the last q realized volatilities. \n size_t nSigmaSize, ///< [in] is the number of elements in sigmas. Only the latest q observations are used.\n double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n double flambda, ///< [in] is the volatility coefficient for the mean. 
In finance, lambda is referenced as the risk premium.\n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian or Normal Distribution \n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n size_t nStep , ///< [in] is the forecast time/horizon (expressed in terms of steps beyond end of the time series). \n WORD retType, ///< [in] is a switch to select the type of value returned\n /// 1. Mean forecast \n /// 2. Forecast Error\n /// 3. Volatility term structure\n /// 4. Confidence interval lower limit\n /// 5. Confidence interval upper limit \n /// (see \\ref #FORECAST_RETVAL_FUNC)\n double alpha, ///< [in] is the statistical significance level. If missing, a default of 5% is assumed. \n double* retVal ///< [out] is the calculated forecast value\n );\n /*! \n * \\brief Returns an array of cells for the fitted values (i.e. mean, volatility and residuals)\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model. \n * \\note 4. 
The number of parameters in the input argument - beta - determines the order of the GARCH component model\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCHM_GOF(), NDK_GARCHM_RESID(), NDK_GARCHM_PARAM(), NDK_GARCHM_FORE(), NDK_GARCHM_VALIDATE()\n */\n int __stdcall NDK_GARCHM_FITTED(double* pData, ///< [in] is the univariate time series data (a one dimensional array).\n size_t nSize, ///< [in] is the number of observations in X. \n double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n double flambda, ///< [in] is the volatility coefficient for the mean. In finance, lambda is referenced as the risk premium.\n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n WORD retType ///< [in] is a switch to select a output type ( see \\ref #FIT_RETVAL_FUNC) \n );\n\n /*! \n * \\brief Calculates the long-run average volatility for the given GARCH-M model\n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. 
The number of parameters in the input argument - alpha - determines the order of the ARCH component model. \n * \\note 4. The number of parameters in the input argument - beta - determines the order of the GARCH component model\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCH_GOF(), NDK_GARCH_RESID(), NDK_GARCH_PARAM(), NDK_GARCH_FORE(), NDK_GARCH_VALIDATE()\n */\n int __stdcall NDK_GARCHM_LRVAR( double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n double flambda, ///< [in] is the volatility coefficient for the mean. In finance, lambda is referenced as the risk premium.\n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu, ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n double* retVal ///< [out] is the calculated long run value\n );\n /*! \n * \\brief Examines the model's parameters for stability constraints (e.g. stationary, etc.). \n * \n * \\note 1. The time series is homogeneous or equally spaced\n * \\note 2. The time series may include missing values (e.g. NaN) at either end.\n * \\note 3. The number of parameters in the input argument - alpha - determines the order of the ARCH component model. \n * \\note 4. 
The number of parameters in the input argument - beta - determines the order of the GARCH component model\n *\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED operation is unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_GARCHM_GOF(), NDK_ARCHM_RESID(), NDK_ARCHM_PARAM(), NDK_ARCHM_FORE(), NDK_ARCHM_FITTED()\n */\n int __stdcall NDK_GARCHM_VALIDATE(double mu, ///< [in] is the GARCH model conditional mean (i.e. mu). \n double flambda, ///< [in] is the volatility coefficient for the mean. In finance, lambda is referenced as the risk premium.\n const double* Alphas, ///< [in] are the parameters of the ARCH(p) component model (starting with the lowest lag). \n size_t p, ///< [in] is the number of elements in Alphas array\n const double* Betas, ///< [in] are the parameters of the GARCH(q) component model (starting with the lowest lag). \n size_t q, ///< [in] is the number of elements in Betas array\n WORD nInnovationType,///< [in] is the probability distribution function of the innovations/residuals (see #INNOVATION_TYPE)\n /// - INNOVATION_GAUSSIAN Gaussian Distribution (default)\n /// - INNOVATION_TDIST Student's T-Distribution, \n /// - INNOVATION_GED Generalized Error Distribution (GED)\n double nu ///< [in] is the shape factor (or degrees of freedom) of the innovations/residuals probability distribution function. \n );\n ///@}\n\n\n /// \\name Speactral Analysis\n /// @{\n /*! \n * \\brief Returns an array of cells for the convolution operator of two time series \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval Error code\n * \\sa SFMacros.h, NDK_LAG(), NDK_DIFF\n */\n int __stdcall NDK_CONVOLUTION(double *X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t N1, ///< [in] is the number of observations in X. 
\n double *Y, ///< [in] is the second univariate time series data (a one dimensional array)\n size_t N2, ///< [in] is the number of observations in Y.\n double* Z, ///< [out] is the convolution time series output\n size_t* W ///< [inout] is the maximum number of elements in Z.\n );\n\n /*! \n * \\brief Calculates the inverse discrete fast Fourier transformation, recovering the time series.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval Error code\n * \\sa SFMacros.h, NDK_LAG(), NDK_DIFF\n */\n int __stdcall NDK_IDFT( double *amp, ///< [in] is an array of the amplitudes of the fourier transformation components.\n double *phase, ///< [in] is an array of the phase angle (radian) of the Fourier transformation components .\n size_t nSize, ///< [in] is the number of spectrum components (i.e. size of amp and phase). \n double* X, ///< [out] is the filtered (recovered) time series output\n size_t N ///< [in] is the original number of observations used to calculate the fourier transform. \n );\n\n /*! \n * \\brief Calculates the discrete fast Fourier transformation for amplitude and phase.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval Error code\n * \\sa SFMacros.h, NDK_LAG(), NDK_DIFF\n */\n int __stdcall NDK_DFT(double *X, ///< [in] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n double* retAmp, ///< [out] is an array of the amplitudes of the fourier transformation components\n double* retPhase, ///< [out] is an array of the phase angle (radian) of the Fourier transformation components .\n size_t M ///< [in] is the number of spectrum components (i.e. size of amp and phase)\n );\n\n\n /*! 
\n * \\brief computes cyclical component of given time series using the Hodrick\u0096Prescott filter.\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful (see \\ref SFMacros.h) \n * \\sa NDK_BaxterKingFilter(), NDK_DFT(), NDK_IDFT()\n */\n int __stdcall NDK_HodrickPrescotFilter(double *X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n BOOL bAscending, ///< [in] is the time order in the data series (i.e. the first data point's corresponding date (earliest date=1 (default), latest date=0)). \n double lambda ///< [in] is the multiplier used to penalize the variation in the trend component. If missing, a default is used based on data frequency. \n );\n\n\n /*! \n * \\brief Computes trend and cyclical component of a macroeconomic time series using Baxter-King Fixed Length Symmetric Filter. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful (see \\ref SFMacros.h) \n * \\sa NDK_HodrickPrescotFilter(), NDK_DFT(), NDK_IDFT()\n */\n int __stdcall NDK_BaxterKingFilter( double *X, ///< [inout] is the univariate time series data (a one dimensional array).\n size_t N, ///< [in] is the number of observations in X. \n BOOL bAscending, ///< [in] is the time order in the data series (i.e. the first data point's corresponding date (earliest date=1 (default), latest date=0)). \n size_t freq_min, ///< [in] is the number of periods for the high pass filter (e.g. 6 for quarterly data, 18 for monthly data). \n size_t freq_max, ///< [in] is the number of periods for the low passfilter (e.g. 32 for quarterly data, 96 for montly data). \n size_t K, ///< [in] is the number of points(aka terms) to use in the approximate optimal filter. 
If missing, a default value of 12 is assumed \n BOOL drift, ///< [in] is a logical value: FALSE if no drift in time series (default), TRUE if drift in time series. \n BOOL unitroot, ///< [in] is a logical value: FALSE if no unit-root is in time series (default), TRUE if unit-root is in time series. \n WORD retTYpe ///< [in] is the integer enumeration for the filter output: (1= trend component (default), 2=cyclical component, 3=noise component) \n );\n\n\n ///@}\n\n\n /*!\n * \\name Portfolio Analysis\n * \\brief \n * @{\n */\n /// \\brief compute the portfolio equivalent returns\n /*! \n * \\brief Calculates the portfolio equivalent return. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful (see \\ref SFMacros.h) \n * \\sa NDK_PORTFOLIO_VARIANCE(), NDK_PORTFOLIO_COVARIANCE()\n */\n int __stdcall NDK_PORTFOLIO_RET(double* weights, size_t nAssets, double* returns, double* ret);\n\n /*! \n * \\brief Calculates the overall portfolio variance (volatility squared). \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful (see \\ref SFMacros.h) \n * \\sa NDK_PORTFOLIO_RET(), NDK_PORTFOLIO_COVARIANCE()\n */\n int __stdcall NDK_PORTFOLIO_VARIANCE(double* weights, size_t nAssets, double** covar, double* variance);\n\n /*! \n * \\brief Calculates the covariance between two portfolios. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation is unsuccessful (see \\ref SFMacros.h) \n * \\sa NDK_PORTFOLIO_RET(), NDK_PORTFOLIO_VARIANCE()\n */\n int __stdcall NDK_PORTFOLIO_COVARIANCE(double* weights1, double* weights2, size_t nAssets, double** covar, double* retVal);\n\n ///@}\n\n\n /*!\n * \\name Utilities\n * \n * @{\n */\n /*! \n * \\brief estimate the value of the function represented by (x,y) data set at an intermediate x-value. 
\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval #NDK_FAILED Operation unsuccessful (see \\ref SFMacros.h)\n * \\sa NDK_WMA(), NDK_EWMA()\n */\n int __stdcall\tNDK_INTERPOLATE(double* X, ///< [in] is the x-component of the input data table (a one dimensional array)\n size_t Nx, ///< [in] is the number of elements in X\n double* Y, ///< [in] is the y-component of the input data table (a one dimensional array)\n size_t Ny, ///< [in] is the number of elements in Y \n double* XT, ///< [in] is the desired x-value(s) to interpolate for (a single value or a one dimensional array).\n size_t Nxt, ///< [in] is the number of elements in XT\n WORD nMethod, ///< [in] is the interpolation method (1=Forward Flat, 2=Backward Flat, 3=Linear, 4=Cubic Spline). \n /// 1. Forward Flat\n /// 2. Backward Flat\n /// 3. Linear \n /// 4. Cublic Spline\n BOOL extrapolate, ///< [in] sets whether or not to allow extrapolation (1=Yes, 0=No). If missing, the default is to not allow extrapolation\n double* YVals, ///< [out] is the output buffer to store the interpolated values\n size_t Nyvals ///< [in] is the number of elements in YVals (must equal to Nxt).\n );\n\n /*! \n * \\brief Query & retrieve NumXL SDK environment information \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval Error code\n * \\sa SFMacros.h, NDK_Init()\n */\n int __stdcall NDK_INFO(int nRetType, ///< [in] is a key/identifier to select the desired output\n /// 1. Version Number (default\n /// 2. Release\n /// 3. License Key\n /// 4. License Level\n /// 5. License Expiry Date\n /// 6. Installation Path\n /// 7. Data (e.g. Log-file) Path\n /// 8. Computer ID(unique identifier)\n LPTSTR szMsg, ///< [out] The buffer that will receive the return value\n int nSize ///< [inout] maximum number of characters to copy to the buffer.\n );\n /*! 
\n * \\brief write a log message to the logging system \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval Error code\n * \\sa SFMacros.h, SFLogger.h, SFLOG_LogMsg()\n */\n int __stdcall NDK_MSG( int nRetCode, ///< [in] is the log level (1=trace, 2=Debug, 3=Info, 4=Warn, 5=Error, 6=Fatal Error)\n LPTSTR pMsg, ///< [in] is the log message\n size_t nSize ///< [in] us the number of characters in pMsg\n );\n\n\n /*!\n * \\brief set the seed value of the random number generator\n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval Error code\n * \\sa SFMacros.h, SFSDK.h\n */\n int __stdcall NDK_RNG_SEED(ULONG ulSeed ///< [in] is the new seed value for the random number generator\n );\n\n /*! \n * \\brief Locate and return the full path of the default editor (e.g. notepad) in the system \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval Error code\n * \\sa SFMacros.h, NDK_Init()\n */\n int __stdcall NDK_DEFAULT_EDITOR (LPTSTR szFullPath, ///< [out] is the buffer that will receive the return value\n size_t* nSize ///< [inout] is the maximum number of characters to copy to the buffer.\n );\n\n\n /*! \n * \\brief Returns the n-th token/substring in a string after splitting it using a given delimiter \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval Error code\n * \\sa NDK_REGEX_REPLACE(), NDK_REGEX_MATCH()\n */\n int __stdcall NDK_TOKENIZE (LPCTSTR szTxt, ///< [in] is the input string to match for. \n LPCTSTR szDelim, ///< [in] is the character to use for splitting the string. If missing, comma (,) is used. \n short nOrder, ///< [in] is the order of the token to return, where first = 1, second = 2,..., and last = -1. 
If missing, the first token is returned\n LPTSTR pRetVal, ///< [out] is the n-th token/substring in a string\n size_t nSize ///< [in] is the number of characters in pRetVal buffer\n );\n /*! \n * \\brief Returns TRUE if the string matches the regular expression expressed \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval Error code\n * \\sa NDK_REGEX_REPLACE(), NDK_TOKENIZE()\n */\n int __stdcall NDK_REGEX_MATCH( LPCTSTR szLine, ///< [in] is the input string to match for.\n LPCTSTR szPattern, ///< [in] is the regular expression (regex PERL-style) to match the input string with (e.g. ^Thi[sS].*$). \n BOOL ignoreCase, ///< [in] is a flag to instruct the function to ignore the letter-case in the string\n BOOL partialOK, ///< [in] is a flag/switch to indicate whether a substring or a partial match (search) is permitted or to only consider full-string match.\n BOOL* bMatch ///< [out] is the return value of the match.\n );\n\n /*! \n * \\brief Returns the modified string after performing match/replace on the given string. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval Error code\n * \\sa NDK_REGEX_REPLACE(), NDK_TOKENIZE()\n */\n int __stdcall NDK_REGEX_REPLACE(LPCTSTR szLine, ///< [in] is the input string to process. \n LPCTSTR szKey, ///< [in] is the regular expression (PERL-style) (e.g. \"^\\d\\w{1,2}.*$\"). \n LPCTSTR szValue, ///< [in] is the value to replace the match with. If missing or omitted, an empty string is used\n BOOL ignoreCase, ///< [in] is a flag to instruct the matching function whether to ignore letter-case. If missing, ignore_case is set to TRUE\n BOOL global, ///< [in] is a flag to instruct the function whether to match and replace the first occurence (FALSE) or all the matches (TRUE). \n LPTSTR pRetVal, ///< [out] is the modified string after replacement\n size_t nSize ///< [in] is the size of the output buffer (pRetVal)\n );\n /*! 
\n * \\brief calculates the value of the regression function for an intermediate x-value. \n * \\return status code of the operation\n * \\retval #NDK_SUCCESS Operation successful\n * \\retval Error code\n * \\sa NDK_TREND(), NDK_DETREND()\n */\n int __stdcall\tNDK_REGRESSION( double* X, ///< [in] is the x-component of the input data table (a one dimensional array).\n size_t nX, ///< [in] is the number of elements in X.\n double* Y, ///< [in] is the y-component (i.e. function) of the input data table (a one dimensional array).\n size_t nY, ///< [in] is the number of elements in Y\n WORD nRegressType, ///< [in] is the model description flag for the trend function (1 = Linear (default), 2 = Polynomial, 3 = Exponential, 4 = Logarithmic, 5 = Power). \n WORD POrder, ///< [in] is the polynomial order. This is only relevant for a polynomial type of trend and is ignored for all others. If missing, POrder = 1.\n double intercept, ///< [in] is the constant or the intercept value to fix (e.g. zero). If missing (NaN), an intercept will not be fixed and is computed normally.\n double target, ///< [in] is the desired x-value to calculate regression value for (a single value). \n WORD nRetType, ///< [in] is a switch to select the return output (1 = Forecast value (default), 2 = Upper limit, 3 = Lower Limit, 4 = R-Squared). \n double alpha, ///< [in] is the statistical significance or confidence level (i.e. alpha). 
If missing or omitted, an alpha value of 5% is assumed\n double* retVal ///< [out] is the calculated value\n );\n ///@}\n\n}\n/// @}", "meta": {"content_hash": "72a47784762482a67fe4141953762fc8", "timestamp": "", "source": "github", "line_count": 5079, "max_line_length": 341, "avg_line_length": 74.60858436700138, "alnum_prop": 0.5582590245872006, "repo_name": "spiderxl/sdk", "id": "73cc4f524ba8dcf455ab31eb18b9f08a36e34af3", "size": "379454", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "include/SFSDK.h", "mode": "33188", "license": "mit", "language": [{"name": "C", "bytes": "430124"}, {"name": "C#", "bytes": "23001"}, {"name": "C++", "bytes": "8895"}, {"name": "Visual Basic", "bytes": "218372"}]}} {"text": "\n\npackage org.apache.kafka.copycat.runtime;\n\nimport org.apache.kafka.common.KafkaException;\nimport org.apache.kafka.common.utils.SystemTime;\nimport org.apache.kafka.common.utils.Time;\nimport org.apache.kafka.clients.producer.KafkaProducer;\nimport org.apache.kafka.clients.producer.ProducerConfig;\nimport org.apache.kafka.common.utils.Utils;\nimport org.apache.kafka.copycat.cli.WorkerConfig;\nimport org.apache.kafka.copycat.connector.Connector;\nimport org.apache.kafka.copycat.connector.ConnectorContext;\nimport org.apache.kafka.copycat.connector.Task;\nimport org.apache.kafka.copycat.errors.CopycatException;\nimport org.apache.kafka.copycat.sink.SinkTask;\nimport org.apache.kafka.copycat.source.SourceTask;\nimport org.apache.kafka.copycat.storage.*;\nimport org.apache.kafka.copycat.util.ConnectorTaskId;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport java.util.Set;\n\n/**\n *

\n * Worker runs a (dynamic) set of tasks in a set of threads, doing the work of actually moving\n * data to/from Kafka.\n *

\n *

\n * Since each task has a dedicated thread, this is mainly just a container for them.\n *

\n */\npublic class Worker {\n private static final Logger log = LoggerFactory.getLogger(Worker.class);\n\n private Time time;\n private WorkerConfig config;\n private Converter keyConverter;\n private Converter valueConverter;\n private Converter internalKeyConverter;\n private Converter internalValueConverter;\n private OffsetBackingStore offsetBackingStore;\n private HashMap connectors = new HashMap<>();\n private HashMap tasks = new HashMap<>();\n private KafkaProducer producer;\n private SourceTaskOffsetCommitter sourceTaskOffsetCommitter;\n\n public Worker(WorkerConfig config, OffsetBackingStore offsetBackingStore) {\n this(new SystemTime(), config, offsetBackingStore);\n }\n\n @SuppressWarnings(\"unchecked\")\n public Worker(Time time, WorkerConfig config, OffsetBackingStore offsetBackingStore) {\n this.time = time;\n this.config = config;\n this.keyConverter = config.getConfiguredInstance(WorkerConfig.KEY_CONVERTER_CLASS_CONFIG, Converter.class);\n this.keyConverter.configure(config.originalsWithPrefix(\"key.converter.\"), true);\n this.valueConverter = config.getConfiguredInstance(WorkerConfig.VALUE_CONVERTER_CLASS_CONFIG, Converter.class);\n this.valueConverter.configure(config.originalsWithPrefix(\"value.converter.\"), false);\n this.internalKeyConverter = config.getConfiguredInstance(WorkerConfig.INTERNAL_KEY_CONVERTER_CLASS_CONFIG, Converter.class);\n this.internalKeyConverter.configure(config.originalsWithPrefix(\"internal.key.converter.\"), true);\n this.internalValueConverter = config.getConfiguredInstance(WorkerConfig.INTERNAL_VALUE_CONVERTER_CLASS_CONFIG, Converter.class);\n this.internalValueConverter.configure(config.originalsWithPrefix(\"internal.value.converter.\"), false);\n\n this.offsetBackingStore = offsetBackingStore;\n this.offsetBackingStore.configure(config.originals());\n }\n\n public void start() {\n log.info(\"Worker starting\");\n\n Properties unusedConfigs = config.unusedProperties();\n\n Map producerProps = new HashMap<>();\n 
producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Utils.join(config.getList(WorkerConfig.BOOTSTRAP_SERVERS_CONFIG), \",\"));\n producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, \"org.apache.kafka.common.serialization.ByteArraySerializer\");\n producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, \"org.apache.kafka.common.serialization.ByteArraySerializer\");\n for (String propName : unusedConfigs.stringPropertyNames()) {\n producerProps.put(propName, unusedConfigs.getProperty(propName));\n }\n producer = new KafkaProducer<>(producerProps);\n\n offsetBackingStore.start();\n sourceTaskOffsetCommitter = new SourceTaskOffsetCommitter(time, config);\n\n log.info(\"Worker started\");\n }\n\n public void stop() {\n log.info(\"Worker stopping\");\n\n long started = time.milliseconds();\n long limit = started + config.getLong(WorkerConfig.TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS_CONFIG);\n\n for (Map.Entry entry : connectors.entrySet()) {\n Connector conn = entry.getValue();\n log.warn(\"Shutting down connector {} uncleanly; herder should have shut down connectors before the\" +\n \"Worker is stopped.\", conn);\n try {\n conn.stop();\n } catch (CopycatException e) {\n log.error(\"Error while shutting down connector \" + conn, e);\n }\n }\n\n for (Map.Entry entry : tasks.entrySet()) {\n WorkerTask task = entry.getValue();\n log.warn(\"Shutting down task {} uncleanly; herder should have shut down \"\n + \"tasks before the Worker is stopped.\", task);\n try {\n task.stop();\n } catch (CopycatException e) {\n log.error(\"Error while shutting down task \" + task, e);\n }\n }\n\n for (Map.Entry entry : tasks.entrySet()) {\n WorkerTask task = entry.getValue();\n log.debug(\"Waiting for task {} to finish shutting down\", task);\n if (!task.awaitStop(Math.max(limit - time.milliseconds(), 0)))\n log.error(\"Graceful shutdown of task {} failed.\", task);\n task.close();\n }\n\n long timeoutMs = limit - time.milliseconds();\n 
sourceTaskOffsetCommitter.close(timeoutMs);\n\n offsetBackingStore.stop();\n\n log.info(\"Worker stopped\");\n }\n\n /**\n * Add a new connector.\n * @param connConfig connector configuration\n * @param ctx context for the connector\n */\n public void addConnector(ConnectorConfig connConfig, ConnectorContext ctx) {\n String connName = connConfig.getString(ConnectorConfig.NAME_CONFIG);\n Class maybeConnClass = connConfig.getClass(ConnectorConfig.CONNECTOR_CLASS_CONFIG);\n log.info(\"Creating connector {} of type {}\", connName, maybeConnClass.getName());\n\n Class connClass;\n try {\n connClass = maybeConnClass.asSubclass(Connector.class);\n } catch (ClassCastException e) {\n throw new CopycatException(\"Specified class is not a subclass of Connector: \" + maybeConnClass.getName());\n }\n\n if (connectors.containsKey(connName))\n throw new CopycatException(\"Connector with name \" + connName + \" already exists\");\n\n final Connector connector = instantiateConnector(connClass);\n connector.initialize(ctx);\n try {\n Map originals = connConfig.originals();\n Properties props = new Properties();\n props.putAll(originals);\n connector.start(props);\n } catch (CopycatException e) {\n throw new CopycatException(\"Connector threw an exception while starting\", e);\n }\n\n connectors.put(connName, connector);\n\n log.info(\"Finished creating connector {}\", connName);\n }\n\n private static Connector instantiateConnector(Class connClass) {\n try {\n return Utils.newInstance(connClass);\n } catch (Throwable t) {\n // Catches normal exceptions due to instantiation errors as well as any runtime errors that\n // may be caused by user code\n throw new CopycatException(\"Failed to create connector instance\", t);\n }\n }\n\n public Map> reconfigureConnectorTasks(String connName, int maxTasks, List sinkTopics) {\n log.trace(\"Reconfiguring connector tasks for {}\", connName);\n\n Connector connector = connectors.get(connName);\n if (connector == null)\n throw new 
CopycatException(\"Connector \" + connName + \" not found in this worker.\");\n\n Map> result = new HashMap<>();\n String taskClassName = connector.taskClass().getName();\n int index = 0;\n for (Properties taskProps : connector.taskConfigs(maxTasks)) {\n ConnectorTaskId taskId = new ConnectorTaskId(connName, index);\n index++;\n Map taskConfig = Utils.propsToStringMap(taskProps);\n taskConfig.put(TaskConfig.TASK_CLASS_CONFIG, taskClassName);\n if (sinkTopics != null)\n taskConfig.put(SinkTask.TOPICS_CONFIG, Utils.join(sinkTopics, \",\"));\n result.put(taskId, taskConfig);\n }\n return result;\n }\n\n public void stopConnector(String connName) {\n log.info(\"Stopping connector {}\", connName);\n\n Connector connector = connectors.get(connName);\n if (connector == null)\n throw new CopycatException(\"Connector \" + connName + \" not found in this worker.\");\n\n try {\n connector.stop();\n } catch (CopycatException e) {\n log.error(\"Error shutting down connector {}: \", connector, e);\n }\n\n connectors.remove(connName);\n\n log.info(\"Stopped connector {}\", connName);\n }\n\n /**\n * Get the IDs of the connectors currently running in this worker.\n */\n public Set connectorNames() {\n return connectors.keySet();\n }\n\n /**\n * Add a new task.\n * @param id Globally unique ID for this task.\n * @param taskConfig the parsed task configuration\n */\n public void addTask(ConnectorTaskId id, TaskConfig taskConfig) {\n log.info(\"Creating task {}\", id);\n\n if (tasks.containsKey(id)) {\n String msg = \"Task already exists in this worker; the herder should not have requested \"\n + \"that this : \" + id;\n log.error(msg);\n throw new CopycatException(msg);\n }\n\n final Task task = instantiateTask(taskConfig.getClass(TaskConfig.TASK_CLASS_CONFIG).asSubclass(Task.class));\n\n // Decide which type of worker task we need based on the type of task.\n final WorkerTask workerTask;\n if (task instanceof SourceTask) {\n SourceTask sourceTask = (SourceTask) task;\n 
OffsetStorageReader offsetReader = new OffsetStorageReaderImpl(offsetBackingStore, id.connector(),\n internalKeyConverter, internalValueConverter);\n OffsetStorageWriter offsetWriter = new OffsetStorageWriter(offsetBackingStore, id.connector(),\n internalKeyConverter, internalValueConverter);\n workerTask = new WorkerSourceTask(id, sourceTask, keyConverter, valueConverter, producer,\n offsetReader, offsetWriter, config, time);\n } else if (task instanceof SinkTask) {\n workerTask = new WorkerSinkTask(id, (SinkTask) task, config, keyConverter, valueConverter, time);\n } else {\n log.error(\"Tasks must be a subclass of either SourceTask or SinkTask\", task);\n throw new CopycatException(\"Tasks must be a subclass of either SourceTask or SinkTask\");\n }\n\n // Start the task before adding modifying any state, any exceptions are caught higher up the\n // call chain and there's no cleanup to do here\n Properties props = new Properties();\n props.putAll(taskConfig.originals());\n workerTask.start(props);\n if (task instanceof SourceTask) {\n WorkerSourceTask workerSourceTask = (WorkerSourceTask) workerTask;\n sourceTaskOffsetCommitter.schedule(id, workerSourceTask);\n }\n tasks.put(id, workerTask);\n }\n\n private static Task instantiateTask(Class taskClass) {\n try {\n return Utils.newInstance(taskClass);\n } catch (KafkaException e) {\n throw new CopycatException(\"Task class not found\", e);\n }\n }\n\n public void stopTask(ConnectorTaskId id) {\n log.info(\"Stopping task {}\", id);\n\n WorkerTask task = getTask(id);\n if (task instanceof WorkerSourceTask)\n sourceTaskOffsetCommitter.remove(id);\n task.stop();\n if (!task.awaitStop(config.getLong(WorkerConfig.TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS_CONFIG)))\n log.error(\"Graceful stop of task {} failed.\", task);\n task.close();\n tasks.remove(id);\n }\n\n /**\n * Get the IDs of the tasks currently running in this worker.\n */\n public Set taskIds() {\n return tasks.keySet();\n }\n\n private WorkerTask 
getTask(ConnectorTaskId id) {\n WorkerTask task = tasks.get(id);\n if (task == null) {\n log.error(\"Task not found: \" + id);\n throw new CopycatException(\"Task not found: \" + id);\n }\n return task;\n }\n\n public Converter getInternalKeyConverter() {\n return internalKeyConverter;\n }\n\n public Converter getInternalValueConverter() {\n return internalValueConverter;\n }\n}\n", "meta": {"content_hash": "6dc7afa4b06ec1e47ae7f5bc03bc904f", "timestamp": "", "source": "github", "line_count": 321, "max_line_length": 139, "avg_line_length": 41.308411214953274, "alnum_prop": 0.6619909502262443, "repo_name": "bluebreezecf/kafka", "id": "b37e49f82b0bfabc61ddb7dc8a9db121a92194e6", "size": "14069", "binary": false, "copies": "1", "ref": "refs/heads/trunk", "path": "copycat/runtime/src/main/java/org/apache/kafka/copycat/runtime/Worker.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "Batchfile", "bytes": "20579"}, {"name": "HTML", "bytes": "5443"}, {"name": "Java", "bytes": "3079838"}, {"name": "Python", "bytes": "481095"}, {"name": "Scala", "bytes": "2641655"}, {"name": "Shell", "bytes": "92527"}, {"name": "XSLT", "bytes": "7116"}]}} {"text": "\n\n\npackage org.apache.geode.internal.cache.entries;\n\n// DO NOT modify this class. 
It was generated from LeafRegionEntry.cpp\nimport java.util.concurrent.atomic.AtomicLongFieldUpdater;\n\nimport org.apache.geode.internal.cache.DiskId;\nimport org.apache.geode.internal.cache.DiskStoreImpl;\nimport org.apache.geode.internal.cache.PlaceHolderDiskRegion;\nimport org.apache.geode.internal.cache.RegionEntry;\nimport org.apache.geode.internal.cache.RegionEntryContext;\nimport org.apache.geode.internal.cache.eviction.EvictionController;\nimport org.apache.geode.internal.cache.persistence.DiskRecoveryStore;\nimport org.apache.geode.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;\n\n/*\n * macros whose definition changes this class:\n *\n * disk: DISK lru: LRU stats: STATS versioned: VERSIONED offheap: OFFHEAP\n *\n * One of the following key macros must be defined:\n *\n * key object: KEY_OBJECT key int: KEY_INT key long: KEY_LONG key uuid: KEY_UUID key string1:\n * KEY_STRING1 key string2: KEY_STRING2\n */\n/**\n * Do not modify this class. It was generated. Instead modify LeafRegionEntry.cpp and then run\n * ./dev-tools/generateRegionEntryClasses.sh (it must be run from the top level directory).\n */\npublic class VMThinDiskRegionEntryHeapIntKey extends VMThinDiskRegionEntryHeap {\n // --------------------------------------- common fields ----------------------------------------\n private static final AtomicLongFieldUpdater LAST_MODIFIED_UPDATER =\n AtomicLongFieldUpdater.newUpdater(VMThinDiskRegionEntryHeapIntKey.class, \"lastModified\");\n protected int hash;\n private HashEntry nextEntry;\n @SuppressWarnings(\"unused\")\n private volatile long lastModified;\n private volatile Object value;\n // ---------------------------------------- disk fields -----------------------------------------\n /**\n * @since GemFire 5.1\n */\n protected DiskId id;\n // --------------------------------------- key fields -------------------------------------------\n // DO NOT modify this class. 
It was generated from LeafRegionEntry.cpp\n private final int key;\n\n public VMThinDiskRegionEntryHeapIntKey(final RegionEntryContext context, final int key,\n final Object value) {\n super(context, (value instanceof RecoveredEntry ? null : value));\n // DO NOT modify this class. It was generated from LeafRegionEntry.cpp\n initialize(context, value);\n this.key = key;\n }\n\n // DO NOT modify this class. It was generated from LeafRegionEntry.cpp\n @Override\n protected Object getValueField() {\n return this.value;\n }\n\n @Override\n protected void setValueField(final Object value) {\n this.value = value;\n }\n\n @Override\n protected long getLastModifiedField() {\n return LAST_MODIFIED_UPDATER.get(this);\n }\n\n @Override\n protected boolean compareAndSetLastModifiedField(final long expectedValue, final long newValue) {\n return LAST_MODIFIED_UPDATER.compareAndSet(this, expectedValue, newValue);\n }\n\n @Override\n public int getEntryHash() {\n return this.hash;\n }\n\n @Override\n protected void setEntryHash(final int hash) {\n this.hash = hash;\n }\n\n @Override\n public HashEntry getNextEntry() {\n return this.nextEntry;\n }\n\n @Override\n public void setNextEntry(final HashEntry nextEntry) {\n this.nextEntry = nextEntry;\n }\n\n // ----------------------------------------- disk code ------------------------------------------\n // DO NOT modify this class. It was generated from LeafRegionEntry.cpp\n protected void initialize(final RegionEntryContext context, final Object value) {\n diskInitialize(context, value);\n }\n\n @Override\n public int updateAsyncEntrySize(final EvictionController evictionController) {\n throw new IllegalStateException(\"should never be called\");\n }\n\n // DO NOT modify this class. 
It was generated from LeafRegionEntry.cpp\n @Override\n public DiskId getDiskId() {\n return this.id;\n }\n\n @Override\n public void setDiskId(final RegionEntry oldEntry) {\n this.id = ((DiskEntry) oldEntry).getDiskId();\n }\n\n private void diskInitialize(final RegionEntryContext context, final Object value) {\n DiskRecoveryStore diskRecoveryStore = (DiskRecoveryStore) context;\n DiskStoreImpl diskStore = diskRecoveryStore.getDiskStore();\n long maxOplogSize = diskStore.getMaxOplogSize();\n // get appropriate instance of DiskId implementation based on maxOplogSize\n this.id = DiskId.createDiskId(maxOplogSize, true, diskStore.needsLinkedList());\n Helper.initialize(this, diskRecoveryStore, value);\n }\n\n // ----------------------------------------- key code -------------------------------------------\n // DO NOT modify this class. It was generated from LeafRegionEntry.cpp\n @Override\n public Object getKey() {\n return this.key;\n }\n\n @Override\n public boolean isKeyEqual(final Object key) {\n if (key instanceof Integer) {\n return ((Integer) key).intValue() == this.key;\n }\n return false;\n }\n // DO NOT modify this class. 
It was generated from LeafRegionEntry.cpp\n}\n", "meta": {"content_hash": "e13cb1c42ae90d6e3f52ddd628c608d8", "timestamp": "", "source": "github", "line_count": 145, "max_line_length": 102, "avg_line_length": 34.84827586206897, "alnum_prop": 0.687908173362359, "repo_name": "smanvi-pivotal/geode", "id": "92363f96a7e310faa9eb04541b8f2908e9d9da16", "size": "5842", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "geode-core/src/main/java/org/apache/geode/internal/cache/entries/VMThinDiskRegionEntryHeapIntKey.java", "mode": "33188", "license": "apache-2.0", "language": [{"name": "CSS", "bytes": "106707"}, {"name": "Groovy", "bytes": "2928"}, {"name": "HTML", "bytes": "3998074"}, {"name": "Java", "bytes": "26700079"}, {"name": "JavaScript", "bytes": "1781013"}, {"name": "Ruby", "bytes": "6751"}, {"name": "Shell", "bytes": "21891"}]}} {"text": "\ufeffusing System.Reflection;\nusing System.Resources;\nusing System.Runtime.CompilerServices;\nusing System.Runtime.InteropServices;\nusing System.Windows;\n\n// General Information about an assembly is controlled through the following \n// set of attributes. Change these attribute values to modify the information\n// associated with an assembly.\n[assembly: AssemblyTitle(\"nil-runtime\")]\n[assembly: AssemblyDescription(\"\")]\n[assembly: AssemblyConfiguration(\"\")]\n[assembly: AssemblyCompany(\"\")]\n[assembly: AssemblyProduct(\"nil-runtime\")]\n[assembly: AssemblyCopyright(\"Copyright \u00a9 2016\")]\n[assembly: AssemblyTrademark(\"\")]\n[assembly: AssemblyCulture(\"\")]\n\n// Setting ComVisible to false makes the types in this assembly not visible \n// to COM components. If you need to access a type in this assembly from \n// COM, set the ComVisible attribute to true on that type.\n[assembly: ComVisible(false)]\n\n//In order to begin building localizable applications, set \n//CultureYouAreCodingWith in your .csproj file\n//inside a . 
For example, if you are using US english\n//in your source files, set the to en-US. Then uncomment\n//the NeutralResourceLanguage attribute below. Update the \"en-US\" in\n//the line below to match the UICulture setting in the project file.\n\n//[assembly: NeutralResourcesLanguage(\"en-US\", UltimateResourceFallbackLocation.Satellite)]\n\n\n[assembly: ThemeInfo(\n ResourceDictionaryLocation.None, //where theme specific resource dictionaries are located\n //(used if a resource is not found in the page, \n // or application resource dictionaries)\n ResourceDictionaryLocation.SourceAssembly //where the generic resource dictionary is located\n //(used if a resource is not found in the page, \n // app, or any theme specific resource dictionaries)\n)]\n\n\n// Version information for an assembly consists of the following four values:\n//\n// Major Version\n// Minor Version \n// Build Number\n// Revision\n//\n// You can specify all the values or you can default the Build and Revision Numbers \n// by using the '*' as shown below:\n// [assembly: AssemblyVersion(\"1.0.*\")]\n[assembly: AssemblyVersion(\"1.0.0.0\")]\n[assembly: AssemblyFileVersion(\"1.0.0.0\")]\n", "meta": {"content_hash": "77481f0647e02de905ab2c3f50bc9b00", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 98, "avg_line_length": 43.236363636363635, "alnum_prop": 0.7060555088309504, "repo_name": "zhongzf/nativescript-dotnet-runtime", "id": "9bfbc4e51d7048250d63d6d0c6a7f02e1c6095eb", "size": "2381", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "nil-runtime/Properties/AssemblyInfo.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "85788"}, {"name": "JavaScript", "bytes": "2760248"}]}} {"text": "
{% csrf_token %}\r\n \u00bfEst\u00e1s seguro que deseas borrar \"{{ object }}\"?\r\n \r\n
", "meta": {"content_hash": "c4fef9e41ae222ee1cc0fea65addde63", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 51, "avg_line_length": 35.5, "alnum_prop": 0.5985915492957746, "repo_name": "GoberInfinity/ExampleDjango", "id": "417a387fe6c3466b70981ca6fae8d52886505327", "size": "144", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "crudclassviews/templates/crudclassviews/person_confirm_delete.html", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "187"}, {"name": "HTML", "bytes": "2054"}, {"name": "JavaScript", "bytes": "615"}, {"name": "Python", "bytes": "16642"}]}} {"text": "import * as yargs from \"yargs\";\n\nexport function initCLI() {\n console.log(\"init cli\");\n}\n", "meta": {"content_hash": "13878994ea13d4b9806c7eaf242fb0f4", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 31, "avg_line_length": 18.4, "alnum_prop": 0.6521739130434783, "repo_name": "pnml/fuse-box", "id": "b78d2c491e5886bae60ff92173defd759a4f0119", "size": "92", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/cli/entry.ts", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "533"}, {"name": "HTML", "bytes": "44"}, {"name": "JavaScript", "bytes": "646157"}, {"name": "TypeScript", "bytes": "926593"}]}} {"text": "/*global module, test, equal, ok, jQuery */\n(function ($) {\n \"use strict\";\n\n /* If there is no delegation support, forcibly reset the plugin between\n * test runs\n */\n function resetPlugin() {\n if (!$.fn.on && !$.fn.delegate && !$.fn.live) {\n $.fn.example.boundClassNames = [];\n }\n }\n\n module(\"Basic usage\", {\n setup: function () {\n $('#basic1').example('Test');\n $('#basicform').submit(function (e) {\n e.preventDefault();\n });\n },\n teardown: resetPlugin\n });\n test(\"should have an example set\", function () {\n equal($('#basic1').val(), \"Test\", \"The example should read 'Test'.\");\n ok($('#basic1').hasClass('example'), 
\"The class should be 'example'.\");\n });\n test(\"should be cleared on focus\", function () {\n $('#basic1').focus();\n\n equal($('#basic1').val(), \"\", \"The example should be cleared.\");\n ok(!$('#basic1').hasClass('example'), \"The class should no longer be 'example'.\");\n });\n test(\"should reappear on blur if empty\", function () {\n $('#basic1').focus().blur();\n\n equal($('#basic1').val(), \"Test\", \"The example should read 'Test'.\");\n ok($('#basic1').hasClass('example'), \"The class should be 'example'.\");\n });\n test(\"should not be populated with an example on blur if user input is present\", function () {\n $('#basic1').focus();\n $('#basic1').val(\"My own value\");\n $('#basic1').blur();\n\n equal($('#basic1').val(), \"My own value\", \"The example should not be cleared.\");\n ok(!$('#basic1').hasClass('example'), \"The class should not be 'example'.\");\n });\n test(\"should not be populated with an example on focus if user input is present\", function () {\n $('#basic1').focus().val(\"My own value\").blur().focus();\n\n equal($('#basic1').val(), \"My own value\", \"The example should not be cleared.\");\n ok(!$('#basic1').hasClass('example'), \"The class should not be 'example'.\");\n });\n test(\"should be cleared on form submit\", function () {\n $('#basicform').submit();\n\n equal($('#basic1').val(), \"\", \"The example should be cleared.\");\n });\n test(\"shouldn't clear user inputs on form submit\", function () {\n $('#basic2').focus().val(\"User input\");\n $('#basicform').triggerHandler('submit');\n\n equal($('#basic2').val(), \"User input\", \"The user input should be intact.\");\n });\n\n module(\"Using custom classes\", {\n setup: function () {\n $('#custom1').example(\"Test\", {className: \"notExample\"});\n },\n teardown: resetPlugin\n });\n test(\"should have an example set\", function () {\n equal($('#custom1').val(), \"Test\", \"The example should be set.\");\n ok($('#custom1').hasClass('notExample'), \"The class should be 
the specified one.\");\n ok(!$('#custom1').hasClass('example'), \"The class should not be 'example'.\");\n });\n test(\"should be cleared on focus\", function () {\n $('#custom1').focus();\n\n equal($('#custom1').val(), \"\", \"The example should be cleared.\");\n ok(!$('#custom1').hasClass('notExample'), \"The class should not be the specified one.\");\n });\n test(\"should be reappear on blur\", function () {\n $('#custom1').focus().blur();\n\n equal($('#custom1').val(), \"Test\", \"The example should reappear.\");\n ok($('#custom1').hasClass('notExample'), \"The class should be the specified one.\");\n });\n\n module(\"Multiple forms\", {\n setup: function () {\n $('#multipleform1, #multipleform2').submit(function (e) {\n e.preventDefault();\n });\n $('#mf1').example('Test');\n $('#mf2').example('Test');\n },\n teardown: resetPlugin\n });\n test(\"should only clear examples in that form\", function () {\n $('#multipleform1').submit();\n\n equal($('#mf1').val(), \"\", \"The example should be cleared.\");\n equal($('#mf2').val(), \"Test\", \"An example in another form should not be cleared.\");\n });\n\n module(\"Simple callback\", {\n setup: function () {\n $('#callback1').example(function () { return \"Callback Test\"; });\n },\n teardown: resetPlugin\n });\n test(\"should have an example set\", function () {\n equal($('#callback1').val(), \"Callback Test\", \"The example should read 'Callback Test'.\");\n ok($('#callback1').hasClass('example'), \"The class should be 'example'.\");\n });\n test(\"should be cleared on focus\", function () {\n $('#callback1').focus();\n\n equal($('#callback1').val(), \"\", \"The example should be cleared.\");\n ok(!$('#callback1').hasClass('example'), \"The class should no longer be 'example'.\");\n });\n test(\"should reappear on blur if empty\", function () {\n $('#callback1').focus().blur();\n\n equal($('#callback1').val(), \"Callback Test\", \"The example should read 'Callback Test'.\");\n 
ok($('#callback1').hasClass('example'), \"The class should be 'example'.\");\n });\n\n module(\"More complicated callback\", {\n setup: function () {\n $('#callback2').example(function () {\n return $(this).attr('title');\n });\n },\n teardown: resetPlugin\n });\n test(\"should have an example set\", function () {\n equal($('#callback2').val(), \"Starting\", \"The example should read 'Starting'.\");\n ok($('#callback2').hasClass('example'), \"The class should be 'example'.\");\n });\n test(\"should be cleared on focus\", function () {\n $('#callback2').focus();\n\n equal($('#callback2').val(), \"\", \"The example should be cleared.\");\n ok(!$('#callback2').hasClass('example'), \"The class should no longer be 'example'.\");\n });\n test(\"should reappear on blur if empty\", function () {\n $('#callback2').focus().blur();\n\n equal($('#callback2').val(), \"Starting\", \"The example should read 'Starting'.\");\n ok($('#callback2').hasClass('example'), \"The class should be 'example'.\");\n });\n test(\"should run the callback every time instead of caching it\", function () {\n $('#callback2').attr('title', 'Another');\n $('#callback2').focus().blur();\n\n equal($('#callback2').val(), \"Another\", \"The example should read 'Another'.\");\n ok($('#callback2').hasClass('example'), \"The class should be 'example'.\");\n });\n\n module(\"Metadata plugin\", {\n setup: function () {\n $('#m1').example();\n },\n teardown: resetPlugin\n });\n test(\"should have an example set\", function () {\n equal($('#m1').val(), \"Something\", \"The example should read 'Something'.\");\n ok($('#m1').hasClass('m1'), \"The class should be 'm1'.\");\n });\n test(\"should be cleared on focus\", function () {\n $('#m1').focus();\n\n equal($('#m1').val(), \"\", \"The example should be cleared.\");\n ok(!$('#m1').hasClass('m1'), \"The class should no longer be 'm1'.\");\n });\n test(\"should reappear on blur if empty\", function () {\n $('#m1').focus().blur();\n\n equal($('#m1').val(), 
\"Something\", \"The example should read 'Something'.\");\n ok($('#m1').hasClass('m1'), \"The class should be 'm1'.\");\n });\n test(\"should be overridden by arguments\", function () {\n $('#m2').example('Precedence', {className: 'o1'});\n\n equal($('#m2').val(), \"Precedence\", \"The example in the arguments should take precedence\");\n ok($('#m2').hasClass('o1'), \"The class should be 'o1'.\");\n });\n\n module(\"On page load\", {\n teardown: resetPlugin\n });\n test(\"should not set an example if a value is already set\", function () {\n $('#load1').example(\"Test\");\n\n equal($('#load1').val(), \"Already filled in\", \"The example should not be set.\");\n ok(!$('#load1').hasClass('example'), \"The class should not be 'example'.\");\n });\n test(\"should not clear a field with a value even when using a callback\", function () {\n $('#load2').example(function () {\n return \"Nope\";\n });\n\n equal($('#load2').val(), \"Default\", \"The value should be the default.\");\n ok(!$('#load2').hasClass('example'), \"The class should not be 'example'.\");\n });\n\n module(\"Changing values by Javascript\", {\n setup: function () {\n $('#f1').example('Example');\n },\n teardown: resetPlugin\n });\n test(\"should set example\", function () {\n equal($('#f1').val(), \"Example\", \"The example should read 'Example'.\");\n ok($('#f1').hasClass('example'), \"The example class should be set.\");\n });\n\n test(\"should remove example class when changed\", function () {\n $('#f1').val(\"New value\");\n $('#f1').change();\n\n equal($('#f1').val(), \"New value\", \"Value should be changed to 'New value'.\");\n ok(!$('#f1').hasClass('example'), \"The example class should no longer be set.\");\n\n /* Clear the field between test runs. 
*/\n $('#f1').val('');\n });\n\n module(\"Clearing values when loaded from cache\", {\n teardown: resetPlugin\n });\n test(\"value should be set to default value\", function () {\n\n /* Fake loading from cache by setting the example to be different to\n * the recorded defaultValue.\n */\n $('#c1').val('Cached example').example('Cached example');\n equal($('#c1').val(), \"Filled in\", \"Value should have been reset to 'Filled in'.\");\n });\n test(\"value should be cleared and set to the example if without default\", function () {\n $('#c2').val('Cached example').example('Cached example');\n equal($('#c2').val(), 'Cached example', \"Value should have been emptied.\");\n ok($('#c2').hasClass('example'), 'The example class should be set.');\n });\n test(\"value is not touched if it doesn't match the example\", function () {\n $('#c3').val('Some user input').example('Test');\n equal($('#c3').val(), 'Some user input', 'Value should not have been modified.');\n ok(!$('#c3').hasClass('example'), 'The example class should not be set.');\n });\n test('value is always cleared if the example is a callback', function () {\n $('#c4').val('Some user input').example(function () {\n return 'Test';\n });\n equal($('#c4').val(), 'Test', 'The cached value is overridden.');\n ok($('#c4').hasClass('example'), 'The example class should be set.');\n });\n test('value is not touched if it is the default', function () {\n $('#c5').val('Some default').example('Test');\n equal($('#c5').val(), 'Some default', 'Value should not have been modified.');\n ok(!$('#c5').hasClass('example'), 'The example class should not be set.');\n });\n\n module('Custom events', {\n teardown: resetPlugin\n });\n test('a specific form is cleared when calling example:resetForm on it', function () {\n $('#ce1, #ce2').example('Testing');\n $('#custom').trigger('example:resetForm');\n equal($('#ce1').val(), '', 'The value should have been cleared.');\n ok(!$('#ce1').hasClass('example'), 'The example class should not 
be set.');\n equal($('#ce2').val(), 'Testing', 'The value should not have been cleared.');\n ok($('#ce2').hasClass('example'), 'The example class should be set.');\n });\n test('triggering example:resetForm on a field will bubble to the form', function () {\n $('#ce1').example('Testing');\n $('#ce1').trigger('example:resetForm');\n equal($('#ce1').val(), '', 'The value should have been cleared.');\n ok(!$('#ce1').hasClass('example'), 'The example class should not be set.');\n });\n}(jQuery));\n", "meta": {"content_hash": "36ff2e589dff2edf047d73e54038b016", "timestamp": "", "source": "github", "line_count": 281, "max_line_length": 99, "avg_line_length": 41.779359430604984, "alnum_prop": 0.547274275979557, "repo_name": "mudge/jquery_example", "id": "bc881493b21f22c98ac4deb594978428aff164c3", "size": "11740", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/jquery.example_test.js", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "CSS", "bytes": "4602"}, {"name": "HTML", "bytes": "3105"}, {"name": "JavaScript", "bytes": "66502"}, {"name": "Ruby", "bytes": "147"}]}} {"text": " pathMatchLength) {\n pathMatch = p.replace('*', normalized.substr(wIndex, normalized.length - curPath.length + 1));\n pathMatchLength = curMatchLength;\n }\n }\n }\n\n // when no path was matched, act like the standard rule is *: baseURL/*\n if (!pathMatch) {\n if (normalized.substr(0, loader.baseURL.length) == loader.baseURL)\n pathMatch = normalized.substr(loader.baseURL.length);\n else if (normalized.match(absURLRegEx))\n throw 'Unable to calculate canonical name to bundle ' + normalized;\n else\n pathMatch = normalized;\n }\n\n if (plugin) {\n if (loader.pluginFirst) {\n pathMatch = getCanonicalName(loader, plugin) + '!' + pathMatch;\n }\n else {\n pathMatch += '!' 
+ getCanonicalName(loader, plugin);\n }\n }\n\n return pathMatch;\n}\n\nexports.getAlias = getAlias\nfunction getAlias(loader, canonicalName) {\n var bestAlias;\n\n function getBestAlias(mapped) {\n return canonicalName.substr(0, mapped.length) == mapped\n && (canonicalName.length == mapped.length || canonicalName[mapped.length + 1] == '/');\n }\n\n Object.keys(loader.map).forEach(function(alias) {\n if (getBestAlias(loader.map[alias]))\n bestAlias = alias;\n });\n\n if (bestAlias)\n return bestAlias;\n\n Object.keys(loader.packages).forEach(function(pkg) {\n Object.keys(loader.packages[pkg].map || {}).forEach(function(alias) {\n if (getBestAlias(loader.packages[pkg].map[alias]))\n bestAlias = alias;\n });\n });\n\n return bestAlias || canonicalName;\n}", "meta": {"content_hash": "461a16a1abe3111bec734ad1c864bb6e", "timestamp": "", "source": "github", "line_count": 155, "max_line_length": 111, "avg_line_length": 28.329032258064515, "alnum_prop": 0.6406285584149396, "repo_name": "synaptek/builder", "id": "93a55949e9728d5bc127000b355ec4bd5d83af27", "size": "4391", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/utils.js", "mode": "33188", "license": "mit", "language": [{"name": "HTML", "bytes": "7636"}, {"name": "JavaScript", "bytes": "113223"}]}} {"text": "package org.bitcoins.server\n\nimport akka.actor.ActorSystem\nimport akka.http.scaladsl.server._\nimport akka.http.scaladsl.server.Directives._\nimport akka.stream.ActorMaterializer\nimport org.bitcoins.node.Node\n\ncase class NodeRoutes(node: Node)(implicit system: ActorSystem)\n extends ServerRoute {\n implicit val materializer = ActorMaterializer()\n\n def handleCommand: PartialFunction[ServerCommand, StandardRoute] = {\n case ServerCommand(\"getpeers\", _) =>\n complete {\n Server.httpSuccess(\"TODO implement getpeers\")\n }\n }\n}\n", "meta": {"content_hash": "d057f744080eb16e27dd188a803ff6ea", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 70, 
"avg_line_length": 28.736842105263158, "alnum_prop": 0.7545787545787546, "repo_name": "bitcoin-s/bitcoin-s-core", "id": "c5db4d25695457a687cb9936687b2d53ada05d40", "size": "546", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/server/src/main/scala/org/bitcoins/server/NodeRoutes.scala", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "2266"}, {"name": "Dockerfile", "bytes": "148"}, {"name": "Java", "bytes": "44689"}, {"name": "JavaScript", "bytes": "22991"}, {"name": "Scala", "bytes": "2222831"}, {"name": "Shell", "bytes": "804"}]}} {"text": "Node.js - klaw\n==============\n\n\"JavaScript\n\nA Node.js file system walker extracted from [fs-extra](https://github.com/jprichardson/node-fs-extra).\n\n[![npm Package](https://img.shields.io/npm/v/klaw.svg?style=flat-square)](https://www.npmjs.org/package/klaw)\n[![build status](https://api.travis-ci.org/jprichardson/node-klaw.svg)](http://travis-ci.org/jprichardson/node-klaw)\n[![windows build status](https://ci.appveyor.com/api/projects/status/github/jprichardson/node-klaw?branch=master&svg=true)](https://ci.appveyor.com/project/jprichardson/node-klaw/branch/master)\n\nInstall\n-------\n\n npm i --save klaw\n\nIf you're using Typescript, we've got [types](https://github.com/DefinitelyTyped/DefinitelyTyped/pull/11492/files):\n\n npm i --save-dev @types/klaw\n\n\nName\n----\n\n`klaw` is `walk` backwards :p\n\n\nSync\n----\n\nIf you need the same functionality but synchronous, you can use [klaw-sync](https://github.com/manidlou/node-klaw-sync).\n\n\nUsage\n-----\n\n### klaw(directory, [options])\n\nReturns a [Readable stream](https://nodejs.org/api/stream.html#stream_class_stream_readable) that iterates\nthrough every file and directory starting with `dir` as the root. Every `read()` or `data` event\nreturns an object with two properties: `path` and `stats`. 
`path` is the full path of the file and\n`stats` is an instance of [fs.Stats](https://nodejs.org/api/fs.html#fs_class_fs_stats).\n\n- `directory`: The directory to recursively walk. Type `string`.\n- `options`: [Readable stream options](https://nodejs.org/api/stream.html#stream_new_stream_readable_options) and\nthe following:\n - `queueMethod` (`string`, default: `'shift'`): Either `'shift'` or `'pop'`. On `readdir()` array, call either `shift()` or `pop()`.\n - `pathSorter` (`function`, default: `undefined`): Sorting [function for Arrays](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort).\n - `fs` (`object`, default: [`graceful-fs`](https://github.com/isaacs/node-graceful-fs)): Use this to hook into the `fs` methods or to use [`mock-fs`](https://github.com/tschaub/mock-fs)\n - `filter` (`function`, default: `undefined`): Filtering [function for Arrays](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/filter)\n - `depthLimit` (`number`, default: `undefined`): The number of times to recurse before stopping. -1 for unlimited.\n - `preserveSymlinks` (`boolean`, default: `false`): Whether symlinks should be followed or treated as items themselves. If true, symlinks will be returned as items in their own right. If false, the linked item will be returned and potentially recursed into, in its stead.\n\n**Streams 1 (push) example:**\n\n```js\nconst klaw = require('klaw')\n\nconst items = [] // files, directories, symlinks, etc\nklaw('/some/dir')\n .on('data', item => items.push(item.path))\n .on('end', () => console.dir(items)) // => [ ... array of files]\n```\n\n**Streams 2 & 3 (pull) example:**\n\n```js\nconst klaw = require('klaw')\n\nconst items = [] // files, directories, symlinks, etc\nklaw('/some/dir')\n .on('readable', function () {\n let item\n while ((item = this.read())) {\n items.push(item.path)\n }\n })\n .on('end', () => console.dir(items)) // => [ ... 
array of files]\n```\n\n**```for-await-of``` example:**\n\n```js\nfor await (const file of klaw('/some/dir')) {\n console.log(file)\n}\n```\n\n### Error Handling\n\nListen for the `error` event.\n\nExample:\n\n```js\nconst klaw = require('klaw')\n\nklaw('/some/dir')\n .on('readable', function () {\n let item\n while ((item = this.read())) {\n // do something with the file\n }\n })\n .on('error', (err, item) => {\n console.log(err.message)\n console.log(item.path) // the file the error occurred on\n })\n .on('end', () => console.dir(items)) // => [ ... array of files]\n```\n\n\n### Aggregation / Filtering / Executing Actions (Through Streams)\n\nOn many occasions you may want to filter files based upon size, extension, etc.\nOr you may want to aggregate stats on certain file types. Or maybe you want to\nperform an action on certain file types.\n\nYou should use the module [`through2`](https://www.npmjs.com/package/through2) to easily\naccomplish this.\n\nInstall `through2`:\n\n npm i --save through2\n\n\n**Example (skipping directories):**\n\n```js\nconst klaw = require('klaw')\nconst through2 = require('through2')\n\nconst excludeDirFilter = through2.obj(function (item, enc, next) {\n if (!item.stats.isDirectory()) this.push(item)\n next()\n})\n\nconst items = [] // files, directories, symlinks, etc\nklaw('/some/dir')\n .pipe(excludeDirFilter)\n .on('data', item => items.push(item.path))\n .on('end', () => console.dir(items)) // => [ ... array of files without directories]\n```\n\n**Example (ignore hidden directories):**\n\n```js\nconst klaw = require('klaw')\nconst path = require('path')\n\nconst filterFunc = item => {\n const basename = path.basename(item)\n return basename === '.' 
|| basename[0] !== '.'\n}\n\nklaw('/some/dir', { filter: filterFunc })\n .on('data', item => {\n // only items of none hidden folders will reach here\n })\n```\n\n**Example (totaling size of PNG files):**\n\n```js\nconst klaw = require('klaw')\nconst path = require('path')\nconst through2 = require('through2')\n\nlet totalPngsInBytes = 0\nconst aggregatePngSize = through2.obj(function (item, enc, next) {\n if (path.extname(item.path) === '.png') {\n totalPngsInBytes += item.stats.size\n }\n this.push(item)\n next()\n})\n\nklaw('/some/dir')\n .pipe(aggregatePngSize)\n .on('data', item => items.push(item.path))\n .on('end', () => console.dir(totalPngsInBytes)) // => total of all pngs (bytes)\n```\n\n\n**Example (deleting all .tmp files):**\n\n```js\nconst fs = require('fs')\nconst klaw = require('klaw')\nconst through2 = require('through2')\n\nconst deleteAction = through2.obj(function (item, enc, next) {\n this.push(item)\n\n if (path.extname(item.path) === '.tmp') {\n item.deleted = true\n fs.unlink(item.path, next)\n } else {\n item.deleted = false\n next()\n }\n})\n\nconst deletedFiles = []\nklaw('/some/dir')\n .pipe(deleteAction)\n .on('data', item => {\n if (!item.deleted) return\n deletedFiles.push(item.path)\n })\n .on('end', () => console.dir(deletedFiles)) // => all deleted files\n```\n\nYou can even chain a bunch of these filters and aggregators together. By using\nmultiple pipes.\n\n**Example (using multiple filters / aggregators):**\n\n```js\nklaw('/some/dir')\n .pipe(filterCertainFiles)\n .pipe(deleteSomeOtherFiles)\n .on('end', () => console.log('all done!'))\n```\n\n**Example passing (piping) through errors:**\n\nNode.js does not `pipe()` errors. This means that the error on one stream, like\n`klaw` will not pipe through to the next. 
If you want to do this, do the following:\n\n```js\nconst klaw = require('klaw')\nconst through2 = require('through2')\n\nconst excludeDirFilter = through2.obj(function (item, enc, next) {\n if (!item.stats.isDirectory()) this.push(item)\n next()\n})\n\nconst items = [] // files, directories, symlinks, etc\nklaw('/some/dir')\n .on('error', err => excludeDirFilter.emit('error', err)) // forward the error on\n .pipe(excludeDirFilter)\n .on('data', item => items.push(item.path))\n .on('end', () => console.dir(items)) // => [ ... array of files without directories]\n```\n\n\n### Searching Strategy\n\nPass in options for `queueMethod`, `pathSorter`, and `depthLimit` to affect how the file system\nis recursively iterated. See the code for more details, it's less than 50 lines :)\n\n\n\nLicense\n-------\n\nMIT\n\nCopyright (c) 2015 [JP Richardson](https://github.com/jprichardson)\n", "meta": {"content_hash": "c40fc12d4ab5dceefb1c12fadbedf6c9", "timestamp": "", "source": "github", "line_count": 261, "max_line_length": 273, "avg_line_length": 29.448275862068964, "alnum_prop": 0.6731720010408535, "repo_name": "jprichardson/node-klaw", "id": "58639696d86cee533eac91a2beb816ce134ce8a3", "size": "7686", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [{"name": "JavaScript", "bytes": "11194"}]}} {"text": "``` diff\n+namespace System.Drawing.Printing {\n+ public enum Duplex {\n+ Default = -1,\n+ Horizontal = 3,\n+ Simplex = 1,\n+ Vertical = 2,\n+ }\n+ public class InvalidPrinterException : SystemException {\n+ public InvalidPrinterException(PrinterSettings settings);\n+ protected InvalidPrinterException(SerializationInfo info, StreamingContext context);\n+ public override void GetObjectData(SerializationInfo info, StreamingContext context);\n+ }\n+ public class Margins : ICloneable {\n+ public Margins();\n+ public Margins(int left, int right, int top, int bottom);\n+ public int Bottom { get; set; }\n+ 
public int Left { get; set; }\n+ public int Right { get; set; }\n+ public int Top { get; set; }\n+ public object Clone();\n+ public override bool Equals(object obj);\n+ public override int GetHashCode();\n+ public static bool operator ==(Margins m1, Margins m2);\n+ public static bool operator !=(Margins m1, Margins m2);\n+ public override string ToString();\n+ }\n+ public class PageSettings : ICloneable {\n+ public PageSettings();\n+ public PageSettings(PrinterSettings printerSettings);\n+ public Rectangle Bounds { get; }\n+ public bool Color { get; set; }\n+ public float HardMarginX { get; }\n+ public float HardMarginY { get; }\n+ public bool Landscape { get; set; }\n+ public Margins Margins { get; set; }\n+ public PaperSize PaperSize { get; set; }\n+ public PaperSource PaperSource { get; set; }\n+ public RectangleF PrintableArea { get; }\n+ public PrinterResolution PrinterResolution { get; set; }\n+ public PrinterSettings PrinterSettings { get; set; }\n+ public object Clone();\n+ public void CopyToHdevmode(IntPtr hdevmode);\n+ public void SetHdevmode(IntPtr hdevmode);\n+ public override string ToString();\n+ }\n+ public enum PaperKind {\n+ A2 = 66,\n+ A3 = 8,\n+ A3Extra = 63,\n+ A3ExtraTransverse = 68,\n+ A3Rotated = 76,\n+ A3Transverse = 67,\n+ A4 = 9,\n+ A4Extra = 53,\n+ A4Plus = 60,\n+ A4Rotated = 77,\n+ A4Small = 10,\n+ A4Transverse = 55,\n+ A5 = 11,\n+ A5Extra = 64,\n+ A5Rotated = 78,\n+ A5Transverse = 61,\n+ A6 = 70,\n+ A6Rotated = 83,\n+ APlus = 57,\n+ B4 = 12,\n+ B4Envelope = 33,\n+ B4JisRotated = 79,\n+ B5 = 13,\n+ B5Envelope = 34,\n+ B5Extra = 65,\n+ B5JisRotated = 80,\n+ B5Transverse = 62,\n+ B6Envelope = 35,\n+ B6Jis = 88,\n+ B6JisRotated = 89,\n+ BPlus = 58,\n+ C3Envelope = 29,\n+ C4Envelope = 30,\n+ C5Envelope = 28,\n+ C65Envelope = 32,\n+ C6Envelope = 31,\n+ CSheet = 24,\n+ Custom = 0,\n+ DLEnvelope = 27,\n+ DSheet = 25,\n+ ESheet = 26,\n+ Executive = 7,\n+ Folio = 14,\n+ GermanLegalFanfold = 41,\n+ GermanStandardFanfold = 40,\n+ InviteEnvelope = 
47,\n+ IsoB4 = 42,\n+ ItalyEnvelope = 36,\n+ JapaneseDoublePostcard = 69,\n+ JapaneseDoublePostcardRotated = 82,\n+ JapaneseEnvelopeChouNumber3 = 73,\n+ JapaneseEnvelopeChouNumber3Rotated = 86,\n+ JapaneseEnvelopeChouNumber4 = 74,\n+ JapaneseEnvelopeChouNumber4Rotated = 87,\n+ JapaneseEnvelopeKakuNumber2 = 71,\n+ JapaneseEnvelopeKakuNumber2Rotated = 84,\n+ JapaneseEnvelopeKakuNumber3 = 72,\n+ JapaneseEnvelopeKakuNumber3Rotated = 85,\n+ JapaneseEnvelopeYouNumber4 = 91,\n+ JapaneseEnvelopeYouNumber4Rotated = 92,\n+ JapanesePostcard = 43,\n+ JapanesePostcardRotated = 81,\n+ Ledger = 4,\n+ Legal = 5,\n+ LegalExtra = 51,\n+ Letter = 1,\n+ LetterExtra = 50,\n+ LetterExtraTransverse = 56,\n+ LetterPlus = 59,\n+ LetterRotated = 75,\n+ LetterSmall = 2,\n+ LetterTransverse = 54,\n+ MonarchEnvelope = 37,\n+ Note = 18,\n+ Number10Envelope = 20,\n+ Number11Envelope = 21,\n+ Number12Envelope = 22,\n+ Number14Envelope = 23,\n+ Number9Envelope = 19,\n+ PersonalEnvelope = 38,\n+ Prc16K = 93,\n+ Prc16KRotated = 106,\n+ Prc32K = 94,\n+ Prc32KBig = 95,\n+ Prc32KBigRotated = 108,\n+ Prc32KRotated = 107,\n+ PrcEnvelopeNumber1 = 96,\n+ PrcEnvelopeNumber10 = 105,\n+ PrcEnvelopeNumber10Rotated = 118,\n+ PrcEnvelopeNumber1Rotated = 109,\n+ PrcEnvelopeNumber2 = 97,\n+ PrcEnvelopeNumber2Rotated = 110,\n+ PrcEnvelopeNumber3 = 98,\n+ PrcEnvelopeNumber3Rotated = 111,\n+ PrcEnvelopeNumber4 = 99,\n+ PrcEnvelopeNumber4Rotated = 112,\n+ PrcEnvelopeNumber5 = 100,\n+ PrcEnvelopeNumber5Rotated = 113,\n+ PrcEnvelopeNumber6 = 101,\n+ PrcEnvelopeNumber6Rotated = 114,\n+ PrcEnvelopeNumber7 = 102,\n+ PrcEnvelopeNumber7Rotated = 115,\n+ PrcEnvelopeNumber8 = 103,\n+ PrcEnvelopeNumber8Rotated = 116,\n+ PrcEnvelopeNumber9 = 104,\n+ PrcEnvelopeNumber9Rotated = 117,\n+ Quarto = 15,\n+ Standard10x11 = 45,\n+ Standard10x14 = 16,\n+ Standard11x17 = 17,\n+ Standard12x11 = 90,\n+ Standard15x11 = 46,\n+ Standard9x11 = 44,\n+ Statement = 6,\n+ Tabloid = 3,\n+ TabloidExtra = 52,\n+ USStandardFanfold = 39,\n+ }\n+ public 
class PaperSize {\n+ public PaperSize();\n+ public PaperSize(string name, int width, int height);\n+ public int Height { get; set; }\n+ public PaperKind Kind { get; }\n+ public string PaperName { get; set; }\n+ public int RawKind { get; set; }\n+ public int Width { get; set; }\n+ public override string ToString();\n+ }\n+ public class PaperSource {\n+ public PaperSource();\n+ public PaperSourceKind Kind { get; }\n+ public int RawKind { get; set; }\n+ public string SourceName { get; set; }\n+ public override string ToString();\n+ }\n+ public enum PaperSourceKind {\n+ AutomaticFeed = 7,\n+ Cassette = 14,\n+ Custom = 257,\n+ Envelope = 5,\n+ FormSource = 15,\n+ LargeCapacity = 11,\n+ LargeFormat = 10,\n+ Lower = 2,\n+ Manual = 4,\n+ ManualFeed = 6,\n+ Middle = 3,\n+ SmallFormat = 9,\n+ TractorFeed = 8,\n+ Upper = 1,\n+ }\n+ public sealed class PreviewPageInfo {\n+ public PreviewPageInfo(Image image, Size physicalSize);\n+ public Image Image { get; }\n+ public Size PhysicalSize { get; }\n+ }\n+ public class PreviewPrintController : PrintController {\n+ public PreviewPrintController();\n+ public override bool IsPreview { get; }\n+ public virtual bool UseAntiAlias { get; set; }\n+ public PreviewPageInfo[] GetPreviewPageInfo();\n+ public override void OnEndPage(PrintDocument document, PrintPageEventArgs e);\n+ public override void OnEndPrint(PrintDocument document, PrintEventArgs e);\n+ public override Graphics OnStartPage(PrintDocument document, PrintPageEventArgs e);\n+ public override void OnStartPrint(PrintDocument document, PrintEventArgs e);\n+ }\n+ public enum PrintAction {\n+ PrintToFile = 0,\n+ PrintToPreview = 1,\n+ PrintToPrinter = 2,\n+ }\n+ public abstract class PrintController {\n+ protected PrintController();\n+ public virtual bool IsPreview { get; }\n+ public virtual void OnEndPage(PrintDocument document, PrintPageEventArgs e);\n+ public virtual void OnEndPrint(PrintDocument document, PrintEventArgs e);\n+ public virtual Graphics OnStartPage(PrintDocument 
document, PrintPageEventArgs e);\n+ public virtual void OnStartPrint(PrintDocument document, PrintEventArgs e);\n+ }\n+ public class PrintDocument : Component {\n+ public PrintDocument();\n+ public PageSettings DefaultPageSettings { get; set; }\n+ public string DocumentName { get; set; }\n+ public bool OriginAtMargins { get; set; }\n+ public PrintController PrintController { get; set; }\n+ public PrinterSettings PrinterSettings { get; set; }\n+ public event PrintEventHandler BeginPrint;\n+ public event PrintEventHandler EndPrint;\n+ public event PrintPageEventHandler PrintPage;\n+ public event QueryPageSettingsEventHandler QueryPageSettings;\n+ protected virtual void OnBeginPrint(PrintEventArgs e);\n+ protected virtual void OnEndPrint(PrintEventArgs e);\n+ protected virtual void OnPrintPage(PrintPageEventArgs e);\n+ protected virtual void OnQueryPageSettings(QueryPageSettingsEventArgs e);\n+ public void Print();\n+ public override string ToString();\n+ }\n+ public class PrinterResolution {\n+ public PrinterResolution();\n+ public PrinterResolutionKind Kind { get; set; }\n+ public int X { get; set; }\n+ public int Y { get; set; }\n+ public override string ToString();\n+ }\n+ public enum PrinterResolutionKind {\n+ Custom = 0,\n+ Draft = -1,\n+ High = -4,\n+ Low = -2,\n+ Medium = -3,\n+ }\n+ public class PrinterSettings : ICloneable {\n+ public PrinterSettings();\n+ public bool CanDuplex { get; }\n+ public bool Collate { get; set; }\n+ public short Copies { get; set; }\n+ public PageSettings DefaultPageSettings { get; }\n+ public Duplex Duplex { get; set; }\n+ public int FromPage { get; set; }\n+ public static PrinterSettings.StringCollection InstalledPrinters { get; }\n+ public bool IsDefaultPrinter { get; }\n+ public bool IsPlotter { get; }\n+ public bool IsValid { get; }\n+ public int LandscapeAngle { get; }\n+ public int MaximumCopies { get; }\n+ public int MaximumPage { get; set; }\n+ public int MinimumPage { get; set; }\n+ public 
PrinterSettings.PaperSizeCollection PaperSizes { get; }\n+ public PrinterSettings.PaperSourceCollection PaperSources { get; }\n+ public string PrinterName { get; set; }\n+ public PrinterSettings.PrinterResolutionCollection PrinterResolutions { get; }\n+ public string PrintFileName { get; set; }\n+ public PrintRange PrintRange { get; set; }\n+ public bool PrintToFile { get; set; }\n+ public bool SupportsColor { get; }\n+ public int ToPage { get; set; }\n+ public object Clone();\n+ public Graphics CreateMeasurementGraphics();\n+ public Graphics CreateMeasurementGraphics(bool honorOriginAtMargins);\n+ public Graphics CreateMeasurementGraphics(PageSettings pageSettings);\n+ public Graphics CreateMeasurementGraphics(PageSettings pageSettings, bool honorOriginAtMargins);\n+ public IntPtr GetHdevmode();\n+ public IntPtr GetHdevmode(PageSettings pageSettings);\n+ public IntPtr GetHdevnames();\n+ public bool IsDirectPrintingSupported(Image image);\n+ public bool IsDirectPrintingSupported(ImageFormat imageFormat);\n+ public void SetHdevmode(IntPtr hdevmode);\n+ public void SetHdevnames(IntPtr hdevnames);\n+ public override string ToString();\n+ public class PaperSizeCollection : ICollection, IEnumerable {\n+ public PaperSizeCollection(PaperSize[] array);\n+ public int Count { get; }\n+ int System.Collections.ICollection.Count { get; }\n+ bool System.Collections.ICollection.IsSynchronized { get; }\n+ object System.Collections.ICollection.SyncRoot { get; }\n+ public virtual PaperSize this[int index] { get; }\n+ public int Add(PaperSize paperSize);\n+ public void CopyTo(PaperSize[] paperSizes, int index);\n+ public IEnumerator GetEnumerator();\n+ void System.Collections.ICollection.CopyTo(Array array, int index);\n+ IEnumerator System.Collections.IEnumerable.GetEnumerator();\n+ }\n+ public class PaperSourceCollection : ICollection, IEnumerable {\n+ public PaperSourceCollection(PaperSource[] array);\n+ public int Count { get; }\n+ int System.Collections.ICollection.Count { get; 
}\n+ bool System.Collections.ICollection.IsSynchronized { get; }\n+ object System.Collections.ICollection.SyncRoot { get; }\n+ public virtual PaperSource this[int index] { get; }\n+ public int Add(PaperSource paperSource);\n+ public void CopyTo(PaperSource[] paperSources, int index);\n+ public IEnumerator GetEnumerator();\n+ void System.Collections.ICollection.CopyTo(Array array, int index);\n+ IEnumerator System.Collections.IEnumerable.GetEnumerator();\n+ }\n+ public class PrinterResolutionCollection : ICollection, IEnumerable {\n+ public PrinterResolutionCollection(PrinterResolution[] array);\n+ public int Count { get; }\n+ int System.Collections.ICollection.Count { get; }\n+ bool System.Collections.ICollection.IsSynchronized { get; }\n+ object System.Collections.ICollection.SyncRoot { get; }\n+ public virtual PrinterResolution this[int index] { get; }\n+ public int Add(PrinterResolution printerResolution);\n+ public void CopyTo(PrinterResolution[] printerResolutions, int index);\n+ public IEnumerator GetEnumerator();\n+ void System.Collections.ICollection.CopyTo(Array array, int index);\n+ IEnumerator System.Collections.IEnumerable.GetEnumerator();\n+ }\n+ public class StringCollection : ICollection, IEnumerable {\n+ public StringCollection(string[] array);\n+ public int Count { get; }\n+ int System.Collections.ICollection.Count { get; }\n+ bool System.Collections.ICollection.IsSynchronized { get; }\n+ object System.Collections.ICollection.SyncRoot { get; }\n+ public virtual string this[int index] { get; }\n+ public int Add(string value);\n+ public void CopyTo(string[] strings, int index);\n+ public IEnumerator GetEnumerator();\n+ void System.Collections.ICollection.CopyTo(Array array, int index);\n+ IEnumerator System.Collections.IEnumerable.GetEnumerator();\n+ }\n+ }\n+ public enum PrinterUnit {\n+ Display = 0,\n+ HundredthsOfAMillimeter = 2,\n+ TenthsOfAMillimeter = 3,\n+ ThousandthsOfAnInch = 1,\n+ }\n+ public sealed class PrinterUnitConvert {\n+ public 
static double Convert(double value, PrinterUnit fromUnit, PrinterUnit toUnit);\n+ public static Point Convert(Point value, PrinterUnit fromUnit, PrinterUnit toUnit);\n+ public static Margins Convert(Margins value, PrinterUnit fromUnit, PrinterUnit toUnit);\n+ public static Rectangle Convert(Rectangle value, PrinterUnit fromUnit, PrinterUnit toUnit);\n+ public static Size Convert(Size value, PrinterUnit fromUnit, PrinterUnit toUnit);\n+ public static int Convert(int value, PrinterUnit fromUnit, PrinterUnit toUnit);\n+ }\n+ public class PrintEventArgs : CancelEventArgs {\n+ public PrintEventArgs();\n+ public PrintAction PrintAction { get; }\n+ }\n+ public delegate void PrintEventHandler(object sender, PrintEventArgs e);\n+ public sealed class PrintingPermission : CodeAccessPermission, IUnrestrictedPermission {\n+ public PrintingPermission(PrintingPermissionLevel printingLevel);\n+ public PrintingPermission(PermissionState state);\n+ public PrintingPermissionLevel Level { get; set; }\n+ public override IPermission Copy();\n+ public override void FromXml(SecurityElement element);\n+ public override IPermission Intersect(IPermission target);\n+ public override bool IsSubsetOf(IPermission target);\n+ public bool IsUnrestricted();\n+ public override SecurityElement ToXml();\n+ public override IPermission Union(IPermission target);\n+ }\n+ public sealed class PrintingPermissionAttribute : CodeAccessSecurityAttribute {\n+ public PrintingPermissionAttribute(SecurityAction action);\n+ public PrintingPermissionLevel Level { get; set; }\n+ public override IPermission CreatePermission();\n+ }\n+ public enum PrintingPermissionLevel {\n+ AllPrinting = 3,\n+ DefaultPrinting = 2,\n+ NoPrinting = 0,\n+ SafePrinting = 1,\n+ }\n+ public class PrintPageEventArgs : EventArgs {\n+ public PrintPageEventArgs(Graphics graphics, Rectangle marginBounds, Rectangle pageBounds, PageSettings pageSettings);\n+ public bool Cancel { get; set; }\n+ public Graphics Graphics { get; }\n+ public bool 
HasMorePages { get; set; }\n+ public Rectangle MarginBounds { get; }\n+ public Rectangle PageBounds { get; }\n+ public PageSettings PageSettings { get; }\n+ }\n+ public delegate void PrintPageEventHandler(object sender, PrintPageEventArgs e);\n+ public enum PrintRange {\n+ AllPages = 0,\n+ CurrentPage = 4194304,\n+ Selection = 1,\n+ SomePages = 2,\n+ }\n+ public class QueryPageSettingsEventArgs : PrintEventArgs {\n+ public QueryPageSettingsEventArgs(PageSettings pageSettings);\n+ public PageSettings PageSettings { get; set; }\n+ }\n+ public delegate void QueryPageSettingsEventHandler(object sender, QueryPageSettingsEventArgs e);\n+ public class StandardPrintController : PrintController {\n+ public StandardPrintController();\n+ public override void OnEndPage(PrintDocument document, PrintPageEventArgs e);\n+ public override void OnEndPrint(PrintDocument document, PrintEventArgs e);\n+ public override Graphics OnStartPage(PrintDocument document, PrintPageEventArgs e);\n+ public override void OnStartPrint(PrintDocument document, PrintEventArgs e);\n+ }\n+}\n```\n\n", "meta": {"content_hash": "e1fc4d3ca5e16fbf35837899d48cd483", "timestamp": "", "source": "github", "line_count": 422, "max_line_length": 127, "avg_line_length": 42.86492890995261, "alnum_prop": 0.6315993145005252, "repo_name": "ericstj/standard", "id": "243b64840b63c39b6fa170dde097b3b34bf51c31", "size": "18116", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "docs/comparisons/netstandard2.0_vs_netcoreapp2.0/System.Drawing.Printing.md", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "11626"}, {"name": "C#", "bytes": "24604928"}, {"name": "CMake", "bytes": "9153"}, {"name": "PowerShell", "bytes": "69131"}, {"name": "Shell", "bytes": "64844"}]}} {"text": "\n\nusing System;\nusing System.IO;\nusing Dapper;\nusing Newtonsoft.Json;\n\nnamespace SteamDatabaseBackend\n{\n static class Settings\n {\n private static SettingsJson _current = new SettingsJson();\n\n 
public static bool IsFullRun { get; private set; }\n\n public static SettingsJson Current\n {\n get\n {\n return _current;\n }\n }\n\n public static void Load()\n {\n string settingsFile = Path.Combine(Application.Path, \"settings.json\");\n\n if (!File.Exists(settingsFile))\n {\n throw new FileNotFoundException(\"settings.json file does not exist. Rename and edit settings.json.default file.\");\n }\n\n _current = JsonConvert.DeserializeObject(File.ReadAllText(settingsFile), new JsonSerializerSettings { MissingMemberHandling = MissingMemberHandling.Error }) ?? new SettingsJson();\n }\n\n public static void Initialize()\n {\n if (string.IsNullOrWhiteSpace(Current.Steam.Username) || string.IsNullOrWhiteSpace(Current.Steam.Password))\n {\n throw new InvalidDataException(\"Missing Steam credentials in settings file\");\n }\n\n // Test database connection, it will throw if connection is unable to be made\n using (var connection = Database.GetConnection())\n {\n // Clear GC status table while we're at it\n connection.Execute(\"DELETE FROM `GC`\");\n }\n\n if (Current.FullRun != FullRunState.None)\n {\n IsFullRun = true;\n\n Log.WriteInfo(\"Settings\", \"Running full update with option \\\"{0}\\\"\", Current.FullRun);\n\n // Don't log full runs, regardless of setting\n Current.LogToFile = false;\n\n // Don't connect to IRC while doing a full run\n Current.IRC.Enabled = false;\n }\n else if (!Current.LogToFile)\n {\n Log.WriteInfo(\"Settings\", \"File logging is disabled\");\n }\n\n Current.IRC.Enabled = CanConnectToIRC();\n }\n\n private static bool CanConnectToIRC()\n {\n if (!Current.IRC.Enabled)\n {\n Log.WriteWarn(\"Settings\", \"IRC is disabled in settings\");\n return false;\n }\n\n if (string.IsNullOrEmpty(Current.IRC.Server) || Current.IRC.Port <= 0)\n {\n Log.WriteWarn(\"Settings\", \"Missing IRC details in settings file, not connecting\");\n return false;\n }\n\n if (string.IsNullOrWhiteSpace(Current.IRC.Nickname))\n {\n Log.WriteError(\"Settings\", \"Missing 
IRC nickname in settings file, not connecting\");\n return false;\n }\n\n if (string.IsNullOrWhiteSpace(Current.IRC.Password))\n {\n Current.IRC.Password = null;\n }\n\n return true;\n }\n }\n}\n", "meta": {"content_hash": "e61ab38354f48856c25bb877b19e8cbd", "timestamp": "", "source": "github", "line_count": 98, "max_line_length": 205, "avg_line_length": 31.20408163265306, "alnum_prop": 0.5464355788096795, "repo_name": "SGColdSun/SteamDatabaseBackend", "id": "70755525e2a0bc01e3cf4d62208a44cce04e2600", "size": "3226", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Util/Settings.cs", "mode": "33188", "license": "bsd-3-clause", "language": [{"name": "C#", "bytes": "284296"}, {"name": "Shell", "bytes": "452"}]}} {"text": "\n\n\n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n\n \n \n\n \n \n \n \n \n \n \n \n \n \n \n statsmodels.robust.norms.LeastSquares.psi_deriv — statsmodels\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n Skip to content \n
\n \n
\n\n \n
\n \n \n \n \n
\n
\n \n
\n
\n
\n \n
\n
\n
\n
\n
\n
\n \n\n
\n
\n
\n \n
\n
\n \n
\n

statsmodels.robust.norms.LeastSquares.psi_deriv\u00b6

\n
\n
\nLeastSquares.psi_deriv(z)[source]\u00b6
\n

The derivative of the least squares psi function.

\n
\n
Returns:
\n
\n
psi_derivndarray

ones(z.shape)

\n
\n
\n
\n
\n

Notes

\n

Used to estimate the robust covariance matrix.

\n
\n
\n\n\n
\n
\n
\n
\n
\n \n \n \n \n", "meta": {"content_hash": "58fcb59dcb5ad9b3b04449f825a38840", "timestamp": "", "source": "github", "line_count": 516, "max_line_length": 999, "avg_line_length": 38.27713178294574, "alnum_prop": 0.5973874740519467, "repo_name": "statsmodels/statsmodels.github.io", "id": "bc44c2fca922ef0db0d780919012801fd87b3e05", "size": "19755", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "v0.13.3/generated/statsmodels.robust.norms.LeastSquares.psi_deriv.html", "mode": "33188", "license": "bsd-3-clause", "language": []}} {"text": "\ufeffusing System;\nusing CVaS.BL.Common;\nusing CVaS.Shared.Options;\nusing CVaS.Shared.Helpers;\nusing CVaS.Web.Authentication;\nusing CVaS.Web.Installers;\nusing Microsoft.AspNetCore.Builder;\nusing Microsoft.AspNetCore.Hosting;\nusing Microsoft.Extensions.Configuration;\nusing Microsoft.Extensions.DependencyInjection;\nusing Microsoft.Extensions.Logging;\nusing Microsoft.Extensions.PlatformAbstractions;\nusing DryIoc;\nusing DryIoc.Microsoft.DependencyInjection;\nusing StackExchange.Profiling.Storage;\nusing Microsoft.Extensions.Caching.Memory;\nusing CVaS.Web.Helpers;\nusing Swashbuckle.AspNetCore.Swagger;\nusing Swashbuckle.AspNetCore.SwaggerGen;\nusing CVaS.Web.Swagger;\n\nnamespace CVaS.Web\n{\n public class Startup\n {\n private readonly IHostingEnvironment _hostingEnvironment;\n private readonly ModeOptions _modeOptions = new ModeOptions();\n\n public Startup(IHostingEnvironment env, ILoggerFactory loggerFactory)\n {\n Configuration = new ConfigurationBuilder()\n .SetBasePath(env.ContentRootPath)\n .AddJsonFile(\"appsettings.json\", optional: true, reloadOnChange: true)\n .AddJsonFile($\"appsettings.{env.EnvironmentName}.json\", optional: true)\n .AddEnvironmentVariables()\n .Build();\n\n loggerFactory\n .AddConsole(Configuration.GetSection(\"Logging\"))\n .AddDebug();\n\n _hostingEnvironment = env;\n }\n\n public IConfigurationRoot Configuration { get; }\n\n // This method gets called by the runtime. 
Use this method to add services to the container.\n public IServiceProvider ConfigureServices(IServiceCollection services)\n {\n services.AddCustomOptions(Configuration);\n Configuration.GetSection(\"Mode\").Bind(_modeOptions);\n\n services.AddCustomizedIdentity();\n services.AddApiAuthentication(option =>\n {\n option.AuthenticationScheme = AuthenticationSchemes.ApiKey;\n option.HeaderScheme = \"Simple\";\n });\n\n services.AddDatabaseServices(Configuration);\n services.AddStorageServices(Configuration);\n services.AddCustomizedMvc();\n services.AddMemoryCache();\n // Inject an implementation of ISwaggerProvider with defaulted settings applied\n services.AddSwaggerGen(ConfigureSwagger);\n services.AddMiniProfiler();\n //.AddEntityFramework();\n\n if (_modeOptions.IsLocal)\n {\n services.AddJobsService(Configuration);\n }\n else\n {\n services.AddMessageBroker(Configuration);\n }\n\n services.AddTransient();\n services.AddSingleton(Configuration);\n\n var physicalProvider = _hostingEnvironment.ContentRootFileProvider;\n // It's null when using ef migrations tools so we need to check first to not to throw exc\n if (physicalProvider != null) services.AddSingleton(physicalProvider);\n\n return new Container(Rules.Default\n .WithCaptureContainerDisposeStackTrace()\n .WithoutThrowIfDependencyHasShorterReuseLifespan()\n .WithImplicitRootOpenScope())\n .WithDependencyInjectionAdapter(services,\n throwIfUnresolved: type => type.Name.EndsWith(\"Controller\")) \n .ConfigureServiceProvider();\n }\n\n // This method gets called by the runtime. 
Use this method to configure the HTTP request pipeline.\n public void Configure(IApplicationBuilder app, IHostingEnvironment env, IMemoryCache cache, IContainer container) \n {\n if (env.IsDevelopment())\n {\n app.UseDeveloperExceptionPage();\n }\n\n app.UseStaticFiles();\n\n if (env.IsDevelopment())\n {\n app.UseMiniProfiler(o =>\n {\n o.RouteBasePath = \"~/profiler\";\n o.SqlFormatter = new StackExchange.Profiling.SqlFormatters.InlineFormatter();\n o.Storage = new MemoryCacheStorage(cache, TimeSpan.FromMinutes(20));\n });\n }\n\n app.UseAuthentication();\n\n app.UseMvc(routes =>\n {\n routes.MapRoute(\n name: \"default\",\n template: \"{controller=Home}/{action=Index}/{id?}\");\n });\n\n // Enable middleware to serve generated Swagger as a JSON endpoint\n app.UseSwagger();\n // Enable middleware to serve swagger-ui assets (HTML, JS, CSS etc.)\n app.UseSwaggerUI(c =>\n {\n c.SwaggerEndpoint(\"/swagger/v1/swagger.json\", \"API V1\");\n c.InjectStylesheet(\"/lib/swagger-ui-themes/themes/2.x/theme-flattop.css\");\n });\n\n if (_modeOptions.IsLocal)\n {\n ServicesExtensions.InitializeJobs(container);\n }\n }\n\n private static void ConfigureSwagger(SwaggerGenOptions options)\n {\n options.SwaggerDoc(\"v1\", new Info\n {\n Version = \"v1\",\n Title = \"Computer Vision as Service API\",\n Description = \"A simple api to run computer vision algorithms.\",\n TermsOfService = \"None\",\n Contact = new Contact\n {\n Name = \"Adam Je\u017e\",\n Email = \"adamjez@outlook.cz\"\n }\n });\n options.AddSecurityDefinition(\"ApiKey\", new ApiKeyScheme()\n {\n In = \"header\",\n Name = \"Authorization\",\n Description = \"Api Key Authentication\",\n Type = \"apiKey\"\n });\n\n options.DocumentFilter();\n options.OperationFilter();\n options.OperationFilter();\n\n\n var basePath = PlatformServices.Default.Application.ApplicationBasePath;\n var pathToDoc = System.IO.Path.Combine(basePath, \"CVaS.Web.xml\");\n options.IncludeXmlComments(pathToDoc);\n }\n }\n}\n", "meta": 
{"content_hash": "b8679b2703f2562dc9ae00b6419dd1be", "timestamp": "", "source": "github", "line_count": 170, "max_line_length": 124, "avg_line_length": 37.34117647058824, "alnum_prop": 0.5956206679269062, "repo_name": "adamjez/CVaS", "id": "6d110356d3484513441e444b33569c6c5742a253", "size": "6351", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/CVaS.Web/Startup.cs", "mode": "33188", "license": "mit", "language": [{"name": "C#", "bytes": "350481"}, {"name": "CSS", "bytes": "4863"}, {"name": "JavaScript", "bytes": "9750"}, {"name": "PowerShell", "bytes": "50183"}, {"name": "Shell", "bytes": "870"}]}} {"text": "\ufeffnamespace dp2Circulation\n{\n partial class DupForm\n {\n /// \n /// Required designer variable.\n /// \n private System.ComponentModel.IContainer components = null;\n\n /// \n /// Clean up any resources being used.\n /// \n /// true if managed resources should be disposed; otherwise, false.\n protected override void Dispose(bool disposing)\n {\n if (disposing && (components != null))\n {\n components.Dispose();\n }\n\n this.EventFinish.Dispose();\n\n base.Dispose(disposing);\n }\n\n #region Windows Form Designer generated code\n\n /// \n /// Required method for Designer support - do not modify\n /// the contents of this method with the code editor.\n /// \n private void InitializeComponent()\n {\n this.components = new System.ComponentModel.Container();\n System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(DupForm));\n this.label_dupMessage = new System.Windows.Forms.Label();\n this.label2 = new System.Windows.Forms.Label();\n this.textBox_recordPath = new System.Windows.Forms.TextBox();\n this.label1 = new System.Windows.Forms.Label();\n this.listView_browse = new DigitalPlatform.GUI.ListViewNF();\n this.columnHeader_path = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));\n this.columnHeader_sum = 
((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));\n this.imageList_dupItemType = new System.Windows.Forms.ImageList(this.components);\n this.label_message = new System.Windows.Forms.Label();\n this.button_search = new System.Windows.Forms.Button();\n this.comboBox_projectName = new System.Windows.Forms.ComboBox();\n this.button_viewXmlRecord = new System.Windows.Forms.Button();\n this.checkBox_includeLowCols = new System.Windows.Forms.CheckBox();\n this.checkBox_returnAllRecords = new System.Windows.Forms.CheckBox();\n this.tableLayoutPanel1 = new System.Windows.Forms.TableLayoutPanel();\n this.panel1 = new System.Windows.Forms.Panel();\n this.panel2 = new System.Windows.Forms.Panel();\n this.panel3 = new System.Windows.Forms.Panel();\n this.flowLayoutPanel1 = new System.Windows.Forms.FlowLayoutPanel();\n this.checkBox_returnSearchDetail = new System.Windows.Forms.CheckBox();\n this.tableLayoutPanel1.SuspendLayout();\n this.panel1.SuspendLayout();\n this.panel2.SuspendLayout();\n this.panel3.SuspendLayout();\n this.flowLayoutPanel1.SuspendLayout();\n this.SuspendLayout();\n // \n // label_dupMessage\n // \n this.label_dupMessage.AutoSize = true;\n this.label_dupMessage.Font = new System.Drawing.Font(\"\u5b8b\u4f53\", 9F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(134)));\n this.label_dupMessage.Location = new System.Drawing.Point(3, 425);\n this.label_dupMessage.Name = \"label_dupMessage\";\n this.label_dupMessage.Size = new System.Drawing.Size(114, 18);\n this.label_dupMessage.TabIndex = 1;\n this.label_dupMessage.Text = \"\u5c1a\u672a\u67e5\u91cd...\";\n // \n // label2\n // \n this.label2.AutoSize = true;\n this.label2.Location = new System.Drawing.Point(3, 3);\n this.label2.Name = \"label2\";\n this.label2.Size = new System.Drawing.Size(116, 18);\n this.label2.TabIndex = 0;\n this.label2.Text = \"\u67e5\u91cd\u65b9\u6848(&P):\";\n // \n // textBox_recordPath\n // \n this.textBox_recordPath.Anchor = 
((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) \n | System.Windows.Forms.AnchorStyles.Right)));\n this.textBox_recordPath.Location = new System.Drawing.Point(142, -2);\n this.textBox_recordPath.Name = \"textBox_recordPath\";\n this.textBox_recordPath.Size = new System.Drawing.Size(268, 28);\n this.textBox_recordPath.TabIndex = 1;\n this.textBox_recordPath.TextChanged += new System.EventHandler(this.textBox_recordPath_TextChanged);\n // \n // label1\n // \n this.label1.AutoSize = true;\n this.label1.Location = new System.Drawing.Point(3, 3);\n this.label1.Name = \"label1\";\n this.label1.Size = new System.Drawing.Size(134, 18);\n this.label1.TabIndex = 0;\n this.label1.Text = \"\u6e90\u8bb0\u5f55\u8def\u5f84(&P):\";\n // \n // listView_browse\n // \n this.listView_browse.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] {\n this.columnHeader_path,\n this.columnHeader_sum});\n this.listView_browse.Dock = System.Windows.Forms.DockStyle.Left;\n this.listView_browse.FullRowSelect = true;\n this.listView_browse.HideSelection = false;\n this.listView_browse.LargeImageList = this.imageList_dupItemType;\n this.listView_browse.Location = new System.Drawing.Point(0, 72);\n this.listView_browse.Margin = new System.Windows.Forms.Padding(0);\n this.listView_browse.Name = \"listView_browse\";\n this.listView_browse.Size = new System.Drawing.Size(451, 303);\n this.listView_browse.SmallImageList = this.imageList_dupItemType;\n this.listView_browse.TabIndex = 0;\n this.listView_browse.UseCompatibleStateImageBehavior = false;\n this.listView_browse.View = System.Windows.Forms.View.Details;\n this.listView_browse.ColumnClick += new System.Windows.Forms.ColumnClickEventHandler(this.listView_browse_ColumnClick);\n this.listView_browse.SelectedIndexChanged += new System.EventHandler(this.listView_browse_SelectedIndexChanged);\n this.listView_browse.DoubleClick += new 
System.EventHandler(this.listView_browse_DoubleClick);\n this.listView_browse.MouseUp += new System.Windows.Forms.MouseEventHandler(this.listView_browse_MouseUp);\n // \n // columnHeader_path\n // \n this.columnHeader_path.Text = \"\u8bb0\u5f55\u8def\u5f84\";\n this.columnHeader_path.Width = 120;\n // \n // columnHeader_sum\n // \n this.columnHeader_sum.Text = \"\u6743\u503c\u548c\";\n this.columnHeader_sum.TextAlign = System.Windows.Forms.HorizontalAlignment.Right;\n this.columnHeader_sum.Width = 70;\n // \n // imageList_dupItemType\n // \n this.imageList_dupItemType.ImageStream = ((System.Windows.Forms.ImageListStreamer)(resources.GetObject(\"imageList_dupItemType.ImageStream\")));\n this.imageList_dupItemType.TransparentColor = System.Drawing.Color.FromArgb(((int)(((byte)(192)))), ((int)(((byte)(192)))), ((int)(((byte)(193)))));\n this.imageList_dupItemType.Images.SetKeyName(0, \"undup_type.bmp\");\n this.imageList_dupItemType.Images.SetKeyName(1, \"dup_type.bmp\");\n // \n // label_message\n // \n this.label_message.AutoSize = true;\n this.label_message.Location = new System.Drawing.Point(3, 443);\n this.label_message.Name = \"label_message\";\n this.label_message.Size = new System.Drawing.Size(17, 18);\n this.label_message.TabIndex = 2;\n this.label_message.Text = \" \";\n // \n // button_search\n // \n this.button_search.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));\n this.button_search.Location = new System.Drawing.Point(418, 0);\n this.button_search.Name = \"button_search\";\n this.button_search.Size = new System.Drawing.Size(112, 33);\n this.button_search.TabIndex = 2;\n this.button_search.Text = \"\u67e5\u91cd(&S)\";\n this.button_search.Click += new System.EventHandler(this.button_search_Click);\n // \n // comboBox_projectName\n // \n this.comboBox_projectName.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | 
System.Windows.Forms.AnchorStyles.Left) \n | System.Windows.Forms.AnchorStyles.Right)));\n this.comboBox_projectName.FormattingEnabled = true;\n this.comboBox_projectName.Location = new System.Drawing.Point(142, 3);\n this.comboBox_projectName.Name = \"comboBox_projectName\";\n this.comboBox_projectName.Size = new System.Drawing.Size(268, 26);\n this.comboBox_projectName.TabIndex = 1;\n this.comboBox_projectName.DropDown += new System.EventHandler(this.comboBox_projectName_DropDown);\n // \n // button_viewXmlRecord\n // \n this.button_viewXmlRecord.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));\n this.button_viewXmlRecord.Location = new System.Drawing.Point(418, 0);\n this.button_viewXmlRecord.Name = \"button_viewXmlRecord\";\n this.button_viewXmlRecord.Size = new System.Drawing.Size(112, 33);\n this.button_viewXmlRecord.TabIndex = 2;\n this.button_viewXmlRecord.Text = \"XML...\";\n this.button_viewXmlRecord.UseVisualStyleBackColor = true;\n this.button_viewXmlRecord.Click += new System.EventHandler(this.button_viewXmlRecord_Click);\n // \n // checkBox_includeLowCols\n // \n this.checkBox_includeLowCols.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));\n this.checkBox_includeLowCols.AutoSize = true;\n this.checkBox_includeLowCols.Location = new System.Drawing.Point(4, 4);\n this.checkBox_includeLowCols.Margin = new System.Windows.Forms.Padding(4);\n this.checkBox_includeLowCols.Name = \"checkBox_includeLowCols\";\n this.checkBox_includeLowCols.Size = new System.Drawing.Size(295, 22);\n this.checkBox_includeLowCols.TabIndex = 0;\n this.checkBox_includeLowCols.Text = \"\u8fd4\u56de\u4f4e\u4e8e\u9608\u503c\u7684\u8bb0\u5f55\u7684\u6d4f\u89c8\u5217(&B)\";\n this.checkBox_includeLowCols.UseVisualStyleBackColor = true;\n // \n // checkBox_returnAllRecords\n // \n this.checkBox_returnAllRecords.Anchor = 
((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));\n this.checkBox_returnAllRecords.AutoSize = true;\n this.checkBox_returnAllRecords.Location = new System.Drawing.Point(307, 4);\n this.checkBox_returnAllRecords.Margin = new System.Windows.Forms.Padding(4);\n this.checkBox_returnAllRecords.Name = \"checkBox_returnAllRecords\";\n this.checkBox_returnAllRecords.Size = new System.Drawing.Size(205, 22);\n this.checkBox_returnAllRecords.TabIndex = 1;\n this.checkBox_returnAllRecords.Text = \"\u8fd4\u56de\u5168\u90e8\u547d\u4e2d\u8bb0\u5f55(&A)\";\n this.checkBox_returnAllRecords.UseVisualStyleBackColor = true;\n // \n // tableLayoutPanel1\n // \n this.tableLayoutPanel1.AutoSize = true;\n this.tableLayoutPanel1.AutoSizeMode = System.Windows.Forms.AutoSizeMode.GrowAndShrink;\n this.tableLayoutPanel1.ColumnCount = 1;\n this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle());\n this.tableLayoutPanel1.Controls.Add(this.panel1, 0, 0);\n this.tableLayoutPanel1.Controls.Add(this.label_dupMessage, 0, 4);\n this.tableLayoutPanel1.Controls.Add(this.panel2, 0, 1);\n this.tableLayoutPanel1.Controls.Add(this.listView_browse, 0, 2);\n this.tableLayoutPanel1.Controls.Add(this.label_message, 0, 5);\n this.tableLayoutPanel1.Controls.Add(this.panel3, 0, 3);\n this.tableLayoutPanel1.Dock = System.Windows.Forms.DockStyle.Fill;\n this.tableLayoutPanel1.Location = new System.Drawing.Point(0, 0);\n this.tableLayoutPanel1.Margin = new System.Windows.Forms.Padding(4);\n this.tableLayoutPanel1.Name = \"tableLayoutPanel1\";\n this.tableLayoutPanel1.RowCount = 7;\n this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle());\n this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle());\n this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 100F));\n this.tableLayoutPanel1.RowStyles.Add(new 
System.Windows.Forms.RowStyle());\n this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle());\n this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle());\n this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 3F));\n this.tableLayoutPanel1.Size = new System.Drawing.Size(534, 464);\n this.tableLayoutPanel1.TabIndex = 10;\n // \n // panel1\n // \n this.panel1.AutoSize = true;\n this.panel1.Controls.Add(this.label2);\n this.panel1.Controls.Add(this.comboBox_projectName);\n this.panel1.Controls.Add(this.button_search);\n this.panel1.Dock = System.Windows.Forms.DockStyle.Left;\n this.panel1.Location = new System.Drawing.Point(0, 0);\n this.panel1.Margin = new System.Windows.Forms.Padding(0);\n this.panel1.Name = \"panel1\";\n this.panel1.Size = new System.Drawing.Size(848, 36);\n this.panel1.TabIndex = 0;\n // \n // panel2\n // \n this.panel2.AutoSize = true;\n this.panel2.Controls.Add(this.label1);\n this.panel2.Controls.Add(this.textBox_recordPath);\n this.panel2.Controls.Add(this.button_viewXmlRecord);\n this.panel2.Dock = System.Windows.Forms.DockStyle.Left;\n this.panel2.Location = new System.Drawing.Point(0, 36);\n this.panel2.Margin = new System.Windows.Forms.Padding(0);\n this.panel2.Name = \"panel2\";\n this.panel2.Size = new System.Drawing.Size(848, 36);\n this.panel2.TabIndex = 1;\n // \n // panel3\n // \n this.panel3.BackColor = System.Drawing.SystemColors.Control;\n this.panel3.Controls.Add(this.flowLayoutPanel1);\n this.panel3.Dock = System.Windows.Forms.DockStyle.Fill;\n this.panel3.Location = new System.Drawing.Point(0, 375);\n this.panel3.Margin = new System.Windows.Forms.Padding(0);\n this.panel3.Name = \"panel3\";\n this.panel3.Size = new System.Drawing.Size(848, 50);\n this.panel3.TabIndex = 7;\n // \n // flowLayoutPanel1\n // \n this.flowLayoutPanel1.Controls.Add(this.checkBox_includeLowCols);\n 
this.flowLayoutPanel1.Controls.Add(this.checkBox_returnAllRecords);\n this.flowLayoutPanel1.Controls.Add(this.checkBox_returnSearchDetail);\n this.flowLayoutPanel1.Dock = System.Windows.Forms.DockStyle.Fill;\n this.flowLayoutPanel1.Location = new System.Drawing.Point(0, 0);\n this.flowLayoutPanel1.Name = \"flowLayoutPanel1\";\n this.flowLayoutPanel1.Size = new System.Drawing.Size(848, 50);\n this.flowLayoutPanel1.TabIndex = 2;\n // \n // checkBox_returnSearchDetail\n // \n this.checkBox_returnSearchDetail.AutoSize = true;\n this.checkBox_returnSearchDetail.Location = new System.Drawing.Point(519, 3);\n this.checkBox_returnSearchDetail.Name = \"checkBox_returnSearchDetail\";\n this.checkBox_returnSearchDetail.Size = new System.Drawing.Size(169, 22);\n this.checkBox_returnSearchDetail.TabIndex = 2;\n this.checkBox_returnSearchDetail.Text = \"\u8fd4\u56de\u68c0\u7d22\u8be6\u60c5(&S)\";\n this.checkBox_returnSearchDetail.UseVisualStyleBackColor = true;\n // \n // DupForm\n // \n this.AutoScaleDimensions = new System.Drawing.SizeF(9F, 18F);\n this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;\n this.ClientSize = new System.Drawing.Size(534, 464);\n this.Controls.Add(this.tableLayoutPanel1);\n this.Icon = ((System.Drawing.Icon)(resources.GetObject(\"$this.Icon\")));\n this.Name = \"DupForm\";\n this.ShowInTaskbar = false;\n this.Text = \"DupForm\";\n this.Activated += new System.EventHandler(this.DupForm_Activated);\n this.FormClosing += new System.Windows.Forms.FormClosingEventHandler(this.DupForm_FormClosing);\n this.FormClosed += new System.Windows.Forms.FormClosedEventHandler(this.DupForm_FormClosed);\n this.Load += new System.EventHandler(this.DupForm_Load);\n this.SizeChanged += new System.EventHandler(this.DupForm_SizeChanged);\n this.tableLayoutPanel1.ResumeLayout(false);\n this.tableLayoutPanel1.PerformLayout();\n this.panel1.ResumeLayout(false);\n this.panel1.PerformLayout();\n this.panel2.ResumeLayout(false);\n this.panel2.PerformLayout();\n 
this.panel3.ResumeLayout(false);\n this.flowLayoutPanel1.ResumeLayout(false);\n this.flowLayoutPanel1.PerformLayout();\n this.ResumeLayout(false);\n this.PerformLayout();\n\n }\n\n #endregion\n\n private System.Windows.Forms.Label label_dupMessage;\n private System.Windows.Forms.Label label2;\n private System.Windows.Forms.TextBox textBox_recordPath;\n private System.Windows.Forms.Label label1;\n private DigitalPlatform.GUI.ListViewNF listView_browse;\n private System.Windows.Forms.ColumnHeader columnHeader_path;\n private System.Windows.Forms.ColumnHeader columnHeader_sum;\n private System.Windows.Forms.Label label_message;\n private System.Windows.Forms.Button button_search;\n private System.Windows.Forms.ComboBox comboBox_projectName;\n private System.Windows.Forms.Button button_viewXmlRecord;\n private System.Windows.Forms.ImageList imageList_dupItemType;\n private System.Windows.Forms.CheckBox checkBox_includeLowCols;\n private System.Windows.Forms.CheckBox checkBox_returnAllRecords;\n private System.Windows.Forms.TableLayoutPanel tableLayoutPanel1;\n private System.Windows.Forms.Panel panel1;\n private System.Windows.Forms.Panel panel2;\n private System.Windows.Forms.Panel panel3;\n private System.Windows.Forms.FlowLayoutPanel flowLayoutPanel1;\n private System.Windows.Forms.CheckBox checkBox_returnSearchDetail;\n }\n}", "meta": {"content_hash": "6ba39200f442244557ede07c8794e97c", "timestamp": "", "source": "github", "line_count": 343, "max_line_length": 175, "avg_line_length": 56.48979591836735, "alnum_prop": 0.6343414533443436, "repo_name": "DigitalPlatform/dp2", "id": "77363cd47cb9b9ce3ebf0e342b5051e17e57388e", "size": "19480", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dp2Circulation/Dup/DupForm.Designer.cs", "mode": "33188", "license": "apache-2.0", "language": [{"name": "ASP.NET", "bytes": "80547"}, {"name": "Batchfile", "bytes": "7192"}, {"name": "C#", "bytes": "56347865"}, {"name": "CSS", "bytes": "818819"}, {"name": "HTML", 
"bytes": "1914736"}, {"name": "JavaScript", "bytes": "152102"}, {"name": "PHP", "bytes": "30185"}, {"name": "Roff", "bytes": "1879"}, {"name": "Smalltalk", "bytes": "48625"}, {"name": "XSLT", "bytes": "64230"}]}} {"text": "parent = false;\n\n $this->blocks = array(\n );\n }\n\n protected function doDisplay(array $context, array $blocks = array())\n {\n // line 1\n echo \"\n\";\n }\n\n public function getTemplateName()\n {\n return \"@WebProfiler/Profiler/base_js.html.twig\";\n }\n\n public function isTraitable()\n {\n return false;\n }\n\n public function getDebugInfo()\n {\n return array ( 248 => 222, 216 => 193, 208 => 187, 206 => 186, 19 => 1,);\n }\n}\n", "meta": {"content_hash": "accaa677207166d3aa8983fcfa7a26b2", "timestamp": "", "source": "github", "line_count": 322, "max_line_length": 197, "avg_line_length": 37.02795031055901, "alnum_prop": 0.4314350415163969, "repo_name": "novikovm/Store", "id": "87eb58ce6f9be9b0c2501bbea64a2cba081e7680", "size": "11923", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "app/cache/dev/twig/e7/98/0dfb8bff76e82b49ca15a592b08d8e457a6b35170d5e64a247a3d4b200aa.php", "mode": "33188", "license": "mit", "language": [{"name": "ApacheConf", "bytes": "3073"}, {"name": "CSS", "bytes": "792"}, {"name": "JavaScript", "bytes": "2824"}, {"name": "PHP", "bytes": "77153"}]}} {"text": "\ufeff\n//____________________________________________________________________________\n//\n// Copyright (C) 2019, Mariusz Postol LODZ POLAND.\n//\n// To be in touch join the community at GITTER: https://gitter.im/mpostol/OPC-UA-OOI\n//____________________________________________________________________________\n\nnamespace UAOOI.Networking.Core\n{\n\n /// \n /// Enum HandlerState - represents states of an configurable object. 
\n /// \n public enum HandlerState\n {\n\n /// \n /// The handler is not configured and cannot be enabled.\n /// \n NoConfiguration,\n /// \n /// The handler is configured but currently disabled.\n /// \n Disabled,\n /// \n /// The handler is operational.\n /// \n Operational,\n /// \n /// The handler is in an error state, i.e. cannot change the state to Operational. Similar to NoConfiguration state but after an error occurs.\n /// \n Error\n\n }\n}\n", "meta": {"content_hash": "98203845daf7853e0ca239f9d405aa50", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 146, "avg_line_length": 27.666666666666668, "alnum_prop": 0.5341365461847389, "repo_name": "mpostol/OPC-UA-OOI", "id": "4876b7e7815269c8f12ab80fe0feee9235e7c47a", "size": "998", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Networking/Core/HandlerState.cs", "mode": "33188", "license": "mit", "language": [{"name": "Batchfile", "bytes": "8495"}, {"name": "C#", "bytes": "4749023"}, {"name": "Vim Snippet", "bytes": "1008"}]}} {"text": "\n\n#ifdef KROLL_COVERAGE\n\n#import \"TiBase.h\"\n#import \"KrollObject.h\"\n#import \"KrollMethod.h\"\n\n#define COMPONENT_TYPE_PROXIES @\"proxies\"\n#define COMPONENT_TYPE_MODULES @\"modules\"\n#define COMPONENT_TYPE_OTHER @\"other\"\n\n#define API_TYPE_FUNCTION @\"function\"\n#define API_TYPE_PROPERTY @\"property\"\n\n#define COVERAGE_TYPE_GET @\"propertyGet\"\n#define COVERAGE_TYPE_SET @\"propertySet\"\n#define COVERAGE_TYPE_CALL @\"functionCall\"\n\n#define TOP_LEVEL @\"TOP_LEVEL\"\n\n@protocol KrollCoverage \n-(void)increment:(NSString*)apiName coverageType:(NSString*)coverageType apiType:(NSString*)apiType;\n-(NSString*)coverageName;\n-(NSString*)coverageType;\n@end\n\n@interface KrollCoverageObject : KrollObject {\n@private\n\tNSString *componentName, *componentType;\n}\n\n@property(nonatomic,copy) NSString *componentName;\n@property(nonatomic,copy) NSString 
*componentType;\n\n+(void)incrementCoverage:(NSString*)componentType_ componentName:(NSString*)componentName_ apiName:(NSString*)apiName_ coverageType:(NSString*)coverageType_ apiType:(NSString*)apiType_;\n+(void)incrementTopLevelFunctionCall:(NSString*)componentName name:(NSString*)apiName;\n+(NSDictionary*)dumpCoverage;\n+(void)releaseCoverage;\n\n-(id)initWithTarget:(id)target_ context:(KrollContext*)context_;\n-(id)initWithTarget:(id)target_ context:(KrollContext*)context_ componentName:(NSString*)componentName_;\n\n@end\n\n@interface KrollCoverageMethod : KrollMethod {\n@private\n\tNSString *parentName, *parentType;\n id parent;\n}\n\n@property(nonatomic,copy) NSString *parentName;\n@property(nonatomic,copy) NSString *parentType;\n\n-(id)initWithTarget:(id)target_ context:(KrollContext *)context_ parent:(id)parent_;\n-(id)initWithTarget:(id)target_ selector:(SEL)selector_ argcount:(int)argcount_ type:(KrollMethodType)type_ name:(id)name_ context:(KrollContext*)context_ parent:(id)parent_;\n\n-(id)call:(NSArray*)args;\n\n@end\n\n#endif", "meta": {"content_hash": "21bb573fe092080f36e7572f9593252a", "timestamp": "", "source": "github", "line_count": 62, "max_line_length": 185, "avg_line_length": 31.016129032258064, "alnum_prop": 0.7795111804472179, "repo_name": "AppWerft/SunTracker", "id": "a35c96cbe93d2220f17c7fc21f3d721e8b64e370", "size": "2220", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "build/iphone/Classes/KrollCoverage.h", "mode": "33188", "license": "apache-2.0", "language": [{"name": "C", "bytes": "139582"}, {"name": "C++", "bytes": "56872"}, {"name": "D", "bytes": "1093222"}, {"name": "JavaScript", "bytes": "31672"}, {"name": "Objective-C", "bytes": "3310532"}, {"name": "Shell", "bytes": "270"}]}} {"text": "{% extends 'base.html' %}\n\n{% block content %}\n\n
\n {% for article in articles %}\n
\n

{{ article.title }}

\n \n

{{ article.summary }}

\n
\n {% endfor %}\n
\n\n{% endblock %}", "meta": {"content_hash": "d9a765cd0f3bf2cf46dc62c75e9fa255", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 66, "avg_line_length": 22.466666666666665, "alnum_prop": 0.5311572700296736, "repo_name": "greencoder/scrapester.xyz", "id": "0091584c04c82a77fd57bb37fb64d8bd61ed4b28", "size": "337", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "themes/custom/templates/index.html", "mode": "33188", "license": "mit", "language": [{"name": "CSS", "bytes": "3508"}, {"name": "HTML", "bytes": "2242"}, {"name": "Python", "bytes": "1704"}, {"name": "Shell", "bytes": "2201"}]}} {"text": "using namespace cv;\nusing namespace cv::xfeatures2d;\nusing std::cout;\nusing std::endl;\n\nconst char* keys =\n \"{ help h | | Print help message. }\"\n \"{ input1 | box.png | Path to input image 1. }\"\n \"{ input2 | box_in_scene.png | Path to input image 2. }\";\n\nint main( int argc, char* argv[] )\n{\n CommandLineParser parser( argc, argv, keys );\n Mat img_object = imread( samples::findFile( parser.get(\"input1\") ), IMREAD_GRAYSCALE );\n Mat img_scene = imread( samples::findFile( parser.get(\"input2\") ), IMREAD_GRAYSCALE );\n if ( img_object.empty() || img_scene.empty() )\n {\n cout << \"Could not open or find the image!\\n\" << endl;\n parser.printMessage();\n return -1;\n }\n\n //-- Step 1: Detect the keypoints using SURF Detector, compute the descriptors\n int minHessian = 400;\n Ptr detector = SURF::create( minHessian );\n std::vector keypoints_object, keypoints_scene;\n Mat descriptors_object, descriptors_scene;\n detector->detectAndCompute( img_object, noArray(), keypoints_object, descriptors_object );\n detector->detectAndCompute( img_scene, noArray(), keypoints_scene, descriptors_scene );\n\n //-- Step 2: Matching descriptor vectors with a FLANN based matcher\n // Since SURF is a floating-point descriptor NORM_L2 is used\n Ptr matcher = DescriptorMatcher::create(DescriptorMatcher::FLANNBASED);\n std::vector< 
std::vector > knn_matches;\n matcher->knnMatch( descriptors_object, descriptors_scene, knn_matches, 2 );\n\n //-- Filter matches using the Lowe's ratio test\n const float ratio_thresh = 0.75f;\n std::vector good_matches;\n for (size_t i = 0; i < knn_matches.size(); i++)\n {\n if (knn_matches[i][0].distance < ratio_thresh * knn_matches[i][1].distance)\n {\n good_matches.push_back(knn_matches[i][0]);\n }\n }\n\n //-- Draw matches\n Mat img_matches;\n drawMatches( img_object, keypoints_object, img_scene, keypoints_scene, good_matches, img_matches, Scalar::all(-1),\n Scalar::all(-1), std::vector(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS );\n\n //-- Localize the object\n std::vector obj;\n std::vector scene;\n\n for( size_t i = 0; i < good_matches.size(); i++ )\n {\n //-- Get the keypoints from the good matches\n obj.push_back( keypoints_object[ good_matches[i].queryIdx ].pt );\n scene.push_back( keypoints_scene[ good_matches[i].trainIdx ].pt );\n }\n\n Mat H = findHomography( obj, scene, RANSAC );\n\n //-- Get the corners from the image_1 ( the object to be \"detected\" )\n std::vector obj_corners(4);\n obj_corners[0] = Point2f(0, 0);\n obj_corners[1] = Point2f( (float)img_object.cols, 0 );\n obj_corners[2] = Point2f( (float)img_object.cols, (float)img_object.rows );\n obj_corners[3] = Point2f( 0, (float)img_object.rows );\n std::vector scene_corners(4);\n\n perspectiveTransform( obj_corners, scene_corners, H);\n\n //-- Draw lines between the corners (the mapped object in the scene - image_2 )\n line( img_matches, scene_corners[0] + Point2f((float)img_object.cols, 0),\n scene_corners[1] + Point2f((float)img_object.cols, 0), Scalar(0, 255, 0), 4 );\n line( img_matches, scene_corners[1] + Point2f((float)img_object.cols, 0),\n scene_corners[2] + Point2f((float)img_object.cols, 0), Scalar( 0, 255, 0), 4 );\n line( img_matches, scene_corners[2] + Point2f((float)img_object.cols, 0),\n scene_corners[3] + Point2f((float)img_object.cols, 0), Scalar( 0, 255, 0), 4 );\n line( 
img_matches, scene_corners[3] + Point2f((float)img_object.cols, 0),\n scene_corners[0] + Point2f((float)img_object.cols, 0), Scalar( 0, 255, 0), 4 );\n\n //-- Show detected matches\n imshow(\"Good Matches & Object detection\", img_matches );\n\n waitKey();\n return 0;\n}\n#else\nint main()\n{\n std::cout << \"This tutorial code needs the xfeatures2d contrib module to be run.\" << std::endl;\n return 0;\n}\n#endif\n", "meta": {"content_hash": "4915b4b4ba5e249df6a02b4b5e3746b4", "timestamp": "", "source": "github", "line_count": 98, "max_line_length": 118, "avg_line_length": 41.704081632653065, "alnum_prop": 0.6359187668216295, "repo_name": "opencv/opencv", "id": "2aa0bcd9cbb9c2eaafdc85aaa38e3577440360ed", "size": "4329", "binary": false, "copies": "2", "ref": "refs/heads/4.x", "path": "samples/cpp/tutorial_code/features2D/feature_homography/SURF_FLANN_matching_homography_Demo.cpp", "mode": "33188", "license": "apache-2.0", "language": [{"name": "AIDL", "bytes": "1986"}, {"name": "Batchfile", "bytes": "1498"}, {"name": "C", "bytes": "1543870"}, {"name": "C++", "bytes": "35975082"}, {"name": "CMake", "bytes": "1010867"}, {"name": "Cuda", "bytes": "333437"}, {"name": "Dockerfile", "bytes": "309"}, {"name": "HTML", "bytes": "40027"}, {"name": "Java", "bytes": "774232"}, {"name": "JavaScript", "bytes": "233673"}, {"name": "Kotlin", "bytes": "5204"}, {"name": "Objective-C", "bytes": "100731"}, {"name": "Objective-C++", "bytes": "392600"}, {"name": "Perl", "bytes": "15865"}, {"name": "PowerShell", "bytes": "14591"}, {"name": "Prolog", "bytes": "843"}, {"name": "Python", "bytes": "1038154"}, {"name": "Shell", "bytes": "22738"}, {"name": "Swift", "bytes": "301765"}, {"name": "TeX", "bytes": "3530"}]}} {"text": "\n\n\n\n