repo_name (stringlengths 5-108) | path (stringlengths 6-333) | size (stringlengths 1-6) | content (stringlengths 4-977k) | license (stringclasses 15 values) |
---|---|---|---|---|
ChristianMurphy/uPortal | uPortal-io/uPortal-io-types/src/main/java/org/apereo/portal/io/xml/eventaggr/ExternalTermDetailComparator.java | 1361 | /**
* Licensed to Apereo under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright ownership. Apereo
* licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the License at the
* following location:
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apereo.portal.io.xml.eventaggr;
import java.util.Calendar;
import org.apereo.portal.utils.ComparableExtractingComparator;
/** Compare ExternalTermDetail based on start date */
public class ExternalTermDetailComparator
extends ComparableExtractingComparator<ExternalTermDetail, Calendar> {
public static final ExternalTermDetailComparator INSTANCE = new ExternalTermDetailComparator();
private ExternalTermDetailComparator() {}
@Override
protected Calendar getComparable(ExternalTermDetail o) {
return o.getStart();
}
}
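// Hypothetical usage sketch, not part of the original file: sorts a list of terms
// chronologically with the singleton comparator defined above. This assumes
// ComparableExtractingComparator implements java.util.Comparator, which the sort
// call below requires.
class ExternalTermDetailSortExample {
    static void sortByStartDate(java.util.List<ExternalTermDetail> terms) {
        // Earliest start date first, since getComparable() returns getStart().
        java.util.Collections.sort(terms, ExternalTermDetailComparator.INSTANCE);
    }
}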
| apache-2.0 |
GlenRSmith/elasticsearch | test/framework/src/main/java/org/elasticsearch/test/AbstractSchemaValidationTestCase.java | 7102 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.test;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.networknt.schema.AdditionalPropertiesValidator;
import com.networknt.schema.ItemsValidator;
import com.networknt.schema.JsonSchema;
import com.networknt.schema.JsonSchemaFactory;
import com.networknt.schema.JsonValidator;
import com.networknt.schema.PropertiesValidator;
import com.networknt.schema.SchemaValidatorsConfig;
import com.networknt.schema.SpecVersion;
import com.networknt.schema.ValidationMessage;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Map.Entry;
import java.util.Set;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;
/**
* Test case for validating {@link ToXContent} objects against a json schema.
*/
public abstract class AbstractSchemaValidationTestCase<T extends ToXContent> extends ESTestCase {
protected static final int NUMBER_OF_TEST_RUNS = 20;
public final void testSchema() throws IOException {
ObjectMapper mapper = new ObjectMapper();
SchemaValidatorsConfig config = new SchemaValidatorsConfig();
JsonSchemaFactory factory = initializeSchemaFactory();
Path p = getDataPath(getSchemaLocation() + getJsonSchemaFileName());
logger.debug("loading schema from: [{}]", p);
JsonSchema jsonSchema = factory.getSchema(mapper.readTree(Files.newInputStream(p)), config);
// ensure the schema meets certain criteria like not empty, strictness
assertTrue("found empty schema", jsonSchema.getValidators().size() > 0);
assertTrue("schema lacks at least 1 required field", jsonSchema.hasRequiredValidator());
assertSchemaStrictness(jsonSchema.getValidators().values(), jsonSchema.getSchemaPath());
for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) {
BytesReference xContent = XContentHelper.toXContent(createTestInstance(), XContentType.JSON, getToXContentParams(), false);
JsonNode jsonTree = mapper.readTree(xContent.streamInput());
Set<ValidationMessage> errors = jsonSchema.validate(jsonTree);
assertThat("Schema validation failed for: " + jsonTree.toPrettyString(), errors, is(empty()));
}
}
/**
* Creates a random instance to use in the schema tests.
* Override this method to return the random instance you build,
* which must implement {@link ToXContent}.
*/
protected abstract T createTestInstance();
/**
* Return the filename of the schema file used for testing.
*/
protected abstract String getJsonSchemaFileName();
/**
* Params that have to be provided when calling {@link ToXContent#toXContent(XContentBuilder, ToXContent.Params)}
*/
protected ToXContent.Params getToXContentParams() {
return ToXContent.EMPTY_PARAMS;
}
/**
* Root folder for all schema files.
*/
protected String getSchemaLocation() {
return "/rest-api-spec/schema/";
}
/**
* Version of the Json Schema Spec to be used by the test.
*/
protected SpecVersion.VersionFlag getSchemaVersion() {
return SpecVersion.VersionFlag.V7;
}
/**
* Loader for the schema factory.
*
* Uses the out-of-the-box factory but replaces the loader for sub-schemas stored on the file system.
*/
private JsonSchemaFactory initializeSchemaFactory() {
JsonSchemaFactory factory = JsonSchemaFactory.builder(JsonSchemaFactory.getInstance(getSchemaVersion())).uriFetcher(uri -> {
String fileName = uri.toString().substring(uri.getScheme().length() + 1);
Path path = getDataPath(getSchemaLocation() + fileName);
logger.debug("loading sub-schema [{}] from: [{}]", uri, path);
return Files.newInputStream(path);
}, "file").build();
return factory;
}
/**
* Enforce that the schema as well as all sub schemas define all properties.
*
* This uses an implementation detail of the schema validation library: If
* strict validation is turned on (`"additionalProperties": false`), the schema
* validator injects an instance of AdditionalPropertiesValidator.
*
* The check loops through the validator tree and checks for instances of
* AdditionalPropertiesValidator. If it is absent at expected places the test fails.
*
* Note: we might not catch all places, but at least it works for nested objects and
* array items.
*/
private void assertSchemaStrictness(Collection<JsonValidator> validatorSet, String path) {
boolean additionalPropertiesValidatorFound = false;
boolean subSchemaFound = false;
for (JsonValidator validator : validatorSet) {
if (validator instanceof PropertiesValidator) {
subSchemaFound = true;
PropertiesValidator propertiesValidator = (PropertiesValidator) validator;
for (Entry<String, JsonSchema> subSchema : propertiesValidator.getSchemas().entrySet()) {
assertSchemaStrictness(subSchema.getValue().getValidators().values(), propertiesValidator.getSchemaPath());
}
} else if (validator instanceof ItemsValidator) {
ItemsValidator itemValidator = (ItemsValidator) validator;
if (itemValidator.getSchema() != null) {
assertSchemaStrictness(itemValidator.getSchema().getValidators().values(), itemValidator.getSchemaPath());
}
if (itemValidator.getTupleSchema() != null) {
for (JsonSchema subSchema : itemValidator.getTupleSchema()) {
assertSchemaStrictness(subSchema.getValidators().values(), itemValidator.getSchemaPath());
}
}
} else if (validator instanceof AdditionalPropertiesValidator) {
additionalPropertiesValidatorFound = true;
}
}
// if not a leaf, additional property strictness must be set
assertTrue(
"the schema must have additional properties set to false (\"additionalProperties\": false) in all (sub) schemas, "
+ "missing at least for path: "
+ path,
subSchemaFound == false || additionalPropertiesValidatorFound
);
}
}
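// Hypothetical example subclass, not part of the original file. It shows the hooks a
// concrete test typically overrides; the FooStats type, its randomFooStats() factory
// and the schema file name are invented purely for illustration, so the sketch is kept
// in a comment rather than compiled code.
/*
public class FooStatsSchemaTests extends AbstractSchemaValidationTestCase<FooStats> {
    @Override
    protected FooStats createTestInstance() {
        return randomFooStats(); // random ToXContent instance to serialize and validate
    }

    @Override
    protected String getJsonSchemaFileName() {
        return "foo_stats.schema.json"; // resolved under getSchemaLocation()
    }
}
*/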
| apache-2.0 |
jvanz/core | qadevOOo/tests/java/ifc/java/_XJavaVM.java | 1862 | /*
* This file is part of the LibreOffice project.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* This file incorporates work covered by the following license notice:
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0 .
*/
package ifc.java;
import lib.MultiMethodTest;
import lib.Status;
import lib.StatusException;
import com.sun.star.java.XJavaVM;
/**
* Testing <code>com.sun.star.java.XJavaVM</code>
* interface methods :
* <ul>
* <li><code> getJavaVM()</code></li>
* <li><code> isVMStarted()</code></li>
* <li><code> isVMEnabled()</code></li>
* </ul> <p>
* <b> Nothing tested here. </b>
* @see com.sun.star.java.XJavaVM
*/
public class _XJavaVM extends MultiMethodTest {
public XJavaVM oObj;
@Override
protected void after() {
log.println("Skipping all XJavaVM methods, since they"
+ " can't be tested in the context");
throw new StatusException(Status.skipped(true));
}
public void _getJavaVM() {
// skipping the test
tRes.tested("getJavaVM()", true);
}
public void _isVMEnabled() {
// skipping the test
tRes.tested("isVMEnabled()", true);
}
public void _isVMStarted() {
// skipping the test
tRes.tested("isVMStarted()", true);
}
}
| gpl-3.0 |
asedunov/intellij-community | platform/vcs-impl/src/com/intellij/openapi/vcs/impl/CodeSmellDetectorImpl.java | 8464 | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs.impl;
import com.intellij.codeInsight.CodeSmellInfo;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.codeInsight.daemon.HighlightDisplayKey;
import com.intellij.codeInsight.daemon.impl.*;
import com.intellij.ide.errorTreeView.NewErrorTreeViewPanel;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.progress.util.AbstractProgressIndicatorExBase;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vcs.AbstractVcsHelper;
import com.intellij.openapi.vcs.CodeSmellDetector;
import com.intellij.openapi.vcs.VcsBundle;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.ex.ProgressIndicatorEx;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.ui.MessageCategory;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
* @author yole
*/
public class CodeSmellDetectorImpl extends CodeSmellDetector {
private final Project myProject;
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.vcs.impl.CodeSmellDetectorImpl");
public CodeSmellDetectorImpl(final Project project) {
myProject = project;
}
@Override
public void showCodeSmellErrors(@NotNull final List<CodeSmellInfo> smellList) {
Collections.sort(smellList, (o1, o2) -> o1.getTextRange().getStartOffset() - o2.getTextRange().getStartOffset());
ApplicationManager.getApplication().invokeLater(() -> {
if (myProject.isDisposed()) return;
if (smellList.isEmpty()) {
return;
}
final VcsErrorViewPanel errorTreeView = new VcsErrorViewPanel(myProject);
AbstractVcsHelperImpl helper = (AbstractVcsHelperImpl)AbstractVcsHelper.getInstance(myProject);
helper.openMessagesView(errorTreeView, VcsBundle.message("code.smells.error.messages.tab.name"));
FileDocumentManager fileManager = FileDocumentManager.getInstance();
for (CodeSmellInfo smellInfo : smellList) {
final VirtualFile file = fileManager.getFile(smellInfo.getDocument());
final OpenFileDescriptor navigatable =
new OpenFileDescriptor(myProject, file, smellInfo.getStartLine(), smellInfo.getStartColumn());
final String exportPrefix = NewErrorTreeViewPanel.createExportPrefix(smellInfo.getStartLine() + 1);
final String rendererPrefix =
NewErrorTreeViewPanel.createRendererPrefix(smellInfo.getStartLine() + 1, smellInfo.getStartColumn() + 1);
if (smellInfo.getSeverity() == HighlightSeverity.ERROR) {
errorTreeView.addMessage(MessageCategory.ERROR, new String[]{smellInfo.getDescription()}, file.getPresentableUrl(), navigatable,
exportPrefix, rendererPrefix, null);
}
else {//if (smellInfo.getSeverity() == HighlightSeverity.WARNING) {
errorTreeView.addMessage(MessageCategory.WARNING, new String[]{smellInfo.getDescription()}, file.getPresentableUrl(),
navigatable, exportPrefix, rendererPrefix, null);
}
}
});
}
@NotNull
@Override
public List<CodeSmellInfo> findCodeSmells(@NotNull final List<VirtualFile> filesToCheck) throws ProcessCanceledException {
ApplicationManager.getApplication().assertIsDispatchThread();
final List<CodeSmellInfo> result = new ArrayList<>();
PsiDocumentManager.getInstance(myProject).commitAllDocuments();
if (ApplicationManager.getApplication().isWriteAccessAllowed()) throw new RuntimeException("Must not run under write action");
final Ref<Exception> exception = Ref.create();
ProgressManager.getInstance().run(new Task.Modal(myProject, VcsBundle.message("checking.code.smells.progress.title"), true) {
@Override
public void run(@NotNull ProgressIndicator progress) {
try {
for (int i = 0; i < filesToCheck.size(); i++) {
if (progress.isCanceled()) throw new ProcessCanceledException();
final VirtualFile file = filesToCheck.get(i);
progress.setText(VcsBundle.message("searching.for.code.smells.processing.file.progress.text", file.getPresentableUrl()));
progress.setFraction((double)i / (double)filesToCheck.size());
result.addAll(findCodeSmells(file, progress));
}
}
catch (ProcessCanceledException e) {
exception.set(e);
}
catch (Exception e) {
LOG.error(e);
exception.set(e);
}
}
});
if (!exception.isNull()) {
ExceptionUtil.rethrowAllAsUnchecked(exception.get());
}
return result;
}
@NotNull
private List<CodeSmellInfo> findCodeSmells(@NotNull final VirtualFile file, @NotNull final ProgressIndicator progress) {
final List<CodeSmellInfo> result = Collections.synchronizedList(new ArrayList<CodeSmellInfo>());
final DaemonCodeAnalyzerImpl codeAnalyzer = (DaemonCodeAnalyzerImpl)DaemonCodeAnalyzer.getInstance(myProject);
final ProgressIndicator daemonIndicator = new DaemonProgressIndicator();
((ProgressIndicatorEx)progress).addStateDelegate(new AbstractProgressIndicatorExBase() {
@Override
public void cancel() {
super.cancel();
daemonIndicator.cancel();
}
});
ProgressManager.getInstance().runProcess(() -> DumbService.getInstance(myProject).runReadActionInSmartMode(() -> {
final PsiFile psiFile = PsiManager.getInstance(myProject).findFile(file);
final Document document = FileDocumentManager.getInstance().getDocument(file);
if (psiFile == null || document == null) {
return;
}
List<HighlightInfo> infos = codeAnalyzer.runMainPasses(psiFile, document, daemonIndicator);
convertErrorsAndWarnings(infos, result, document);
}), daemonIndicator);
return result;
}
private void convertErrorsAndWarnings(@NotNull Collection<HighlightInfo> highlights,
@NotNull List<CodeSmellInfo> result,
@NotNull Document document) {
for (HighlightInfo highlightInfo : highlights) {
final HighlightSeverity severity = highlightInfo.getSeverity();
if (SeverityRegistrar.getSeverityRegistrar(myProject).compare(severity, HighlightSeverity.WARNING) >= 0) {
result.add(new CodeSmellInfo(document, getDescription(highlightInfo),
new TextRange(highlightInfo.startOffset, highlightInfo.endOffset), severity));
}
}
}
private static String getDescription(@NotNull HighlightInfo highlightInfo) {
final String description = highlightInfo.getDescription();
final HighlightInfoType type = highlightInfo.type;
if (type instanceof HighlightInfoType.HighlightInfoTypeSeverityByKey) {
final HighlightDisplayKey severityKey = ((HighlightInfoType.HighlightInfoTypeSeverityByKey)type).getSeverityKey();
final String id = severityKey.getID();
return "[" + id + "] " + description;
}
return description;
}
}
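// Hypothetical usage sketch, not part of the original file: run the analysis over the
// files in a pending commit and surface anything at WARNING severity or above.
// CodeSmellDetector.getInstance(project) is assumed to resolve to this implementation
// through the platform's service registration; the files list is left abstract.
/*
List<VirtualFile> filesToCheck = ...; // files about to be committed
CodeSmellDetector detector = CodeSmellDetector.getInstance(project);
List<CodeSmellInfo> smells = detector.findCodeSmells(filesToCheck);
if (!smells.isEmpty()) {
    detector.showCodeSmellErrors(smells); // opens the VCS error messages tab
}
*/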
| apache-2.0 |
bruthe/hadoop-2.6.0r | src/yarn/server/org/apache/hadoop/yarn/server/nodemanager/DirectoryCollection.java | 12900 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.DiskChecker;
/**
* Manages a list of local storage directories.
*/
class DirectoryCollection {
private static final Log LOG = LogFactory.getLog(DirectoryCollection.class);
public enum DiskErrorCause {
DISK_FULL, OTHER
}
static class DiskErrorInformation {
DiskErrorCause cause;
String message;
DiskErrorInformation(DiskErrorCause cause, String message) {
this.cause = cause;
this.message = message;
}
}
/**
* Returns a merged list which contains all the elements of l1 and l2
* @param l1 the first list to be included
* @param l2 the second list to be included
* @return a new list containing all the elements of the first and second list
*/
static List<String> concat(List<String> l1, List<String> l2) {
List<String> ret = new ArrayList<String>(l1.size() + l2.size());
ret.addAll(l1);
ret.addAll(l2);
return ret;
}
// Good local storage directories
private List<String> localDirs;
private List<String> errorDirs;
private List<String> fullDirs;
private int numFailures;
private float diskUtilizationPercentageCutoff;
private long diskUtilizationSpaceCutoff;
/**
* Create collection for the directories specified. No check for free space.
*
* @param dirs
* directories to be monitored
*/
public DirectoryCollection(String[] dirs) {
this(dirs, 100.0F, 0);
}
/**
* Create collection for the directories specified. Users must specify the
* maximum percentage of disk utilization allowed. Minimum amount of disk
* space is not checked.
*
* @param dirs
* directories to be monitored
* @param utilizationPercentageCutOff
* percentage of disk that can be used before the dir is taken out of
* the good dirs list
*
*/
public DirectoryCollection(String[] dirs, float utilizationPercentageCutOff) {
this(dirs, utilizationPercentageCutOff, 0);
}
/**
* Create collection for the directories specified. Users must specify the
* minimum amount of free space that must be available for the dir to be used.
*
* @param dirs
* directories to be monitored
* @param utilizationSpaceCutOff
* minimum space, in MB, that must be available on the disk for the
* dir to be marked as good
*
*/
public DirectoryCollection(String[] dirs, long utilizationSpaceCutOff) {
this(dirs, 100.0F, utilizationSpaceCutOff);
}
/**
* Create collection for the directories specified. Users must specify the
* maximum percentage of disk utilization allowed and the minimum amount of
* free space that must be available for the dir to be used. If either check
* fails the dir is removed from the good dirs list.
*
* @param dirs
* directories to be monitored
* @param utilizationPercentageCutOff
* percentage of disk that can be used before the dir is taken out of
* the good dirs list
* @param utilizationSpaceCutOff
* minimum space, in MB, that must be available on the disk for the
* dir to be marked as good
*
*/
public DirectoryCollection(String[] dirs,
float utilizationPercentageCutOff,
long utilizationSpaceCutOff) {
localDirs = new CopyOnWriteArrayList<String>(dirs);
errorDirs = new CopyOnWriteArrayList<String>();
fullDirs = new CopyOnWriteArrayList<String>();
diskUtilizationPercentageCutoff = utilizationPercentageCutOff;
diskUtilizationSpaceCutoff = utilizationSpaceCutOff;
diskUtilizationPercentageCutoff =
utilizationPercentageCutOff < 0.0F ? 0.0F
: (utilizationPercentageCutOff > 100.0F ? 100.0F
: utilizationPercentageCutOff);
diskUtilizationSpaceCutoff =
utilizationSpaceCutOff < 0 ? 0 : utilizationSpaceCutOff;
}
/**
* @return the current valid directories
*/
synchronized List<String> getGoodDirs() {
return Collections.unmodifiableList(localDirs);
}
/**
* @return the failed directories
*/
synchronized List<String> getFailedDirs() {
return Collections.unmodifiableList(
DirectoryCollection.concat(errorDirs, fullDirs));
}
/**
* @return the directories that have used all disk space
*/
synchronized List<String> getFullDirs() {
return fullDirs;
}
/**
* @return the total number of directory failures seen so far
*/
synchronized int getNumFailures() {
return numFailures;
}
/**
* Create any non-existent directories and parent directories, updating the
* list of valid directories if necessary.
* @param localFs local file system to use
* @param perm absolute permissions to use for any directories created
* @return true if there were no errors, false if at least one error occurred
*/
synchronized boolean createNonExistentDirs(FileContext localFs,
FsPermission perm) {
boolean failed = false;
for (final String dir : localDirs) {
try {
createDir(localFs, new Path(dir), perm);
} catch (IOException e) {
LOG.warn("Unable to create directory " + dir + " error " +
e.getMessage() + ", removing from the list of valid directories.");
localDirs.remove(dir);
errorDirs.add(dir);
numFailures++;
failed = true;
}
}
return !failed;
}
/**
* Check the health of the current set of local directories (good and failed),
* updating the list of valid directories if necessary.
*
* @return <em>true</em> if there is a new disk-failure identified in this
* checking or a failed directory passes the disk check, <em>false</em>
* otherwise.
*/
synchronized boolean checkDirs() {
boolean setChanged = false;
Set<String> preCheckGoodDirs = new HashSet<String>(localDirs);
Set<String> preCheckFullDirs = new HashSet<String>(fullDirs);
Set<String> preCheckOtherErrorDirs = new HashSet<String>(errorDirs);
List<String> failedDirs = DirectoryCollection.concat(errorDirs, fullDirs);
List<String> allLocalDirs =
DirectoryCollection.concat(localDirs, failedDirs);
Map<String, DiskErrorInformation> dirsFailedCheck = testDirs(allLocalDirs);
localDirs.clear();
errorDirs.clear();
fullDirs.clear();
for (Map.Entry<String, DiskErrorInformation> entry : dirsFailedCheck
.entrySet()) {
String dir = entry.getKey();
DiskErrorInformation errorInformation = entry.getValue();
switch (entry.getValue().cause) {
case DISK_FULL:
fullDirs.add(entry.getKey());
break;
case OTHER:
errorDirs.add(entry.getKey());
break;
}
if (preCheckGoodDirs.contains(dir)) {
LOG.warn("Directory " + dir + " error, " + errorInformation.message
+ ", removing from list of valid directories");
setChanged = true;
numFailures++;
}
}
for (String dir : allLocalDirs) {
if (!dirsFailedCheck.containsKey(dir)) {
localDirs.add(dir);
if (preCheckFullDirs.contains(dir)
|| preCheckOtherErrorDirs.contains(dir)) {
setChanged = true;
LOG.info("Directory " + dir
+ " passed disk check, adding to list of valid directories.");
}
}
}
Set<String> postCheckFullDirs = new HashSet<String>(fullDirs);
Set<String> postCheckOtherDirs = new HashSet<String>(errorDirs);
for (String dir : preCheckFullDirs) {
if (postCheckOtherDirs.contains(dir)) {
LOG.warn("Directory " + dir + " error "
+ dirsFailedCheck.get(dir).message);
}
}
for (String dir : preCheckOtherErrorDirs) {
if (postCheckFullDirs.contains(dir)) {
LOG.warn("Directory " + dir + " error "
+ dirsFailedCheck.get(dir).message);
}
}
return setChanged;
}
Map<String, DiskErrorInformation> testDirs(List<String> dirs) {
HashMap<String, DiskErrorInformation> ret =
new HashMap<String, DiskErrorInformation>();
for (final String dir : dirs) {
String msg;
try {
File testDir = new File(dir);
DiskChecker.checkDir(testDir);
if (isDiskUsageOverPercentageLimit(testDir)) {
msg =
"used space above threshold of "
+ diskUtilizationPercentageCutoff
+ "%";
ret.put(dir,
new DiskErrorInformation(DiskErrorCause.DISK_FULL, msg));
continue;
} else if (isDiskFreeSpaceUnderLimit(testDir)) {
msg =
"free space below limit of " + diskUtilizationSpaceCutoff
+ "MB";
ret.put(dir,
new DiskErrorInformation(DiskErrorCause.DISK_FULL, msg));
continue;
}
// create a random dir to make sure fs isn't in read-only mode
verifyDirUsingMkdir(testDir);
} catch (IOException ie) {
ret.put(dir,
new DiskErrorInformation(DiskErrorCause.OTHER, ie.getMessage()));
}
}
return ret;
}
/**
* Function to test whether a dir is working correctly by actually creating a
* random directory.
*
* @param dir
* the dir to test
*/
private void verifyDirUsingMkdir(File dir) throws IOException {
String randomDirName = RandomStringUtils.randomAlphanumeric(5);
File target = new File(dir, randomDirName);
int i = 0;
while (target.exists()) {
randomDirName = RandomStringUtils.randomAlphanumeric(5) + i;
target = new File(dir, randomDirName);
i++;
}
try {
DiskChecker.checkDir(target);
} finally {
FileUtils.deleteQuietly(target);
}
}
private boolean isDiskUsageOverPercentageLimit(File dir) {
float freePercentage =
100 * (dir.getUsableSpace() / (float) dir.getTotalSpace());
float usedPercentage = 100.0F - freePercentage;
return (usedPercentage > diskUtilizationPercentageCutoff
|| usedPercentage >= 100.0F);
}
private boolean isDiskFreeSpaceUnderLimit(File dir) {
long freeSpace = dir.getUsableSpace() / (1024 * 1024);
return freeSpace < this.diskUtilizationSpaceCutoff;
}
private void createDir(FileContext localFs, Path dir, FsPermission perm)
throws IOException {
if (dir == null) {
return;
}
try {
localFs.getFileStatus(dir);
} catch (FileNotFoundException e) {
createDir(localFs, dir.getParent(), perm);
localFs.mkdir(dir, perm, false);
if (!perm.equals(perm.applyUMask(localFs.getUMask()))) {
localFs.setPermission(dir, perm);
}
}
}
public float getDiskUtilizationPercentageCutoff() {
return diskUtilizationPercentageCutoff;
}
public void setDiskUtilizationPercentageCutoff(
float diskUtilizationPercentageCutoff) {
this.diskUtilizationPercentageCutoff =
diskUtilizationPercentageCutoff < 0.0F ? 0.0F
: (diskUtilizationPercentageCutoff > 100.0F ? 100.0F
: diskUtilizationPercentageCutoff);
}
public long getDiskUtilizationSpaceCutoff() {
return diskUtilizationSpaceCutoff;
}
public void setDiskUtilizationSpaceCutoff(long diskUtilizationSpaceCutoff) {
diskUtilizationSpaceCutoff =
diskUtilizationSpaceCutoff < 0 ? 0 : diskUtilizationSpaceCutoff;
this.diskUtilizationSpaceCutoff = diskUtilizationSpaceCutoff;
}
}
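// Hypothetical usage sketch, not part of the original file: monitor two NodeManager
// local dirs, marking a dir as failed once it is more than 90% full or has less than
// 1024 MB free. The paths are invented for illustration; the class and its methods are
// package-private, so a real caller has to live in this package.
class DirectoryCollectionUsageExample {
    static void example() {
        String[] dirs = { "/data/1/nm-local-dir", "/data/2/nm-local-dir" };
        DirectoryCollection dc = new DirectoryCollection(dirs, 90.0F, 1024L);
        boolean changed = dc.checkDirs();                     // re-test good and failed dirs
        java.util.List<String> good = dc.getGoodDirs();       // dirs that passed both checks
        java.util.List<String> failed = dc.getFailedDirs();   // full dirs plus error dirs
        System.out.println("changed=" + changed + ", good=" + good + ", failed=" + failed);
    }
}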
| apache-2.0 |
pombredanne/brisk-hadoop-common | src/mapred/org/apache/hadoop/mapred/InterTrackerProtocol.java | 6894 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import org.apache.hadoop.ipc.VersionedProtocol;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.security.KerberosInfo;
/**
* Protocol that a TaskTracker and the central JobTracker use to communicate.
* The JobTracker is the Server, which implements this protocol.
*/
@KerberosInfo(
serverPrincipal = JobTracker.JT_USER_NAME,
clientPrincipal = TaskTracker.TT_USER_NAME)
interface InterTrackerProtocol extends VersionedProtocol {
/**
* version 3 introduced to replace
* emitHearbeat/pollForNewTask/pollForTaskWithClosedJob with
* {@link #heartbeat(TaskTrackerStatus, boolean, boolean, boolean, short)}
* version 4 changed TaskReport for HADOOP-549.
* version 5 introduced that removes locateMapOutputs and instead uses
* getTaskCompletionEvents to figure finished maps and fetch the outputs
* version 6 adds maxTasks to TaskTrackerStatus for HADOOP-1245
* version 7 replaces maxTasks by maxMapTasks and maxReduceTasks in
* TaskTrackerStatus for HADOOP-1274
* Version 8: HeartbeatResponse is added with the next heartbeat interval.
* version 9 changes the counter representation for HADOOP-2248
* version 10 changes the TaskStatus representation for HADOOP-2208
* version 11 changes string to JobID in getTaskCompletionEvents().
* version 12 changes the counters representation for HADOOP-1915
* version 13 added call getBuildVersion() for HADOOP-236
* Version 14: replaced getFilesystemName with getSystemDir for HADOOP-3135
* Version 15: Changed format of Task and TaskStatus for HADOOP-153
* Version 16: adds ResourceStatus to TaskTrackerStatus for HADOOP-3759
* Version 17: Changed format of Task and TaskStatus for HADOOP-3150
* Version 18: Changed status message due to changes in TaskStatus
* Version 19: Changed heartbeat to piggyback JobTracker restart information
so that the TaskTracker can synchronize itself.
* Version 20: Changed status message due to changes in TaskStatus
* (HADOOP-4232)
* Version 21: Changed information reported in TaskTrackerStatus'
* ResourceStatus and the corresponding accessor methods
* (HADOOP-4035)
* Version 22: Replaced parameter 'initialContact' with 'restarted'
* in heartbeat method (HADOOP-4305)
* Version 23: Added parameter 'initialContact' again in heartbeat method
* (HADOOP-4869)
* Version 24: Changed format of Task and TaskStatus for HADOOP-4759
* Version 25: JobIDs are passed in response to JobTracker restart
* Version 26: Added numRequiredSlots to TaskStatus for MAPREDUCE-516
* Version 27: Adding node health status to TaskStatus for MAPREDUCE-211
* Version 28: Adding user name to the serialized Task for use by TT.
*/
public static final long versionID = 28L;
public final static int TRACKERS_OK = 0;
public final static int UNKNOWN_TASKTRACKER = 1;
/**
* Called regularly by the {@link TaskTracker} to update the status of its
* tasks within the job tracker. {@link JobTracker} responds with a
* {@link HeartbeatResponse} that directs the
* {@link TaskTracker} to undertake a series of 'actions'
* (see {@link org.apache.hadoop.mapred.TaskTrackerAction.ActionType}).
*
* {@link TaskTracker} must also indicate whether this is the first
* interaction (since state refresh) and acknowledge the last response
* it received from the {@link JobTracker}
*
* @param status the status update
* @param restarted <code>true</code> if the process has just started or
* restarted, <code>false</code> otherwise
* @param initialContact <code>true</code> if this is first interaction since
* 'refresh', <code>false</code> otherwise.
* @param acceptNewTasks <code>true</code> if the {@link TaskTracker} is
* ready to accept new tasks to run.
* @param responseId the last responseId successfully acted upon by the
* {@link TaskTracker}.
* @return a {@link org.apache.hadoop.mapred.HeartbeatResponse} with
* fresh instructions.
*/
HeartbeatResponse heartbeat(TaskTrackerStatus status,
boolean restarted,
boolean initialContact,
boolean acceptNewTasks,
short responseId)
throws IOException;
/**
* The task tracker calls this once, to discern where it can find
* files referred to by the JobTracker
*/
public String getFilesystemName() throws IOException;
/**
* Report a problem to the job tracker.
* @param taskTracker the name of the task tracker
* @param errorClass the kind of error (eg. the class that was thrown)
* @param errorMessage the human readable error message
* @throws IOException if there was a problem in communication or on the
* remote side
*/
public void reportTaskTrackerError(String taskTracker,
String errorClass,
String errorMessage) throws IOException;
/**
* Get task completion events for the jobid, starting from fromEventId.
* Returns an empty array if no events are available.
* @param jobid job id
* @param fromEventId event id to start from.
* @param maxEvents the max number of events we want to look at
* @return array of task completion events.
* @throws IOException
*/
TaskCompletionEvent[] getTaskCompletionEvents(JobID jobid, int fromEventId
, int maxEvents) throws IOException;
/**
* Grab the jobtracker system directory path where job-specific files are to be placed.
*
* @return the system directory where job-specific files are to be placed.
*/
public String getSystemDir();
/**
* Returns the buildVersion of the JobTracker
*/
public String getBuildVersion() throws IOException;
}
| apache-2.0 |
deciament/presto | presto-main/src/main/java/com/facebook/presto/byteCode/control/TryCatch.java | 3146 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.byteCode.control;
import com.facebook.presto.byteCode.ByteCodeBlock;
import com.facebook.presto.byteCode.ByteCodeNode;
import com.facebook.presto.byteCode.ByteCodeVisitor;
import com.facebook.presto.byteCode.MethodGenerationContext;
import com.facebook.presto.byteCode.ParameterizedType;
import com.facebook.presto.byteCode.instruction.LabelNode;
import com.google.common.collect.ImmutableList;
import org.objectweb.asm.MethodVisitor;
import java.util.List;
import static java.util.Objects.requireNonNull;
public class TryCatch
implements FlowControl
{
private final String comment;
private final ByteCodeNode tryNode;
private final ByteCodeNode catchNode;
private final String exceptionName;
public TryCatch(String comment, ByteCodeNode tryNode, ByteCodeNode catchNode, ParameterizedType exceptionType)
{
this.comment = comment;
this.tryNode = requireNonNull(tryNode, "tryNode is null");
this.catchNode = requireNonNull(catchNode, "catchNode is null");
this.exceptionName = (exceptionType != null) ? exceptionType.getClassName() : null;
}
@Override
public String getComment()
{
return comment;
}
public ByteCodeNode getTryNode()
{
return tryNode;
}
public ByteCodeNode getCatchNode()
{
return catchNode;
}
public String getExceptionName()
{
return exceptionName;
}
@Override
public void accept(MethodVisitor visitor, MethodGenerationContext generationContext)
{
LabelNode tryStart = new LabelNode("tryStart");
LabelNode tryEnd = new LabelNode("tryEnd");
LabelNode handler = new LabelNode("handler");
LabelNode done = new LabelNode("done");
ByteCodeBlock block = new ByteCodeBlock();
// try block
block.visitLabel(tryStart)
.append(tryNode)
.visitLabel(tryEnd)
.gotoLabel(done);
// handler block
block.visitLabel(handler)
.append(catchNode);
// all done
block.visitLabel(done);
block.accept(visitor, generationContext);
visitor.visitTryCatchBlock(tryStart.getLabel(), tryEnd.getLabel(), handler.getLabel(), exceptionName);
}
@Override
public List<ByteCodeNode> getChildNodes()
{
return ImmutableList.of(tryNode, catchNode);
}
@Override
public <T> T accept(ByteCodeNode parent, ByteCodeVisitor<T> visitor)
{
return visitor.visitTryCatch(parent, this);
}
}
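// Hypothetical usage sketch, not part of the original file: wrap generated code in a
// try/catch on RuntimeException. The tryBody/catchBody nodes and the
// ParameterizedType.type(...) factory are assumptions for illustration. Note that when
// the handler runs, the thrown exception is on top of the operand stack, so the catch
// node is responsible for consuming it.
/*
ByteCodeNode guarded = new TryCatch(
        "guard expression evaluation",
        tryBody,                                        // code that may throw
        catchBody,                                      // handler; must pop the exception
        ParameterizedType.type(RuntimeException.class));
block.append(guarded);
*/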
| apache-2.0 |
basicthinker/Quatrain-MapReduce | src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java | 13827 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.streaming;
import java.text.DecimalFormat;
import java.io.*;
import java.net.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.jar.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.JobConf;
/** Utilities not available elsewhere in Hadoop.
*
*/
public class StreamUtil {
/** It may seem strange to silently switch behaviour when a String
* is not a classname; the reason is simplified Usage:<pre>
* -mapper [classname | program ]
* instead of the explicit Usage:
* [-mapper program | -javamapper classname], -mapper and -javamapper are mutually exclusive.
* (repeat for -reducer, -combiner) </pre>
*/
public static Class goodClassOrNull(String className, String defaultPackage) {
if (className.indexOf('.') == -1 && defaultPackage != null) {
className = defaultPackage + "." + className;
}
Class clazz = null;
try {
clazz = Class.forName(className);
} catch (ClassNotFoundException cnf) {
} catch (LinkageError cnf) {
}
return clazz;
}
public static String findInClasspath(String className) {
return findInClasspath(className, StreamUtil.class.getClassLoader());
}
/** @return a jar file path or a base directory or null if not found.
*/
public static String findInClasspath(String className, ClassLoader loader) {
String relPath = className;
relPath = relPath.replace('.', '/');
relPath += ".class";
java.net.URL classUrl = loader.getResource(relPath);
String codePath;
if (classUrl != null) {
boolean inJar = classUrl.getProtocol().equals("jar");
codePath = classUrl.toString();
if (codePath.startsWith("jar:")) {
codePath = codePath.substring("jar:".length());
}
if (codePath.startsWith("file:")) { // can have both
codePath = codePath.substring("file:".length());
}
if (inJar) {
// A jar spec: remove class suffix in /path/my.jar!/package/Class
int bang = codePath.lastIndexOf('!');
codePath = codePath.substring(0, bang);
} else {
// A class spec: remove the /my/package/Class.class portion
int pos = codePath.lastIndexOf(relPath);
if (pos == -1) {
throw new IllegalArgumentException("invalid codePath: className=" + className
+ " codePath=" + codePath);
}
codePath = codePath.substring(0, pos);
}
} else {
codePath = null;
}
return codePath;
}
// copied from TaskRunner
static void unJar(File jarFile, File toDir) throws IOException {
JarFile jar = new JarFile(jarFile);
try {
Enumeration entries = jar.entries();
while (entries.hasMoreElements()) {
JarEntry entry = (JarEntry) entries.nextElement();
if (!entry.isDirectory()) {
InputStream in = jar.getInputStream(entry);
try {
File file = new File(toDir, entry.getName());
file.getParentFile().mkdirs();
OutputStream out = new FileOutputStream(file);
try {
byte[] buffer = new byte[8192];
int i;
while ((i = in.read(buffer)) != -1) {
out.write(buffer, 0, i);
}
} finally {
out.close();
}
} finally {
in.close();
}
}
}
} finally {
jar.close();
}
}
final static long KB = 1024L * 1;
final static long MB = 1024L * KB;
final static long GB = 1024L * MB;
final static long TB = 1024L * GB;
final static long PB = 1024L * TB;
static DecimalFormat dfm = new DecimalFormat("####.000");
static DecimalFormat ifm = new DecimalFormat("###,###,###,###,###");
public static String dfmt(double d) {
return dfm.format(d);
}
public static String ifmt(double d) {
return ifm.format(d);
}
public static String formatBytes(long numBytes) {
StringBuffer buf = new StringBuffer();
boolean bDetails = true;
double num = numBytes;
if (numBytes < KB) {
buf.append(numBytes).append(" B");
bDetails = false;
} else if (numBytes < MB) {
buf.append(dfmt(num / KB)).append(" KB");
} else if (numBytes < GB) {
buf.append(dfmt(num / MB)).append(" MB");
} else if (numBytes < TB) {
buf.append(dfmt(num / GB)).append(" GB");
} else if (numBytes < PB) {
buf.append(dfmt(num / TB)).append(" TB");
} else {
buf.append(dfmt(num / PB)).append(" PB");
}
if (bDetails) {
buf.append(" (").append(ifmt(numBytes)).append(" bytes)");
}
return buf.toString();
}
public static String formatBytes2(long numBytes) {
StringBuffer buf = new StringBuffer();
long u = 0;
if (numBytes >= TB) {
u = numBytes / TB;
numBytes -= u * TB;
buf.append(u).append(" TB ");
}
if (numBytes >= GB) {
u = numBytes / GB;
numBytes -= u * GB;
buf.append(u).append(" GB ");
}
if (numBytes >= MB) {
u = numBytes / MB;
numBytes -= u * MB;
buf.append(u).append(" MB ");
}
if (numBytes >= KB) {
u = numBytes / KB;
numBytes -= u * KB;
buf.append(u).append(" KB ");
}
buf.append(u).append(" B"); //even if zero
return buf.toString();
}
static Environment env;
static String HOST;
static {
try {
env = new Environment();
HOST = env.getHost();
} catch (IOException io) {
io.printStackTrace();
}
}
static class StreamConsumer extends Thread {
StreamConsumer(InputStream in, OutputStream out) {
this.bin = new LineNumberReader(new BufferedReader(new InputStreamReader(in)));
if (out != null) {
this.bout = new DataOutputStream(out);
}
}
public void run() {
try {
String line;
while ((line = bin.readLine()) != null) {
if (bout != null) {
bout.writeUTF(line); //writeChars
bout.writeChar('\n');
}
}
bout.flush();
} catch (IOException io) {
}
}
LineNumberReader bin;
DataOutputStream bout;
}
static void exec(String arg, PrintStream log) {
exec(new String[] { arg }, log);
}
static void exec(String[] args, PrintStream log) {
try {
log.println("Exec: start: " + Arrays.asList(args));
Process proc = Runtime.getRuntime().exec(args);
new StreamConsumer(proc.getErrorStream(), log).start();
new StreamConsumer(proc.getInputStream(), log).start();
int status = proc.waitFor();
//if status != 0
log.println("Exec: status=" + status + ": " + Arrays.asList(args));
} catch (InterruptedException in) {
in.printStackTrace();
} catch (IOException io) {
io.printStackTrace();
}
}
static String qualifyHost(String url) {
try {
return qualifyHost(new URL(url)).toString();
} catch (IOException io) {
return url;
}
}
static URL qualifyHost(URL url) {
try {
InetAddress a = InetAddress.getByName(url.getHost());
String qualHost = a.getCanonicalHostName();
URL q = new URL(url.getProtocol(), qualHost, url.getPort(), url.getFile());
return q;
} catch (IOException io) {
return url;
}
}
static final String regexpSpecials = "[]()?*+|.!^-\\~@";
public static String regexpEscape(String plain) {
StringBuffer buf = new StringBuffer();
char[] ch = plain.toCharArray();
int csup = ch.length;
for (int c = 0; c < csup; c++) {
if (regexpSpecials.indexOf(ch[c]) != -1) {
buf.append("\\");
}
buf.append(ch[c]);
}
return buf.toString();
}
public static String safeGetCanonicalPath(File f) {
try {
String s = f.getCanonicalPath();
return (s == null) ? f.toString() : s;
} catch (IOException io) {
return f.toString();
}
}
static String slurp(File f) throws IOException {
int len = (int) f.length();
byte[] buf = new byte[len];
FileInputStream in = new FileInputStream(f);
String contents = null;
try {
in.read(buf, 0, len);
contents = new String(buf, "UTF-8");
} finally {
in.close();
}
return contents;
}
static String slurpHadoop(Path p, FileSystem fs) throws IOException {
int len = (int) fs.getLength(p);
byte[] buf = new byte[len];
FSDataInputStream in = fs.open(p);
String contents = null;
try {
in.readFully(in.getPos(), buf);
contents = new String(buf, "UTF-8");
} finally {
in.close();
}
return contents;
}
public static String rjustify(String s, int width) {
if (s == null) s = "null";
if (width > s.length()) {
s = getSpace(width - s.length()) + s;
}
return s;
}
public static String ljustify(String s, int width) {
if (s == null) s = "null";
if (width > s.length()) {
s = s + getSpace(width - s.length());
}
return s;
}
static char[] space;
static {
space = new char[300];
Arrays.fill(space, '\u0020');
}
public static String getSpace(int len) {
if (len > space.length) {
space = new char[Math.max(len, 2 * space.length)];
Arrays.fill(space, '\u0020');
}
return new String(space, 0, len);
}
static private Environment env_;
static Environment env() {
if (env_ != null) {
return env_;
}
try {
env_ = new Environment();
} catch (IOException io) {
io.printStackTrace();
}
return env_;
}
public static String makeJavaCommand(Class main, String[] argv) {
ArrayList vargs = new ArrayList();
File javaHomeBin = new File(System.getProperty("java.home"), "bin");
File jvm = new File(javaHomeBin, "java");
vargs.add(jvm.toString());
// copy parent classpath
vargs.add("-classpath");
vargs.add("\"" + System.getProperty("java.class.path") + "\"");
// add heap-size limit
vargs.add("-Xmx" + Runtime.getRuntime().maxMemory());
// Add main class and its arguments
vargs.add(main.getName());
for (int i = 0; i < argv.length; i++) {
vargs.add(argv[i]);
}
return collate(vargs, " ");
}
public static String collate(Object[] args, String sep) {
return collate(Arrays.asList(args), sep);
}
public static String collate(List args, String sep) {
StringBuffer buf = new StringBuffer();
Iterator it = args.iterator();
while (it.hasNext()) {
if (buf.length() > 0) {
buf.append(" ");
}
buf.append(it.next());
}
return buf.toString();
}
// JobConf helpers
public static FileSplit getCurrentSplit(JobConf job) {
String path = job.get("map.input.file");
if (path == null) {
return null;
}
Path p = new Path(path);
long start = Long.parseLong(job.get("map.input.start"));
long length = Long.parseLong(job.get("map.input.length"));
return new FileSplit(p, start, length, job);
}
static class TaskId {
boolean mapTask;
String jobid;
int taskid;
int execid;
}
public static boolean isLocalJobTracker(JobConf job) {
return job.get("mapred.job.tracker", "local").equals("local");
}
public static TaskId getTaskInfo(JobConf job) {
TaskId res = new TaskId();
String id = job.get("mapred.task.id");
if (isLocalJobTracker(job)) {
// the local job tracker uses different naming
res.mapTask = job.getBoolean("mapred.task.is.map", true);
res.jobid = "0";
res.taskid = 0;
res.execid = 0;
} else {
String[] e = id.split("_");
res.mapTask = e[3].equals("m");
res.jobid = e[1] + "_" + e[2];
res.taskid = Integer.parseInt(e[4]);
res.execid = Integer.parseInt(e[5]);
}
return res;
}
public static void touch(File file) throws IOException {
file = file.getAbsoluteFile();
FileOutputStream out = new FileOutputStream(file);
out.close();
if (!file.exists()) {
throw new IOException("touch failed: " + file);
}
}
public static boolean isCygwin() {
String OS = System.getProperty("os.name");
return (OS.indexOf("Windows") > -1);
}
public static String localizeBin(String path) {
if (isCygwin()) {
path = "C:/cygwin/" + path;
}
return path;
}
/** @param name foo where <junit><sysproperty key="foo" value="${foo}"/>
* If foo is undefined then Ant sets the unevaluated value.
* Take this into account when setting defaultVal. */
public static String getBoundAntProperty(String name, String defaultVal)
{
String val = System.getProperty(name);
if (val != null && val.indexOf("${") >= 0) {
val = null;
}
if (val == null) {
val = defaultVal;
}
return val;
}
}
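// Hypothetical usage sketch, not part of the original file: resolve a mapper argument
// the way the streaming job does and pretty-print a byte count. The class name passed
// to goodClassOrNull is only an example; a non-class argument simply yields null.
class StreamUtilUsageExample {
    static void example() {
        // Returns the Class if "PipeMapper" resolves against the default package prefix,
        // or null so the caller can treat the argument as an external program instead.
        Class mapperClass =
            StreamUtil.goodClassOrNull("PipeMapper", "org.apache.hadoop.streaming");
        System.out.println("resolved mapper class: " + mapperClass);
        // Human-readable size, e.g. "117.738 MB (123,456,789 bytes)".
        System.out.println(StreamUtil.formatBytes(123456789L));
    }
}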
| apache-2.0 |
zfighter/giraph-research | giraph-core/target/munged/munged/main/org/apache/giraph/metrics/GiraphMetricsRegistry.java | 9285 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.metrics;
import org.apache.giraph.conf.GiraphConfiguration;
import com.yammer.metrics.core.Counter;
import com.yammer.metrics.core.Gauge;
import com.yammer.metrics.core.Histogram;
import com.yammer.metrics.core.Meter;
import com.yammer.metrics.core.Metric;
import com.yammer.metrics.core.MetricName;
import com.yammer.metrics.core.MetricPredicate;
import com.yammer.metrics.core.MetricsRegistry;
import com.yammer.metrics.core.Timer;
import com.yammer.metrics.reporting.ConsoleReporter;
import com.yammer.metrics.reporting.JmxReporter;
import java.io.PrintStream;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* A holder for MetricsRegistry together with a JmxReporter.
*/
public class GiraphMetricsRegistry {
/** String name of group to use for metrics created */
private String groupName;
/** String type to use for metrics created */
private String type;
/** Internal Yammer registry used */
private final MetricsRegistry registry;
/** JmxReporter that send metrics to JMX */
private final JmxReporter jmxReporter;
/**
* Constructor
* @param registry {@link MetricsRegistry} to use
* @param reporter {@link JmxReporter} to use
* @param groupName String grouping for metrics
* @param type String type name for metrics
*/
protected GiraphMetricsRegistry(MetricsRegistry registry,
JmxReporter reporter, String groupName, String type) {
this.registry = registry;
this.jmxReporter = reporter;
this.groupName = groupName;
this.type = type;
if (jmxReporter != null) {
jmxReporter.start();
}
}
/**
* Create no-op empty registry that makes no-op metrics.
* @return fake registry that makes no-op metrics
*/
public static GiraphMetricsRegistry createFake() {
return new GiraphMetricsRegistry(new NoOpMetricsRegistry(), null, "", "");
}
/**
* Create registry with group to use for metrics.
*
* @param groupName String group to use for metrics.
* @param type String type to use for metrics.
* @return new metrics registry
*/
public static GiraphMetricsRegistry createWithOptional(String groupName,
String type) {
MetricsRegistry registry = new MetricsRegistry();
return new GiraphMetricsRegistry(registry, new JmxReporter(registry),
groupName, type);
}
/**
* Create registry with Hadoop Configuration and group to use for metrics.
* Checks the configuration object for whether the optional metrics are
* enabled, and optionally creates those.
*
* @param conf Hadoop Configuration to use.
* @param groupName String group to use for metrics.
* @param type String type to use for metrics.
* @return new metrics registry
*/
public static GiraphMetricsRegistry create(GiraphConfiguration conf,
String groupName, String type) {
if (conf.metricsEnabled()) {
return createWithOptional(groupName, type);
} else {
return createFake();
}
}
/**
* Get map of all metrics.
*
* @return Map of all metrics held.
*/
public Map<MetricName, Metric> getAll() {
return registry.allMetrics();
}
/**
* Get group name used for metrics.
*
* @return String group name.
*/
public String getGroupName() {
return groupName;
}
/**
* Set group name used by this MetricsRegistry. Used for incrementing
* superstep number to create a new hierarchy of metrics per superstep.
*
* @param groupName String group name to use.
*/
protected void setGroupName(String groupName) {
this.groupName = groupName;
}
/**
* Get type used for new metrics created
*
* @return String type to use for metrics
*/
public String getType() {
return type;
}
/**
* Set type to use for new metrics
*
* @param type String type to use
*/
public void setType(String type) {
this.type = type;
}
/**
* Dump all the metrics to the PrintStream provided.
*
* @param out PrintStream to write metrics to.
*/
public void printToStream(PrintStream out) {
out.println("");
new ConsoleReporter(registry, out, MetricPredicate.ALL).run();
}
/**
* Get internal MetricsRegistry used.
*
* @return MetricsRegistry being used.
*/
protected MetricsRegistry getInternalRegistry() {
return registry;
}
/**
* Creates a new {@link com.yammer.metrics.core.Counter} and registers it
* under the given group and name.
*
* @param name the name of the metric
* @return a new {@link com.yammer.metrics.core.Counter}
*/
public Counter getCounter(String name) {
return registry.newCounter(makeMetricName(name));
}
/**
* Given a new {@link com.yammer.metrics.core.Gauge}, registers it under the
* given group and name.
*
* @param name the name of the metric
* @param metric the metric
* @param <T> the type of the value returned by the metric
* @return {@code metric}
*/
public <T> Gauge<T> getGauge(String name, Gauge<T> metric) {
return registry.newGauge(makeMetricName(name), metric);
}
/**
* Creates a new biased {@link Histogram} and registers it under the given
* group and name
*
* @param name name of metric
* @return new {@link Histogram}
*/
public Histogram getBiasedHistogram(String name) {
return getHistogram(name, true);
}
/**
* Creates a new uniform {@link Histogram} and registers it under the given
* group and name
*
* @param name name of metric
* @return new {@link Histogram}
*/
public Histogram getUniformHistogram(String name) {
return getHistogram(name, false);
}
/**
* Creates a new {@link Histogram} and registers it under the given group
* and name.
*
* @param name the name of the metric
* @param biased whether or not the histogram should be biased
* @return a new {@link Histogram}
*/
private Histogram getHistogram(String name, boolean biased) {
return registry.newHistogram(makeMetricName(name), biased);
}
/**
* Creates a new {@link com.yammer.metrics.core.Meter} and registers it under
* the given group and name.
*
* @param meterDesc description of meter
* @return new {@link com.yammer.metrics.core.Meter}
*/
public Meter getMeter(MeterDesc meterDesc) {
return getMeter(meterDesc.getName(), meterDesc.getType(),
meterDesc.getTimeUnit());
}
/**
* Creates a new {@link com.yammer.metrics.core.Meter} and registers it under
* the given group and name.
*
* @param name the name of the metric
* @param eventType the plural name of the type of events the meter is
* measuring (e.g., {@code "requests"})
* @param timeUnit the rate unit of the new meter
* @return a new {@link com.yammer.metrics.core.Meter}
*/
public Meter getMeter(String name, String eventType, TimeUnit timeUnit) {
return registry.newMeter(makeMetricName(name), eventType, timeUnit);
}
/**
* Create a new {@link Timer} from the description and registers it under the
* given group and name.
*
* @param timerDesc TimerDesc describing the timer
* @return new {@link Timer}
*/
public Timer getTimer(TimerDesc timerDesc) {
return getTimer(timerDesc.getName(), timerDesc.getDurationUnit(),
timerDesc.getTimeUnit());
}
/**
* Creates a new {@link Timer} and registers it under the given
* group and name.
*
* @param name the name of the metric
* @param durationUnit the duration scale unit of the new timer
* @param rateUnit the rate scale unit of the new timer
* @return a new {@link Timer}
*/
public Timer getTimer(String name, TimeUnit durationUnit, TimeUnit rateUnit) {
return registry.newTimer(makeMetricName(name), durationUnit, rateUnit);
}
/**
* Get a Gauge that is already present in the MetricsRegistry
*
* @param name String name of Gauge
* @param <T> value type Gauge returns
* @return Gauge<T> from MetricsRegistry
*/
public <T> Gauge<T> getExistingGauge(String name) {
Metric metric = registry.allMetrics().get(makeMetricName(name));
return metric instanceof Gauge ? (Gauge<T>) metric : null;
}
/**
* Create a MetricName using the job ID, group, and name.
*
* @param name String name given to metric
* @return MetricName for use with MetricsRegistry
*/
protected MetricName makeMetricName(String name) {
return new MetricName(groupName, type, name);
}
}
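/*
 * Minimal usage sketch of this registry. The group name, type and metric names
 * below are illustrative placeholders (not values taken from Giraph itself), and
 * the sketch assumes metrics are enabled in the given GiraphConfiguration.
 *
 *   GiraphMetricsRegistry metrics =
 *       GiraphMetricsRegistry.create(conf, "giraph.worker", "superstep-1");
 *   Counter sent = metrics.getCounter("messages-sent");
 *   sent.inc();
 *   Histogram sizes = metrics.getUniformHistogram("request-sizes");
 *   sizes.update(1024);
 *   metrics.printToStream(System.out);
 */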
| apache-2.0 |
openweave/openweave-core | third_party/android/platform-libcore/android-platform-libcore/luni/src/test/java/org/apache/harmony/nio/tests/java/nio/ReadOnlyHeapCharBufferTest.java | 1168 | /* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.nio.tests.java.nio;
import dalvik.annotation.TestTargetClass;
@TestTargetClass(java.nio.CharBuffer.class)
public class ReadOnlyHeapCharBufferTest extends ReadOnlyCharBufferTest {
protected void setUp() throws Exception {
super.setUp();
}
protected void tearDown() throws Exception {
super.tearDown();
}
}
| apache-2.0 |
SeleniumHQ/selenium | java/src/org/openqa/selenium/remote/server/handler/ExecuteAsyncScript.java | 2218 | // Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.remote.server.handler;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.remote.server.Session;
import org.openqa.selenium.remote.server.handler.internal.ArgumentConverter;
import org.openqa.selenium.remote.server.handler.internal.ResultConverter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class ExecuteAsyncScript extends WebDriverHandler<Object> {
private volatile String script;
private final List<Object> args = new ArrayList<>();
public ExecuteAsyncScript(Session session) {
super(session);
}
@Override
public void setJsonParameters(Map<String, Object> allParameters) throws Exception {
super.setJsonParameters(allParameters);
script = (String) allParameters.get("script");
List<?> params = (List<?>) allParameters.get("args");
params.stream().map(new ArgumentConverter(getKnownElements())).forEach(args::add);
}
@Override
public Object call() {
Object value;
if (args.size() > 0) {
value = ((JavascriptExecutor) getDriver()).executeAsyncScript(script, args.toArray());
} else {
value = ((JavascriptExecutor) getDriver()).executeAsyncScript(script);
}
return new ResultConverter(getKnownElements()).apply(value);
}
@Override
public String toString() {
return String.format("[execute async script: %s, %s]", script, args);
}
}
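/*
 * Shape of the JSON parameters this handler consumes (see setJsonParameters):
 * a "script" string and an "args" array. The script body below is only an
 * illustrative placeholder; the async callback is passed as the last argument.
 *
 *   {
 *     "script": "var done = arguments[arguments.length - 1]; done(42);",
 *     "args": []
 *   }
 */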
| apache-2.0 |
mpage23/flyway | flyway-core/src/test/java/org/flywaydb/core/internal/dbsupport/postgresql/PostgreSQLSqlStatementBuilderSmallTest.java | 5421 | /**
* Copyright 2010-2015 Axel Fontaine
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flywaydb.core.internal.dbsupport.postgresql;
import org.flywaydb.core.internal.util.StringUtils;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* Test for PostgreSQLSqlStatementBuilder.
*/
public class PostgreSQLSqlStatementBuilderSmallTest {
/**
* Class under test.
*/
private PostgreSQLSqlStatementBuilder statementBuilder = new PostgreSQLSqlStatementBuilder();
@Test
public void regclass() {
String sqlScriptSource = "CREATE TABLE base_table (\n" +
"base_table_id integer DEFAULT nextval('base_table_seq'::regclass) NOT NULL\n" +
");";
String[] lines = StringUtils.tokenizeToStringArray(sqlScriptSource, "\n");
for (String line : lines) {
statementBuilder.addLine(line);
}
assertTrue(statementBuilder.isTerminated());
}
@Test
public void function() {
String sqlScriptSource = "CREATE FUNCTION add(integer, integer) RETURNS integer\n" +
" LANGUAGE sql IMMUTABLE STRICT\n" +
" AS $_$select $1 + $2;$_$;\n";
String[] lines = StringUtils.tokenizeToStringArray(sqlScriptSource, "\n");
for (String line : lines) {
statementBuilder.addLine(line);
}
assertTrue(statementBuilder.isTerminated());
}
@Test
public void ts() {
String line = "insert into testDate values (TIMESTAMP '2004-10-19 10:23:54')";
statementBuilder.addLine(line + ";\n");
assertTrue(statementBuilder.isTerminated());
assertEquals(line, statementBuilder.getSqlStatement().getSql());
}
@Test
public void multilineStringLiteralWithSemicolons() {
String sqlScriptSource = "INSERT INTO address VALUES (1, '1. first;\n"
+ "2. second;\n"
+ "3. third;')";
String[] lines = StringUtils.tokenizeToStringArray(sqlScriptSource, "\n");
for (String line : lines) {
statementBuilder.addLine(line);
}
assertEquals(sqlScriptSource, statementBuilder.getSqlStatement().getSql());
}
@Test
public void multilineDollar() {
final String sqlScriptSource =
"INSERT INTO dollar VALUES($$Hello\n" +
"multi-line\n" +
"quotes;\n" +
"$$)";
String[] lines = StringUtils.tokenizeToStringArray(sqlScriptSource, "\n");
for (String line : lines) {
statementBuilder.addLine(line);
}
assertEquals(sqlScriptSource, statementBuilder.getSqlStatement().getSql());
}
@Test
public void multilineDollarNestedQuotes() {
final String sqlScriptSource =
"CREATE OR REPLACE FUNCTION upperFunc()\n" +
"RETURNS void AS $$\n" +
"DECLARE\n" +
"var varchar = 'abc';\n" +
"BEGIN\n" +
"raise info 'upperFunc';\n" +
"CREATE OR REPLACE FUNCTION internalFunc()\n" +
"RETURNS void AS $BODY$\n" +
"DECLARE\n" +
"var varchar1 = 'abc';\n" +
"BEGIN\n" +
"raise info 'internalFunc'\n" +
"END;\n" +
"$BODY$ LANGUAGE plpgsql;\n" +
"END;\n" +
"$$ LANGUAGE plpgsql";
String[] lines = StringUtils.tokenizeToStringArray(sqlScriptSource, "\n");
for (String line : lines) {
statementBuilder.addLine(line);
}
assertEquals(sqlScriptSource, statementBuilder.getSqlStatement().getSql());
}
@Test
public void dollarQuoteRegex() {
assertFalse("abc".matches(PostgreSQLSqlStatementBuilder.DOLLAR_QUOTE_REGEX));
assertFalse("abc$".matches(PostgreSQLSqlStatementBuilder.DOLLAR_QUOTE_REGEX));
assertFalse("$abc".matches(PostgreSQLSqlStatementBuilder.DOLLAR_QUOTE_REGEX));
assertTrue("$$".matches(PostgreSQLSqlStatementBuilder.DOLLAR_QUOTE_REGEX));
assertTrue("$abc$".matches(PostgreSQLSqlStatementBuilder.DOLLAR_QUOTE_REGEX));
assertTrue("$ABC$".matches(PostgreSQLSqlStatementBuilder.DOLLAR_QUOTE_REGEX));
assertTrue("$aBcDeF$".matches(PostgreSQLSqlStatementBuilder.DOLLAR_QUOTE_REGEX));
assertTrue("$aBc_DeF$".matches(PostgreSQLSqlStatementBuilder.DOLLAR_QUOTE_REGEX));
assertTrue("$abcDEF123$".matches(PostgreSQLSqlStatementBuilder.DOLLAR_QUOTE_REGEX));
assertTrue("$abcDEF123$xxx".matches(PostgreSQLSqlStatementBuilder.DOLLAR_QUOTE_REGEX));
}
} | apache-2.0 |
thomasdarimont/keycloak | services/src/main/java/org/keycloak/services/clientpolicy/executor/ConfidentialClientAcceptExecutor.java | 2690 | /*
* Copyright 2021 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.services.clientpolicy.executor;
import org.keycloak.OAuthErrorException;
import org.keycloak.models.ClientModel;
import org.keycloak.models.KeycloakSession;
import org.keycloak.representations.idm.ClientPolicyExecutorConfigurationRepresentation;
import org.keycloak.services.clientpolicy.ClientPolicyContext;
import org.keycloak.services.clientpolicy.ClientPolicyException;
/**
* @author <a href="mailto:takashi.norimatsu.ws@hitachi.com">Takashi Norimatsu</a>
*/
public class ConfidentialClientAcceptExecutor implements ClientPolicyExecutorProvider<ClientPolicyExecutorConfigurationRepresentation> {
protected final KeycloakSession session;
public ConfidentialClientAcceptExecutor(KeycloakSession session) {
this.session = session;
}
@Override
public String getProviderId() {
return ConfidentialClientAcceptExecutorFactory.PROVIDER_ID;
}
@Override
public void executeOnEvent(ClientPolicyContext context) throws ClientPolicyException {
switch (context.getEvent()) {
case AUTHORIZATION_REQUEST:
case TOKEN_REQUEST:
case SERVICE_ACCOUNT_TOKEN_REQUEST:
case BACKCHANNEL_AUTHENTICATION_REQUEST:
case BACKCHANNEL_TOKEN_REQUEST:
checkIsConfidentialClient();
return;
default:
return;
}
}
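    /**
     * Throws a ClientPolicyException if there is no client in the current context,
     * or if the client is public or bearer-only (i.e. not a confidential client).
     */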
private void checkIsConfidentialClient() throws ClientPolicyException {
ClientModel client = session.getContext().getClient();
if (client == null) {
throw new ClientPolicyException(OAuthErrorException.INVALID_CLIENT, "invalid client access type");
}
if (client.isPublicClient()) {
throw new ClientPolicyException(OAuthErrorException.INVALID_CLIENT, "invalid client access type");
}
if (client.isBearerOnly()) {
throw new ClientPolicyException(OAuthErrorException.INVALID_CLIENT, "invalid client access type");
}
}
}
| apache-2.0 |
nicoben/pentaho-kettle | engine/test-src/org/pentaho/di/trans/steps/synchronizeaftermerge/SynchronizeAfterMergeMetaTest.java | 4568 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.synchronizeaftermerge;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.steps.loadsave.LoadSaveTester;
import org.pentaho.di.trans.steps.loadsave.initializer.InitializerInterface;
import org.pentaho.di.trans.steps.loadsave.validator.ArrayLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.BooleanLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.FieldLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.StringLoadSaveValidator;
public class SynchronizeAfterMergeMetaTest implements InitializerInterface<StepMetaInterface> {
LoadSaveTester loadSaveTester;
Class<SynchronizeAfterMergeMeta> testMetaClass = SynchronizeAfterMergeMeta.class;
@Before
public void setUpLoadSave() throws Exception {
KettleEnvironment.init();
PluginRegistry.init( true );
List<String> attributes =
Arrays.asList( "schemaName", "tableName", "databaseMeta", "commitSize", "tableNameInField", "tablenameField",
"operationOrderField", "useBatchUpdate", "performLookup", "OrderInsert", "OrderUpdate", "OrderDelete",
"keyStream", "keyLookup", "keyCondition", "keyStream2", "updateLookup", "updateStream", "update" );
Map<String, String> getterMap = new HashMap<String, String>() {
{
put( "tableNameInField", "istablenameInField" );
put( "tablenameField", "gettablenameField" );
put( "useBatchUpdate", "useBatchUpdate" );
}
};
Map<String, String> setterMap = new HashMap<String, String>() {
{
put( "tableNameInField", "settablenameInField" );
put( "tablenameField", "settablenameField" );
}
};
FieldLoadSaveValidator<String[]> stringArrayLoadSaveValidator =
new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 5 );
Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
attrValidatorMap.put( "keyStream", stringArrayLoadSaveValidator );
attrValidatorMap.put( "keyStream2", stringArrayLoadSaveValidator );
attrValidatorMap.put( "keyLookup", stringArrayLoadSaveValidator );
attrValidatorMap.put( "keyCondition", stringArrayLoadSaveValidator );
attrValidatorMap.put( "updateLookup", stringArrayLoadSaveValidator );
attrValidatorMap.put( "updateStream", stringArrayLoadSaveValidator );
attrValidatorMap.put( "update", new ArrayLoadSaveValidator<Boolean>( new BooleanLoadSaveValidator(), 5 ) );
Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
loadSaveTester =
new LoadSaveTester( testMetaClass, attributes, new ArrayList<String>(), new ArrayList<String>(),
getterMap, setterMap, attrValidatorMap, typeValidatorMap, this );
}
// Call the allocate method on the LoadSaveTester meta class
@Override
public void modify( StepMetaInterface someMeta ) {
if ( someMeta instanceof SynchronizeAfterMergeMeta ) {
( (SynchronizeAfterMergeMeta) someMeta ).allocate( 5, 5 );
}
}
@Test
public void testSerialization() throws KettleException {
loadSaveTester.testSerialization();
}
}
| apache-2.0 |
mpage23/flyway | flyway-core/src/main/java/org/flywaydb/core/internal/dbsupport/mysql/MySQLSchema.java | 6160 | /**
* Copyright 2010-2015 Axel Fontaine
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flywaydb.core.internal.dbsupport.mysql;
import org.flywaydb.core.internal.dbsupport.JdbcTemplate;
import org.flywaydb.core.internal.dbsupport.Schema;
import org.flywaydb.core.internal.dbsupport.Table;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* MySQL implementation of Schema.
*/
public class MySQLSchema extends Schema<MySQLDbSupport> {
/**
* Creates a new MySQL schema.
*
* @param jdbcTemplate The Jdbc Template for communicating with the DB.
* @param dbSupport The database-specific support.
* @param name The name of the schema.
*/
public MySQLSchema(JdbcTemplate jdbcTemplate, MySQLDbSupport dbSupport, String name) {
super(jdbcTemplate, dbSupport, name);
}
@Override
protected boolean doExists() throws SQLException {
return jdbcTemplate.queryForInt("SELECT COUNT(*) FROM information_schema.schemata WHERE schema_name=?", name) > 0;
}
@Override
protected boolean doEmpty() throws SQLException {
int objectCount = jdbcTemplate.queryForInt("Select "
+ "(Select count(*) from information_schema.TABLES Where TABLE_SCHEMA=?) + "
+ "(Select count(*) from information_schema.VIEWS Where TABLE_SCHEMA=?) + "
+ "(Select count(*) from information_schema.TABLE_CONSTRAINTS Where TABLE_SCHEMA=?) + "
+ "(Select count(*) from information_schema.EVENTS Where EVENT_SCHEMA=?) + "
+ "(Select count(*) from information_schema.ROUTINES Where ROUTINE_SCHEMA=?)",
name, name, name, name, name
);
return objectCount == 0;
}
@Override
protected void doCreate() throws SQLException {
jdbcTemplate.execute("CREATE SCHEMA " + dbSupport.quote(name));
}
@Override
protected void doDrop() throws SQLException {
jdbcTemplate.execute("DROP SCHEMA " + dbSupport.quote(name));
}
@Override
protected void doClean() throws SQLException {
for (String statement : cleanEvents()) {
jdbcTemplate.execute(statement);
}
for (String statement : cleanRoutines()) {
jdbcTemplate.execute(statement);
}
for (String statement : cleanViews()) {
jdbcTemplate.execute(statement);
}
jdbcTemplate.execute("SET FOREIGN_KEY_CHECKS = 0");
for (Table table : allTables()) {
table.drop();
}
jdbcTemplate.execute("SET FOREIGN_KEY_CHECKS = 1");
}
/**
* Generate the statements to clean the events in this schema.
*
* @return The list of statements.
* @throws SQLException when the clean statements could not be generated.
*/
private List<String> cleanEvents() throws SQLException {
List<Map<String, String>> eventNames =
jdbcTemplate.queryForList(
"SELECT event_name FROM information_schema.events WHERE event_schema=?",
name);
List<String> statements = new ArrayList<String>();
for (Map<String, String> row : eventNames) {
statements.add("DROP EVENT " + dbSupport.quote(name, row.get("event_name")));
}
return statements;
}
/**
* Generate the statements to clean the routines in this schema.
*
* @return The list of statements.
* @throws SQLException when the clean statements could not be generated.
*/
private List<String> cleanRoutines() throws SQLException {
List<Map<String, String>> routineNames =
jdbcTemplate.queryForList(
"SELECT routine_name, routine_type FROM information_schema.routines WHERE routine_schema=?",
name);
List<String> statements = new ArrayList<String>();
for (Map<String, String> row : routineNames) {
String routineName = row.get("routine_name");
String routineType = row.get("routine_type");
statements.add("DROP " + routineType + " " + dbSupport.quote(name, routineName));
}
return statements;
}
/**
* Generate the statements to clean the views in this schema.
*
* @return The list of statements.
* @throws SQLException when the clean statements could not be generated.
*/
private List<String> cleanViews() throws SQLException {
List<String> viewNames =
jdbcTemplate.queryForStringList(
"SELECT table_name FROM information_schema.views WHERE table_schema=?", name);
List<String> statements = new ArrayList<String>();
for (String viewName : viewNames) {
statements.add("DROP VIEW " + dbSupport.quote(name, viewName));
}
return statements;
}
@Override
protected Table[] doAllTables() throws SQLException {
List<String> tableNames = jdbcTemplate.queryForStringList(
"SELECT table_name FROM information_schema.tables WHERE table_schema=? AND table_type='BASE TABLE'", name);
Table[] tables = new Table[tableNames.size()];
for (int i = 0; i < tableNames.size(); i++) {
tables[i] = new MySQLTable(jdbcTemplate, dbSupport, this, tableNames.get(i));
}
return tables;
}
@Override
public Table getTable(String tableName) {
return new MySQLTable(jdbcTemplate, dbSupport, this, tableName);
}
}
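/*
 * Sketch of the statement sequence doClean() issues for a schema named "flyway"
 * (object names are illustrative; base tables are dropped through MySQLTable.drop()):
 *
 *   DROP EVENT `flyway`.`nightly_purge`
 *   DROP PROCEDURE `flyway`.`recalculate_totals`
 *   DROP VIEW `flyway`.`v_orders`
 *   SET FOREIGN_KEY_CHECKS = 0
 *   -- MySQLTable.drop() for each base table
 *   SET FOREIGN_KEY_CHECKS = 1
 */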
| apache-2.0 |
asedunov/intellij-community | platform/dvcs-impl/src/com/intellij/dvcs/push/ui/PushTargetTextField.java | 2311 | /*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.dvcs.push.ui;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.project.Project;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.textCompletion.DefaultTextCompletionValueDescriptor;
import com.intellij.util.textCompletion.TextCompletionProvider;
import com.intellij.util.textCompletion.TextFieldWithCompletion;
import com.intellij.util.textCompletion.ValuesCompletionProvider.ValuesCompletionProviderDumbAware;
import org.jetbrains.annotations.NotNull;
import java.awt.event.FocusAdapter;
import java.awt.event.FocusEvent;
import java.util.List;
public class PushTargetTextField extends TextFieldWithCompletion {
public PushTargetTextField(@NotNull Project project, @NotNull List<String> targetVariants, @NotNull String defaultTargetName) {
super(project, getCompletionProvider(targetVariants), defaultTargetName, true, true, true);
addFocusListener(new FocusAdapter() {
@Override
public void focusGained(FocusEvent e) {
selectAll();
}
@Override
public void focusLost(FocusEvent e) {
removeSelection();
}
});
}
@Override
protected void updateBorder(@NotNull final EditorEx editor) {
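    // No-op: the default border update is intentionally suppressed for this field.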
}
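  /**
   * Builds a completion provider over the given target variants, keeping suggestions
   * in the order the variants were passed in (earlier variants rank first).
   */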
@NotNull
private static TextCompletionProvider getCompletionProvider(@NotNull final List<String> targetVariants) {
return new ValuesCompletionProviderDumbAware<>(new DefaultTextCompletionValueDescriptor.StringValueDescriptor() {
@Override
public int compare(String item1, String item2) {
return Integer.compare(ContainerUtil.indexOf(targetVariants, item1), ContainerUtil.indexOf(targetVariants, item2));
}
}, targetVariants);
}
}
| apache-2.0 |
echinopsii/net.echinopsii.3rdparty.blueprints | blueprints-test/src/test/java/com/tinkerpop/blueprints/GraphFactoryTest.java | 1089 | package com.tinkerpop.blueprints;
import com.tinkerpop.blueprints.impls.tg.TinkerGraph;
import junit.framework.TestCase;
import org.apache.commons.configuration.BaseConfiguration;
import org.apache.commons.configuration.Configuration;
import java.util.HashMap;
import java.util.Map;
/**
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
public class GraphFactoryTest extends TestCase {
public void testOpenInMemoryTinkerGraphViaApacheConfig(){
final Configuration conf = new BaseConfiguration();
conf.setProperty("blueprints.graph", "com.tinkerpop.blueprints.impls.tg.TinkerGraph");
final Graph g = GraphFactory.open(conf);
assertNotNull(g);
assertTrue(g instanceof TinkerGraph);
}
public void testOpenInMemoryTinkerGraphViaMap(){
final Map<String,Object> conf = new HashMap<String,Object>();
conf.put("blueprints.graph", "com.tinkerpop.blueprints.impls.tg.TinkerGraph");
final Graph g = GraphFactory.open(conf);
assertNotNull(g);
assertTrue(g instanceof TinkerGraph);
}
}
| bsd-3-clause |
erpcya/adempierePOS | zkwebui/WEB-INF/src/org/adempiere/webui/apps/wf/WFPanel.java | 8196 | /******************************************************************************
* Copyright (C) 2008 Low Heng Sin *
* This program is free software; you can redistribute it and/or modify it *
* under the terms version 2 of the GNU General Public License as published *
* by the Free Software Foundation. This program is distributed in the hope *
* that it will be useful, but WITHOUT ANY WARRANTY; without even the implied *
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *
* See the GNU General Public License for more details. *
* You should have received a copy of the GNU General Public License along *
* with this program; if not, write to the Free Software Foundation, Inc., *
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. *
*****************************************************************************/
package org.adempiere.webui.apps.wf;
import java.awt.Dimension;
import java.awt.Rectangle;
import java.awt.image.BufferedImage;
import java.io.ByteArrayOutputStream;
import java.util.Properties;
import java.util.logging.Level;
import javax.imageio.ImageIO;
import org.adempiere.webui.exception.ApplicationException;
import org.adempiere.webui.session.SessionManager;
import org.compiere.apps.wf.WFLine;
import org.compiere.util.CLogger;
import org.compiere.util.Env;
import org.compiere.wf.MWFNode;
import org.compiere.wf.MWFNodeNext;
import org.compiere.wf.MWorkflow;
import org.zkoss.image.AImage;
import org.zkoss.zk.ui.event.Event;
import org.zkoss.zk.ui.event.EventListener;
import org.zkoss.zk.ui.event.Events;
import org.zkoss.zk.ui.event.MouseEvent;
import org.zkoss.zkex.zul.Borderlayout;
import org.zkoss.zkex.zul.Center;
import org.zkoss.zkex.zul.South;
import org.zkoss.zul.Area;
import org.zkoss.zul.Div;
import org.zkoss.zul.Html;
import org.zkoss.zul.Imagemap;
/**
* WorkFlow Panel
*
* @author Low Heng Sin
*/
public class WFPanel extends Borderlayout implements EventListener
{
/**
*
*/
private static final long serialVersionUID = 8777798080154603970L;
/**
* Create Workflow Panel
*/
public WFPanel ()
{
try
{
jbInit();
}
catch(Exception e)
{
log.log(Level.SEVERE, "WFPanel", e);
}
m_WindowNo = SessionManager.getAppDesktop().registerWindow(this);
} // WFPanel
/** Window No */
private int m_WindowNo = 0;
/** Workflow Model */
private MWorkflow m_wf = null;
/** Context */
private Properties m_ctx = Env.getCtx();
/** Logger */
private static CLogger log = CLogger.getCLogger(WFPanel.class);
// IO
private WFNodeContainer nodeContainer = new WFNodeContainer();
private Html infoTextPane = new Html();
private Div contentPanel = new Div();
//
/**
* Static Init
* <pre>
* centerScrollPane
* centerPanel
* south Panel
* infoScrollPane
* buttonPanel
* </pre>
* @throws Exception
*/
private void jbInit() throws Exception
{
this.setStyle("height: 100%; width: 100%; position: absolute");
Center center = new Center();
this.appendChild(center);
center.appendChild(contentPanel);
    contentPanel.setStyle("width: 100%; height: 100%;");
center.setAutoscroll(true);
center.setFlex(true);
South south = new South();
this.appendChild(south);
south.appendChild(infoTextPane);
south.setHeight("15%");
south.setSplittable(true);
south.setCollapsible(true);
south.setAutoscroll(true);
south.setFlex(true);
} // jbInit
/**
* Dispose
* @see org.compiere.apps.form.FormPanel#dispose()
*/
public void dispose()
{
SessionManager.getAppDesktop().closeActiveWindow();
} // dispose
/**
* Load Workflow & Nodes
* @param AD_Workflow_ID ID
*/
public void load (int AD_Workflow_ID)
{
log.fine("AD_Workflow_ID=" + AD_Workflow_ID);
if (AD_Workflow_ID == 0)
return;
int AD_Client_ID = Env.getAD_Client_ID(Env.getCtx());
// Get Workflow
m_wf = new MWorkflow (Env.getCtx(), AD_Workflow_ID, null);
nodeContainer.removeAll();
nodeContainer.setWorkflow(m_wf);
// Add Nodes for Paint
MWFNode[] nodes = m_wf.getNodes(true, AD_Client_ID);
for (int i = 0; i < nodes.length; i++)
{
WFNode wfn = new WFNode (nodes[i]);
nodeContainer.add (wfn);
// Add Lines
MWFNodeNext[] nexts = nodes[i].getTransitions(AD_Client_ID);
for (int j = 0; j < nexts.length; j++)
nodeContainer.add (new WFLine (nexts[j]));
}
Dimension dimension = nodeContainer.getDimension();
BufferedImage bi = new BufferedImage (dimension.width + 2, dimension.height + 2, BufferedImage.TYPE_INT_ARGB);
nodeContainer.paint(bi.createGraphics());
ByteArrayOutputStream os = new ByteArrayOutputStream();
try {
ImageIO.write(bi, "png", os);
AImage imageContent = new AImage("workflow.png", os.toByteArray());
Imagemap image = new Imagemap();
image.setWidth(dimension.width + "px");
image.setHeight(dimension.height + "px");
image.setContent(imageContent);
contentPanel.appendChild(image);
image.addEventListener(Events.ON_CLICK, this);
for(WFNode node : nodeContainer.getNodes()) {
Area area = new Area();
Rectangle rect = node.getBounds();
area.setCoords(rect.x + "," + rect.y + "," + (rect.x+rect.width) + ","
+ (rect.y+rect.height));
image.appendChild(area);
area.setId("WFN_"+node.getAD_WF_Node_ID());
StringBuffer tooltip = new StringBuffer();
String s = node.getNode().getDescription(true);
if (s != null && s.trim().length() > 0)
tooltip.append(s);
String h = node.getNode().getHelp(true);
if (h != null && h.trim().length() > 0) {
if (tooltip.length() > 0)
tooltip.append(". ");
tooltip.append(h);
}
area.setTooltiptext(tooltip.toString());
}
} catch (Exception e) {
log.log(Level.SEVERE, e.getLocalizedMessage(), e);
}
// Info Text
StringBuffer msg = new StringBuffer("");
msg.append("<H2>").append(m_wf.getName(true)).append("</H2>");
String s = m_wf.getDescription(true);
if (s != null && s.length() > 0)
msg.append("<B>").append(s).append("</B>");
s = m_wf.getHelp(true);
if (s != null && s.length() > 0)
msg.append("<BR>").append(s);
infoTextPane.setContent(msg.toString());
} // load
/**
* String Representation
* @return info
*/
public String toString()
{
StringBuffer sb = new StringBuffer("WorkflowPanel[");
if (m_wf != null)
sb.append(m_wf.getAD_Workflow_ID());
sb.append("]");
return sb.toString();
} // toString
public MWorkflow getWorkflow()
{
return m_wf;
}
public void onEvent(Event event) throws Exception {
if (Events.ON_CLICK.equals(event.getName()) && event instanceof MouseEvent) {
MouseEvent me = (MouseEvent) event;
String areaId = me.getArea();
if (areaId != null && areaId.startsWith("WFN_")) {
int id = Integer.valueOf(areaId.substring(4));
for(WFNode node : nodeContainer.getNodes()) {
if (node.getAD_WF_Node_ID() == id) {
start(node);
break;
}
}
}
}
}
private void start(WFNode node) {
MWFNode wfn = node.getNode();
if (wfn.getAD_Window_ID() > 0) {
SessionManager.getAppDesktop().openWindow(wfn.getAD_Window_ID());
} else if (wfn.getAD_Form_ID() > 0) {
SessionManager.getAppDesktop().openForm(wfn.getAD_Form_ID());
} else if (wfn.getAD_Browse_ID() > 0) {
SessionManager.getAppDesktop().openBrowse(wfn.getAD_Browse_ID());
} else if (wfn.getAD_Process_ID() > 0) {
SessionManager.getAppDesktop().openProcessDialog(wfn.getAD_Process_ID(), false);
} else if (wfn.getAD_Task_ID() > 0) {
SessionManager.getAppDesktop().openTask(wfn.getAD_Task_ID());
} else if (wfn.getWorkflow_ID() > 0) {
SessionManager.getAppDesktop().openWorkflow(wfn.getWorkflow_ID());
} else {
throw new ApplicationException("Action not yet implemented: " + wfn.getAction());
}
}
} // WFPanel
| gpl-2.0 |
jmdjr/BitPlus | src_vendor/org/objectweb/asm/tree/analysis/BasicInterpreter.java | 11549 | /***
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2011 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.objectweb.asm.tree.analysis;
import java.util.List;
import org.objectweb.asm.Handle;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.tree.AbstractInsnNode;
import org.objectweb.asm.tree.FieldInsnNode;
import org.objectweb.asm.tree.IntInsnNode;
import org.objectweb.asm.tree.InvokeDynamicInsnNode;
import org.objectweb.asm.tree.LdcInsnNode;
import org.objectweb.asm.tree.MethodInsnNode;
import org.objectweb.asm.tree.MultiANewArrayInsnNode;
import org.objectweb.asm.tree.TypeInsnNode;
/**
* An {@link Interpreter} for {@link BasicValue} values.
*
* @author Eric Bruneton
* @author Bing Ran
*/
public class BasicInterpreter extends Interpreter<BasicValue> implements
Opcodes {
public BasicInterpreter() {
super(ASM5);
}
protected BasicInterpreter(final int api) {
super(api);
}
@Override
public BasicValue newValue(final Type type) {
if (type == null) {
return BasicValue.UNINITIALIZED_VALUE;
}
switch (type.getSort()) {
case Type.VOID:
return null;
case Type.BOOLEAN:
case Type.CHAR:
case Type.BYTE:
case Type.SHORT:
case Type.INT:
return BasicValue.INT_VALUE;
case Type.FLOAT:
return BasicValue.FLOAT_VALUE;
case Type.LONG:
return BasicValue.LONG_VALUE;
case Type.DOUBLE:
return BasicValue.DOUBLE_VALUE;
case Type.ARRAY:
case Type.OBJECT:
return BasicValue.REFERENCE_VALUE;
default:
throw new Error("Internal error");
}
}
@Override
public BasicValue newOperation(final AbstractInsnNode insn)
throws AnalyzerException {
switch (insn.opcode()) {
case ACONST_NULL:
return newValue(Type.getObjectType("null"));
case ICONST_M1:
case ICONST_0:
case ICONST_1:
case ICONST_2:
case ICONST_3:
case ICONST_4:
case ICONST_5:
return BasicValue.INT_VALUE;
case LCONST_0:
case LCONST_1:
return BasicValue.LONG_VALUE;
case FCONST_0:
case FCONST_1:
case FCONST_2:
return BasicValue.FLOAT_VALUE;
case DCONST_0:
case DCONST_1:
return BasicValue.DOUBLE_VALUE;
case BIPUSH:
case SIPUSH:
return BasicValue.INT_VALUE;
case LDC:
Object cst = ((LdcInsnNode) insn).cst;
if (cst instanceof Integer) {
return BasicValue.INT_VALUE;
} else if (cst instanceof Float) {
return BasicValue.FLOAT_VALUE;
} else if (cst instanceof Long) {
return BasicValue.LONG_VALUE;
} else if (cst instanceof Double) {
return BasicValue.DOUBLE_VALUE;
} else if (cst instanceof String) {
return newValue(Type.getObjectType("java/lang/String"));
} else if (cst instanceof Type) {
int sort = ((Type) cst).getSort();
if (sort == Type.OBJECT || sort == Type.ARRAY) {
return newValue(Type.getObjectType("java/lang/Class"));
} else if (sort == Type.METHOD) {
return newValue(Type
.getObjectType("java/lang/invoke/MethodType"));
} else {
throw new IllegalArgumentException("Illegal LDC constant "
+ cst);
}
} else if (cst instanceof Handle) {
return newValue(Type
.getObjectType("java/lang/invoke/MethodHandle"));
} else {
throw new IllegalArgumentException("Illegal LDC constant "
+ cst);
}
case JSR:
return BasicValue.RETURNADDRESS_VALUE;
case GETSTATIC:
return newValue(Type.getType(((FieldInsnNode) insn).desc));
case NEW:
return newValue(Type.getObjectType(((TypeInsnNode) insn).desc));
default:
throw new Error("Internal error.");
}
}
@Override
public BasicValue copyOperation(final AbstractInsnNode insn,
final BasicValue value) throws AnalyzerException {
return value;
}
@Override
public BasicValue unaryOperation(final AbstractInsnNode insn,
final BasicValue value) throws AnalyzerException {
switch (insn.opcode()) {
case INEG:
case IINC:
case L2I:
case F2I:
case D2I:
case I2B:
case I2C:
case I2S:
return BasicValue.INT_VALUE;
case FNEG:
case I2F:
case L2F:
case D2F:
return BasicValue.FLOAT_VALUE;
case LNEG:
case I2L:
case F2L:
case D2L:
return BasicValue.LONG_VALUE;
case DNEG:
case I2D:
case L2D:
case F2D:
return BasicValue.DOUBLE_VALUE;
case IFEQ:
case IFNE:
case IFLT:
case IFGE:
case IFGT:
case IFLE:
case TABLESWITCH:
case LOOKUPSWITCH:
case IRETURN:
case LRETURN:
case FRETURN:
case DRETURN:
case ARETURN:
case PUTSTATIC:
return null;
case GETFIELD:
return newValue(Type.getType(((FieldInsnNode) insn).desc));
case NEWARRAY:
switch (((IntInsnNode) insn).operand) {
case T_BOOLEAN:
return newValue(Type.getType("[Z"));
case T_CHAR:
return newValue(Type.getType("[C"));
case T_BYTE:
return newValue(Type.getType("[B"));
case T_SHORT:
return newValue(Type.getType("[S"));
case T_INT:
return newValue(Type.getType("[I"));
case T_FLOAT:
return newValue(Type.getType("[F"));
case T_DOUBLE:
return newValue(Type.getType("[D"));
case T_LONG:
return newValue(Type.getType("[J"));
default:
throw new AnalyzerException(insn, "Invalid array type");
}
case ANEWARRAY:
String desc = ((TypeInsnNode) insn).desc;
return newValue(Type.getType("[" + Type.getObjectType(desc)));
case ARRAYLENGTH:
return BasicValue.INT_VALUE;
case ATHROW:
return null;
case CHECKCAST:
desc = ((TypeInsnNode) insn).desc;
return newValue(Type.getObjectType(desc));
case INSTANCEOF:
return BasicValue.INT_VALUE;
case MONITORENTER:
case MONITOREXIT:
case IFNULL:
case IFNONNULL:
return null;
default:
throw new Error("Internal error.");
}
}
@Override
public BasicValue binaryOperation(final AbstractInsnNode insn,
final BasicValue value1, final BasicValue value2)
throws AnalyzerException {
switch (insn.opcode()) {
case IALOAD:
case BALOAD:
case CALOAD:
case SALOAD:
case IADD:
case ISUB:
case IMUL:
case IDIV:
case IREM:
case ISHL:
case ISHR:
case IUSHR:
case IAND:
case IOR:
case IXOR:
return BasicValue.INT_VALUE;
case FALOAD:
case FADD:
case FSUB:
case FMUL:
case FDIV:
case FREM:
return BasicValue.FLOAT_VALUE;
case LALOAD:
case LADD:
case LSUB:
case LMUL:
case LDIV:
case LREM:
case LSHL:
case LSHR:
case LUSHR:
case LAND:
case LOR:
case LXOR:
return BasicValue.LONG_VALUE;
case DALOAD:
case DADD:
case DSUB:
case DMUL:
case DDIV:
case DREM:
return BasicValue.DOUBLE_VALUE;
case AALOAD:
return BasicValue.REFERENCE_VALUE;
case LCMP:
case FCMPL:
case FCMPG:
case DCMPL:
case DCMPG:
return BasicValue.INT_VALUE;
case IF_ICMPEQ:
case IF_ICMPNE:
case IF_ICMPLT:
case IF_ICMPGE:
case IF_ICMPGT:
case IF_ICMPLE:
case IF_ACMPEQ:
case IF_ACMPNE:
case PUTFIELD:
return null;
default:
throw new Error("Internal error.");
}
}
@Override
public BasicValue ternaryOperation(final AbstractInsnNode insn,
final BasicValue value1, final BasicValue value2,
final BasicValue value3) throws AnalyzerException {
return null;
}
@Override
public BasicValue naryOperation(final AbstractInsnNode insn,
final List<? extends BasicValue> values) throws AnalyzerException {
int opcode = insn.opcode();
if (opcode == MULTIANEWARRAY) {
return newValue(Type.getType(((MultiANewArrayInsnNode) insn).desc));
} else if (opcode == INVOKEDYNAMIC) {
return newValue(Type
.getReturnType(((InvokeDynamicInsnNode) insn).desc));
} else {
return newValue(Type.getReturnType(((MethodInsnNode) insn).desc));
}
}
@Override
public void returnOperation(final AbstractInsnNode insn,
final BasicValue value, final BasicValue expected)
throws AnalyzerException {
}
@Override
public BasicValue merge(final BasicValue v, final BasicValue w) {
if (!v.equals(w)) {
return BasicValue.UNINITIALIZED_VALUE;
}
return v;
}
}
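/*
 * Usage sketch: a BasicInterpreter is typically plugged into an Analyzer to compute
 * the basic stack/local types of a method. The owner name and method node below are
 * placeholders.
 *
 *   Analyzer<BasicValue> analyzer = new Analyzer<BasicValue>(new BasicInterpreter());
 *   Frame<BasicValue>[] frames = analyzer.analyze("com/example/Owner", methodNode);
 *   // frames[i] holds the types of locals and stack slots just before instruction i.
 */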
| gpl-3.0 |
jtux270/translate | ovirt/backend/manager/tools/src/main/java/org/ovirt/engine/core/notifier/dao/DispatchResult.java | 1730 | package org.ovirt.engine.core.notifier.dao;
import org.ovirt.engine.core.common.EventNotificationMethod;
import org.ovirt.engine.core.notifier.filter.AuditLogEvent;
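/**
 * Outcome of dispatching a single {@link AuditLogEvent} notification to an address
 * via a given {@link EventNotificationMethod}: either a success, or a failure that
 * carries an error message. Instances are created with the {@code success} and
 * {@code failure} factory methods.
 */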
public class DispatchResult {
private final AuditLogEvent event;
private final String address;
private final EventNotificationMethod NotificationMethod;
private final boolean success;
private final String errorMessage;
private DispatchResult(
AuditLogEvent event,
String address,
EventNotificationMethod NotificationMethod,
boolean success,
String errorMessage) {
this.event = event;
this.address = address;
this.NotificationMethod = NotificationMethod;
this.success = success;
this.errorMessage = errorMessage;
}
public static DispatchResult success(
AuditLogEvent event,
String address,
EventNotificationMethod NotificationMethod) {
return new DispatchResult(event, address, NotificationMethod, true, null);
}
public static DispatchResult failure(
AuditLogEvent event,
String address,
EventNotificationMethod NotificationMethod,
String errorMessage) {
return new DispatchResult(event, address, NotificationMethod, false, errorMessage);
}
public AuditLogEvent getEvent() {
return event;
}
public String getAddress() {
return address;
}
public EventNotificationMethod getNotificationMethod() {
return NotificationMethod;
}
public boolean isSuccess() {
return success;
}
public String getErrorMessage() {
return errorMessage;
}
}
| gpl-3.0 |
mbroadst/rethinkdb | drivers/java/src/main/java/com/rethinkdb/gen/ast/Desc.java | 683 | // Autogenerated by metajava.py.
// Do not edit this file directly.
// The template for this file is located at:
// ../../../../../../../../templates/AstSubclass.java
package com.rethinkdb.gen.ast;
import com.rethinkdb.gen.proto.TermType;
import com.rethinkdb.gen.exc.ReqlDriverError;
import com.rethinkdb.model.Arguments;
import com.rethinkdb.model.OptArgs;
import com.rethinkdb.ast.ReqlAst;
public class Desc extends ReqlExpr {
public Desc(Object arg) {
this(new Arguments(arg), null);
}
public Desc(Arguments args){
this(args, null);
}
public Desc(Arguments args, OptArgs optargs) {
super(TermType.DESC, args, optargs);
}
}
| agpl-3.0 |
rmswimkktt/checkstyle | src/main/java/com/puppycrawl/tools/checkstyle/checks/AvoidEscapedUnicodeCharactersCheck.java | 12213 | ////////////////////////////////////////////////////////////////////////////////
// checkstyle: Checks Java source code for adherence to a set of rules.
// Copyright (C) 2001-2015 the original author or authors.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
////////////////////////////////////////////////////////////////////////////////
package com.puppycrawl.tools.checkstyle.checks;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.puppycrawl.tools.checkstyle.api.Check;
import com.puppycrawl.tools.checkstyle.api.DetailAST;
import com.puppycrawl.tools.checkstyle.api.TokenTypes;
/**
* <p>
* Restrict using <a href =
* "http://docs.oracle.com/javase/specs/jls/se8/html/jls-3.html#jls-3.3">
* Unicode escapes</a> (e.g. \u221e).
* It is possible to allow using escapes for
* <a href="https://en.wiktionary.org/wiki/Appendix:Control_characters">
* non-printable(control) characters</a>.
* Also, this check can be configured to allow using escapes
* if trail comment is present. By the option it is possible to
* allow using escapes if literal contains only them. By the option it
* is possible to allow using escapes for space literals.
* </p>
* <p>
* Examples of using Unicode:</p>
* <pre>
* String unitAbbrev = "μs"; //Best: perfectly clear even without a comment.
* String unitAbbrev = "\u03bcs"; //Poor: the reader has no idea what this is.
* </pre>
* <p>
* An example of how to configure the check is:
* </p>
* <pre>
* <module name="AvoidEscapedUnicodeCharacters"/>
* </pre>
* <p>
 * An example of non-printable (control) characters.
* </p>
* <pre>
* return '\ufeff' + content; // byte order mark
* </pre>
* <p>
* An example of how to configure the check to allow using escapes
* for non-printable(control) characters:
* </p>
* <pre>
* <module name="AvoidEscapedUnicodeCharacters">
* <property name="allowEscapesForControlCharacters" value="true"/>
* </module>
* </pre>
* <p>
* Example of using escapes with trail comment:
* </p>
* <pre>
* String unitAbbrev = "\u03bcs"; // Greek letter mu, "s"
* </pre>
* <p>An example of how to configure the check to allow using escapes
* if trail comment is present:
* </p>
* <pre>
* <module name="AvoidEscapedUnicodeCharacters">
* <property name="allowByTailComment" value="true"/>
* </module>
* </pre>
* <p>Example of using escapes if literal contains only them:
* </p>
* <pre>
* String unitAbbrev = "\u03bc\u03bc\u03bc";
* </pre>
* <p>An example of how to configure the check to allow escapes
* if literal contains only them:
* </p>
* <pre>
* <module name="AvoidEscapedUnicodeCharacters">
* <property name="allowIfAllCharactersEscaped" value="true"/>
* </module>
* </pre>
* <p>An example of how to configure the check to allow non-printable escapes:
* </p>
* <pre>
* <module name="AvoidEscapedUnicodeCharacters">
* <property name="allowNonPrintableEscapes" value="true"/>
* </module>
* </pre>
*
* @author maxvetrenko
*
*/
public class AvoidEscapedUnicodeCharactersCheck
extends Check {
/** Regular expression for Unicode chars. */
private static final Pattern UNICODE_REGEXP = Pattern.compile("\\\\u[a-fA-F0-9]{4}");
/** Regular expression Unicode control characters. */
private static final Pattern UNICODE_CONTROL = Pattern.compile("\\\\(u|U)"
+ "(00[0-1][0-1A-Fa-f]|00[8-9][0-9A-Fa-f]|034(f|F)|070(f|F)"
+ "|180(e|E)|200[b-fB-F]|202[b-eB-E]|206[0-4a-fA-F]"
+ "|[fF]{3}[9a-bA-B]|[fF][eE][fF]{2})");
/** Regular expression for trail comment. */
private static final Pattern COMMENT_REGEXP = Pattern.compile(";[ ]*//+"
+ "[a-zA-Z0-9 ]*|;[ ]*/[*]+[a-zA-Z0-9 ]*");
/** Regular expression for all escaped chars. */
private static final Pattern ALL_ESCAPED_CHARS =
Pattern.compile("^((\\\\u)[a-fA-F0-9]{4}"
+ "||\\\\b|\\\\t|\\\\n|\\\\f|\\\\r|\\\\|\"|\')+$");
/** Regular expression for non-printable unicode chars. */
private static final Pattern NON_PRINTABLE_CHARS = Pattern.compile("\\\\u1680|\\\\u2028"
+ "|\\\\u2029|\\\\u205(f|F)|\\\\u3000|\\\\u2007|\\\\u2000|\\\\u200(a|A)"
+ "|\\\\u007(F|f)|\\\\u009(f|F)|\\\\u(f|F){4}|\\\\u007(F|f)|\\\\u00(a|A)(d|D)"
+ "|\\\\u0600|\\\\u061(c|C)|\\\\u06(d|D){2}|\\\\u070(f|F)|\\\\u1680|\\\\u180(e|E)"
+ "|\\\\u2000|\\\\u2028|\\\\u205(f|F)|\\\\u2066|\\\\u2067|\\\\u2068|\\\\u2069"
+ "|\\\\u206(a|A)|\\\\u(d|D)800|\\\\u(f|F)(e|E)(f|F){2}|\\\\u(f|F){3}9"
+ "|\\\\u(f|F){3}(a|A)|\\\\u0020|\\\\u00(a|A)0|\\\\u00(a|A)(d|D)|\\\\u0604"
+ "|\\\\u061(c|C)|\\\\u06(d|D){2}|\\\\u070(f|F)|\\\\u1680|\\\\u180(e|E)|\\\\u200(f|F)"
+ "|\\\\u202(f|F)|\\\\u2064|\\\\u2066|\\\\u2067|\\\\u2068|\\\\u2069|\\\\u206(f|F)"
+ "|\\\\u(f|F)8(f|F){2}|\\\\u(f|F)(e|E)(f|F){2}|\\\\u(f|F){3}9|\\\\u(f|F){3}(b|B)"
+ "|\\\\u05(d|D)0|\\\\u05(f|F)3|\\\\u0600|\\\\u0750|\\\\u0(e|E)00|\\\\u1(e|E)00"
+ "|\\\\u2100|\\\\u(f|F)(b|B)50|\\\\u(f|F)(e|E)70|\\\\u(F|f){2}61|\\\\u04(f|F)9"
+ "|\\\\u05(b|B)(e|E)|\\\\u05(e|E)(a|A)|\\\\u05(f|F)4|\\\\u06(f|F){2}"
+ "|\\\\u077(f|F)|\\\\u0(e|E)7(f|F)|\\\\u20(a|A)(f|F)|\\\\u213(a|A)|\\\\u0000"
+ "|\\\\u(f|F)(d|D)(f|F){2}|\\\\u(f|F)(e|E)(f|F){2}|\\\\u(f|F){2}(d|D)(c|C)"
+ "|\\\\u2002|\\\\u0085|\\\\u200(a|A)|\\\\u2005|\\\\u2000|\\\\u2029|\\\\u000(B|b)"
+ "|\\\\u2008|\\\\u2003|\\\\u205(f|F)|\\\\u1680|\\\\u0009|\\\\u0020|\\\\u2006"
+ "|\\\\u2001|\\\\u202(f|F)|\\\\u00(a|A)0|\\\\u000(c|C)|\\\\u2009|\\\\u2004|\\\\u2028"
+ "|\\\\u2028|\\\\u2007|\\\\u2004|\\\\u2028|\\\\u2007|\\\\u2025"
+ "|\\\\u(f|F){2}0(e|E)|\\\\u(f|F){2}61");
    /** Allow using escapes for non-printable (control) characters. */
private boolean allowEscapesForControlCharacters;
    /** Allow using escapes if a trail comment is present. */
private boolean allowByTailComment;
/** Allow if all characters in literal are escaped. */
private boolean allowIfAllCharactersEscaped;
    /** Allow escapes for non-printable, space-like literals. */
private boolean allowNonPrintableEscapes;
/**
     * Set allowEscapesForControlCharacters.
* @param allow user's value.
*/
public final void setAllowEscapesForControlCharacters(boolean allow) {
allowEscapesForControlCharacters = allow;
}
/**
* Set allowByTailComment.
* @param allow user's value.
*/
public final void setAllowByTailComment(boolean allow) {
allowByTailComment = allow;
}
/**
* Set allowIfAllCharactersEscaped.
* @param allow user's value.
*/
public final void setAllowIfAllCharactersEscaped(boolean allow) {
allowIfAllCharactersEscaped = allow;
}
/**
     * Set allowNonPrintableEscapes.
* @param allow user's value.
*/
public final void setAllowNonPrintableEscapes(boolean allow) {
allowNonPrintableEscapes = allow;
}
@Override
public int[] getDefaultTokens() {
return getAcceptableTokens();
}
@Override
public int[] getAcceptableTokens() {
return new int[] {TokenTypes.STRING_LITERAL, TokenTypes.CHAR_LITERAL};
}
@Override
public int[] getRequiredTokens() {
return getAcceptableTokens();
}
@Override
public void visitToken(DetailAST ast) {
final String literal = ast.getText();
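        // Report the literal unless one of the configured allowances applies:
        // a trail comment, an all-escaped literal, only control characters,
        // or only non-printable characters.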
if (hasUnicodeChar(literal) && !(allowByTailComment && hasTrailComment(ast)
|| isAllCharactersEscaped(literal)
|| allowEscapesForControlCharacters
&& isOnlyUnicodeValidChars(literal, UNICODE_CONTROL)
|| allowNonPrintableEscapes
&& isOnlyUnicodeValidChars(literal, NON_PRINTABLE_CHARS))) {
log(ast.getLineNo(), "forbid.escaped.unicode.char");
}
}
/**
* Checks if literal has Unicode chars.
* @param literal String literal.
* @return true if literal has Unicode chars.
*/
private static boolean hasUnicodeChar(String literal) {
return UNICODE_REGEXP.matcher(literal).find();
}
/**
* Check if String literal contains Unicode control chars.
* @param literal String literal.
* @param pattern RegExp for valid characters.
* @return true, if String literal contains Unicode control chars.
*/
private static boolean isOnlyUnicodeValidChars(String literal, Pattern pattern) {
final int unicodeMatchesCounter =
countMatches(UNICODE_REGEXP, literal);
final int unicodeValidMatchesCounter =
countMatches(pattern, literal);
return unicodeMatchesCounter - unicodeValidMatchesCounter == 0;
}
/**
* Check if trail comment is present after ast token.
* @param ast current token.
* @return true if trail comment is present after ast token.
*/
private boolean hasTrailComment(DetailAST ast) {
final DetailAST variableDef = getVariableDef(ast);
DetailAST semi;
if (variableDef != null) {
semi = variableDef.getNextSibling();
if (semi.getType() != TokenTypes.SEMI) {
semi = variableDef.getLastChild();
}
}
else {
semi = getSemi(ast);
}
boolean result = false;
if (semi != null) {
final int lineNo = semi.getLineNo();
final String currentLine = getLine(lineNo - 1);
if (COMMENT_REGEXP.matcher(currentLine).find()) {
result = true;
}
}
return result;
}
/**
     * Count regexp matches in a String literal.
* @param pattern pattern.
* @param target String literal.
* @return count of regexp matches.
*/
private static int countMatches(Pattern pattern, String target) {
int matcherCounter = 0;
final Matcher matcher = pattern.matcher(target);
while (matcher.find()) {
matcherCounter++;
}
return matcherCounter;
}
/**
* Get variable definition.
* @param ast current token.
* @return variable definition.
*/
private static DetailAST getVariableDef(DetailAST ast) {
DetailAST result = ast.getParent();
while (result != null
&& result.getType() != TokenTypes.VARIABLE_DEF) {
result = result.getParent();
}
return result;
}
/**
* Get semi token.
* @param ast current token.
* @return semi token or null.
*/
private static DetailAST getSemi(DetailAST ast) {
DetailAST result = ast.getParent();
while (result != null
&& result.getLastChild().getType() != TokenTypes.SEMI) {
result = result.getParent();
}
if (result != null) {
result = result.getLastChild();
}
return result;
}
/**
* Checks if all characters in String literal is escaped.
* @param literal current literal.
* @return true if all characters in String literal is escaped.
*/
private boolean isAllCharactersEscaped(String literal) {
return allowIfAllCharactersEscaped
&& ALL_ESCAPED_CHARS.matcher(literal.substring(1,
literal.length() - 1)).find();
}
}
| lgpl-2.1 |
miraculix0815/jexcelapi | src/jxl/write/biff/StyleXFRecord.java | 1809 | /*********************************************************************
*
* Copyright (C) 2002 Andrew Khan
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
***************************************************************************/
package jxl.write.biff;
import jxl.biff.DisplayFormat;
import jxl.biff.FontRecord;
import jxl.biff.XFRecord;
/**
* A style XF Record
*/
public class StyleXFRecord extends XFRecord
{
/**
* Constructor
*
* @param fnt the font for this style
* @param form the format of this style
*/
public StyleXFRecord(FontRecord fnt, DisplayFormat form)
{
super(fnt, form);
setXFDetails(XFRecord.style, 0xfff0);
}
/**
* Sets the raw cell options. Called by WritableFormattingRecord
* when setting the built in cell formats
*
* @param opt the cell options
*/
public final void setCellOptions(int opt)
{
super.setXFCellOptions(opt);
}
/**
* Sets whether or not this XF record locks the cell
*
* @param l the locked flag
* @exception WriteException
*/
public void setLocked(boolean l)
{
super.setXFLocked(l);
}
}
| lgpl-3.0 |
jblievremont/sonarqube | sonar-batch/src/main/java/org/sonar/batch/scan/filesystem/AdditionalFilePredicates.java | 1579 | /*
* SonarQube, open source software quality management tool.
* Copyright (C) 2008-2014 SonarSource
* mailto:contact AT sonarsource DOT com
*
* SonarQube is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* SonarQube is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.batch.scan.filesystem;
import org.sonar.api.batch.fs.InputFile;
import org.sonar.api.batch.fs.internal.AbstractFilePredicate;
import org.sonar.api.batch.fs.internal.DefaultInputFile;
/**
* Additional {@link org.sonar.api.batch.fs.FilePredicate}s that are
 * not published in the public API
*/
class AdditionalFilePredicates {
private AdditionalFilePredicates() {
// only static inner classes
}
static class KeyPredicate extends AbstractFilePredicate {
private final String key;
KeyPredicate(String key) {
this.key = key;
}
@Override
public boolean apply(InputFile f) {
return key.equals(((DefaultInputFile) f).key());
}
}
}
| lgpl-3.0 |
irudyak/ignite | modules/core/src/test/java/org/apache/ignite/internal/processors/cache/persistence/IgnitePdsTxCacheRebalancingTest.java | 2275 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.persistence;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.CacheRebalanceMode;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.configuration.CacheConfiguration;
/**
*
*/
public class IgnitePdsTxCacheRebalancingTest extends IgnitePdsCacheRebalancingAbstractTest {
/** {@inheritDoc} */
@Override protected CacheConfiguration cacheConfiguration(String cacheName) {
CacheConfiguration ccfg = new CacheConfiguration(cacheName);
ccfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);
ccfg.setCacheMode(CacheMode.PARTITIONED);
ccfg.setRebalanceMode(CacheRebalanceMode.SYNC);
ccfg.setBackups(1);
ccfg.setRebalanceDelay(10_000);
ccfg.setAffinity(new RendezvousAffinityFunction(false, 32));
ccfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
return ccfg;
}
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
super.afterTest();
explicitTx = false;
}
/**
* @throws Exception If failed.
*/
public void testTopologyChangesWithConstantLoadExplicitTx() throws Exception {
explicitTx = true;
testTopologyChangesWithConstantLoad();
}
}
| apache-2.0 |
kasun32/carbon-apimgt | components/apimgt/org.wso2.carbon.apimgt.usage/org.wso2.carbon.apimgt.usage.client/src/main/java/org/wso2/carbon/apimgt/usage/client/dto/AppCallTypeDTO.java | 813 | package org.wso2.carbon.apimgt.usage.client.dto;
import java.util.List;
/**
* Created by asiri on 3/23/14.
*/
public class AppCallTypeDTO {
private String appName;
private String apiName;
private String consumerKey;
private List<String> methods;
    public String getconsumerKey() { return consumerKey; }
    public void setconsumerKey(String consumerKey) { this.consumerKey = consumerKey; }
    public String getappName() { return appName; }
    public void setappName(String appName) { this.appName = appName; }
    public List<String> getCallType() { return methods; }
    public void setCallType(List<String> methods) { this.methods = methods; }
    public String getApiName() { return apiName; }
public void setApiName(String apiName) {
this.apiName = apiName;
}
}
| apache-2.0 |
dslomov/bazel | src/main/java/com/google/devtools/build/lib/events/ExtendedEventHandler.java | 2320 | // Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.events;
/**
 * Interface for reporting events during the build. It extends {@link EventHandler} by also
 * allowing more structured information to be posted.
*/
public interface ExtendedEventHandler extends EventHandler {
/** Interface for declaring events that can be posted via the extended event handler */
interface Postable {}
  /** Post a postable object with more refined information about an important build event */
void post(Postable obj);
/**
* Interface for declaring postable events that report about progress (as opposed to success or
* failure) and hence should not be stored and replayed.
*/
interface ProgressLike extends Postable {}
/** Interface for progress events that report about fetching from a remote site */
interface FetchProgress extends ProgressLike {
/**
* The resource that was originally requested and uniquely determines the fetch source. The
* actual fetching may use mirrors, proxies, or similar. The resource need not be an URL, but it
* has to uniquely identify the particular fetch among all fetch events.
*/
String getResourceIdentifier();
/** Human readable description of the progress */
String getProgress();
    /** Whether the fetch progress being reported is already finished */
boolean isFinished();
}
/** Interface for events reporting information to be added to a resolved file. */
interface ResolvedEvent extends ProgressLike {
/** The name of the resolved entity, e.g., the name of an external repository */
String getName();
/** The entry for the list of resolved Information. */
Object getResolvedInformation();
}
}
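/*
 * Illustrative sketch of a FetchProgress implementation and how it could be posted; the
 * DownloadProgress name, its fields and the byte counts are made up for the example and are not
 * part of this interface.
 *
 *   class DownloadProgress implements ExtendedEventHandler.FetchProgress {
 *     private final String url;
 *     private final long bytesRead;
 *     private final long totalBytes;
 *
 *     DownloadProgress(String url, long bytesRead, long totalBytes) {
 *       this.url = url;
 *       this.bytesRead = bytesRead;
 *       this.totalBytes = totalBytes;
 *     }
 *
 *     @Override
 *     public String getResourceIdentifier() {
 *       return url; // the originally requested URL uniquely identifies the fetch
 *     }
 *
 *     @Override
 *     public String getProgress() {
 *       return bytesRead + " of " + totalBytes + " bytes";
 *     }
 *
 *     @Override
 *     public boolean isFinished() {
 *       return bytesRead >= totalBytes;
 *     }
 *   }
 *
 *   // posted from the fetch code while downloading:
 *   eventHandler.post(new DownloadProgress(url, read, total));
 */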
| apache-2.0 |
asedunov/intellij-community | platform/external-system-impl/src/com/intellij/openapi/externalSystem/service/settings/AbstractExternalSystemConfigurable.java | 10575 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.externalSystem.service.settings;
import com.intellij.openapi.externalSystem.ExternalSystemManager;
import com.intellij.openapi.externalSystem.model.ProjectSystemId;
import com.intellij.openapi.externalSystem.settings.AbstractExternalSystemSettings;
import com.intellij.openapi.externalSystem.settings.ExternalProjectSettings;
import com.intellij.openapi.externalSystem.settings.ExternalSystemSettingsListener;
import com.intellij.openapi.externalSystem.util.*;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.openapi.project.Project;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.components.JBList;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.ContainerUtilRt;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import java.awt.*;
import java.io.File;
import java.util.Comparator;
import java.util.List;
/**
* Base class that simplifies external system settings management.
* <p/>
 * The general idea is to provide a control which looks like the one below:
* <pre>
* ----------------------------------------------
* | linked external projects list |
* |----------------------------------------------
* | linked project-specific settings |
* |----------------------------------------------
* | external system-wide settings (optional) |
 *    ----------------------------------------------
* </pre>
*
* @author Denis Zhdanov
* @since 4/30/13 12:50 PM
*/
public abstract class AbstractExternalSystemConfigurable<
ProjectSettings extends ExternalProjectSettings,
L extends ExternalSystemSettingsListener<ProjectSettings>,
SystemSettings extends AbstractExternalSystemSettings<SystemSettings, ProjectSettings, L>
> implements SearchableConfigurable, Configurable.NoScroll
{
@NotNull private final List<ExternalSystemSettingsControl<ProjectSettings>> myProjectSettingsControls = ContainerUtilRt.newArrayList();
@NotNull private final ProjectSystemId myExternalSystemId;
@NotNull private final Project myProject;
@Nullable private ExternalSystemSettingsControl<SystemSettings> mySystemSettingsControl;
@Nullable private ExternalSystemSettingsControl<ProjectSettings> myActiveProjectSettingsControl;
private PaintAwarePanel myComponent;
private JBList myProjectsList;
private DefaultListModel myProjectsModel;
protected AbstractExternalSystemConfigurable(@NotNull Project project, @NotNull ProjectSystemId externalSystemId) {
myProject = project;
myExternalSystemId = externalSystemId;
}
@NotNull
public Project getProject() {
return myProject;
}
@Nls
@Override
public String getDisplayName() {
return myExternalSystemId.getReadableName();
}
@Nullable
@Override
public JComponent createComponent() {
if (myComponent == null) {
myComponent = new PaintAwarePanel(new GridBagLayout());
SystemSettings settings = getSettings();
prepareProjectSettings(settings);
prepareSystemSettings(settings);
ExternalSystemUiUtil.fillBottom(myComponent);
}
return myComponent;
}
@SuppressWarnings("unchecked")
@NotNull
private SystemSettings getSettings() {
ExternalSystemManager<ProjectSettings, L, SystemSettings, ?, ?> manager =
(ExternalSystemManager<ProjectSettings, L, SystemSettings, ?, ?>)ExternalSystemApiUtil.getManager(myExternalSystemId);
assert manager != null;
return manager.getSettingsProvider().fun(myProject);
}
@SuppressWarnings("unchecked")
private void prepareProjectSettings(@NotNull SystemSettings s) {
myProjectsModel = new DefaultListModel();
myProjectsList = new JBList(myProjectsModel);
myProjectsList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
addTitle(ExternalSystemBundle.message("settings.title.linked.projects", myExternalSystemId.getReadableName()));
myComponent.add(new JBScrollPane(myProjectsList), ExternalSystemUiUtil.getFillLineConstraints(1));
addTitle(ExternalSystemBundle.message("settings.title.project.settings"));
List<ProjectSettings> settings = ContainerUtilRt.newArrayList(s.getLinkedProjectsSettings());
myProjectsList.setVisibleRowCount(Math.max(3, Math.min(5, settings.size())));
ContainerUtil.sort(settings, Comparator.comparing(s2 -> getProjectName(s2.getExternalProjectPath())));
myProjectSettingsControls.clear();
for (ProjectSettings setting : settings) {
ExternalSystemSettingsControl<ProjectSettings> control = createProjectSettingsControl(setting);
control.fillUi(myComponent, 1);
myProjectsModel.addElement(getProjectName(setting.getExternalProjectPath()));
myProjectSettingsControls.add(control);
if (control instanceof AbstractExternalProjectSettingsControl<?>) {
((AbstractExternalProjectSettingsControl)control).setCurrentProject(myProject);
}
control.showUi(false);
}
myProjectsList.addListSelectionListener(new ListSelectionListener() {
@SuppressWarnings("unchecked")
@Override
public void valueChanged(ListSelectionEvent e) {
if (e.getValueIsAdjusting()) {
return;
}
int i = myProjectsList.getSelectedIndex();
if (i < 0) {
return;
}
if (myActiveProjectSettingsControl != null) {
myActiveProjectSettingsControl.showUi(false);
}
myActiveProjectSettingsControl = myProjectSettingsControls.get(i);
myActiveProjectSettingsControl.showUi(true);
}
});
if (!myProjectsModel.isEmpty()) {
myProjectsList.setSelectedIndex(0);
}
}
public void selectProject(@NotNull String linkedProjectPath) {
myProjectsList.setSelectedValue(getProjectName(linkedProjectPath), true);
}
private void addTitle(@NotNull String title) {
JPanel panel = new JPanel(new GridBagLayout());
panel.setBorder(IdeBorderFactory.createTitledBorder(title, false, JBUI.emptyInsets()));
myComponent.add(panel, ExternalSystemUiUtil.getFillLineConstraints(0));
}
/**
* Creates a control for managing given project settings.
*
* @param settings target external project settings
* @return control for managing given project settings
*/
@NotNull
protected abstract ExternalSystemSettingsControl<ProjectSettings> createProjectSettingsControl(@NotNull ProjectSettings settings);
@SuppressWarnings("MethodMayBeStatic")
@NotNull
protected String getProjectName(@NotNull String path) {
File file = new File(path);
return file.isDirectory() || file.getParentFile() == null ? file.getName() : file.getParentFile().getName();
}
private void prepareSystemSettings(@NotNull SystemSettings s) {
mySystemSettingsControl = createSystemSettingsControl(s);
if (mySystemSettingsControl != null) {
addTitle(ExternalSystemBundle.message("settings.title.system.settings", myExternalSystemId.getReadableName()));
mySystemSettingsControl.fillUi(myComponent, 1);
}
}
/**
* Creates a control for managing given system-level settings (if any).
*
* @param settings target system settings
* @return a control for managing given system-level settings;
* {@code null} if current external system doesn't have system-level settings (only project-level settings)
*/
@Nullable
protected abstract ExternalSystemSettingsControl<SystemSettings> createSystemSettingsControl(@NotNull SystemSettings settings);
@Override
public boolean isModified() {
for (ExternalSystemSettingsControl<ProjectSettings> control : myProjectSettingsControls) {
if (control.isModified()) {
return true;
}
}
return mySystemSettingsControl != null && mySystemSettingsControl.isModified();
}
@Override
public void apply() throws ConfigurationException {
SystemSettings systemSettings = getSettings();
L publisher = systemSettings.getPublisher();
publisher.onBulkChangeStart();
try {
List<ProjectSettings> projectSettings = ContainerUtilRt.newArrayList();
for (ExternalSystemSettingsControl<ProjectSettings> control : myProjectSettingsControls) {
ProjectSettings s = newProjectSettings();
control.apply(s);
projectSettings.add(s);
}
systemSettings.setLinkedProjectsSettings(projectSettings);
for (ExternalSystemSettingsControl<ProjectSettings> control : myProjectSettingsControls) {
if(control instanceof AbstractExternalProjectSettingsControl){
AbstractExternalProjectSettingsControl.class.cast(control).updateInitialSettings();
}
}
if (mySystemSettingsControl != null) {
mySystemSettingsControl.apply(systemSettings);
}
}
finally {
publisher.onBulkChangeEnd();
}
}
/**
* @return new empty project-level settings object
*/
@NotNull
protected abstract ProjectSettings newProjectSettings();
@Override
public void reset() {
for (ExternalSystemSettingsControl<ProjectSettings> control : myProjectSettingsControls) {
control.reset();
}
if (mySystemSettingsControl != null) {
mySystemSettingsControl.reset();
}
}
@Override
public void disposeUIResources() {
for (ExternalSystemSettingsControl<ProjectSettings> control : myProjectSettingsControls) {
control.disposeUIResources();
}
myProjectSettingsControls.clear();
myComponent = null;
myProjectsList = null;
myProjectsModel = null;
mySystemSettingsControl = null;
}
}
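/*
 * Illustrative sketch of a concrete subclass, assuming a hypothetical "Acme" external system with
 * its own AcmeSettings/AcmeProjectSettings/AcmeSettingsListener types, settings controls and
 * SYSTEM_ID constant; none of those names exist here, they only show which hooks a real
 * integration typically overrides (remaining SearchableConfigurable methods omitted).
 *
 *   public class AcmeConfigurable
 *     extends AbstractExternalSystemConfigurable<AcmeProjectSettings, AcmeSettingsListener, AcmeSettings> {
 *
 *     public AcmeConfigurable(@NotNull Project project) {
 *       super(project, AcmeConstants.SYSTEM_ID);
 *     }
 *
 *     @Override
 *     protected ExternalSystemSettingsControl<AcmeProjectSettings> createProjectSettingsControl(@NotNull AcmeProjectSettings settings) {
 *       return new AcmeProjectSettingsControl(settings); // the per-project block of the panel
 *     }
 *
 *     @Override
 *     protected ExternalSystemSettingsControl<AcmeSettings> createSystemSettingsControl(@NotNull AcmeSettings settings) {
 *       return null; // this system has no system-wide block
 *     }
 *
 *     @Override
 *     protected AcmeProjectSettings newProjectSettings() {
 *       return new AcmeProjectSettings();
 *     }
 *   }
 */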
| apache-2.0 |
jwren/intellij-community | platform/util/concurrency/com/intellij/util/containers/ConcurrentPackedBitsArrayImpl.java | 1757 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.containers;
class ConcurrentPackedBitsArrayImpl implements ConcurrentPackedBitsArray {
private final int bitsPerChunk;
private final ConcurrentBitSetImpl bits = new ConcurrentBitSetImpl();
private final int mask;
private final int chunksPerWord;
ConcurrentPackedBitsArrayImpl(int bitsPerChunk) {
if (bitsPerChunk <= 0 || bitsPerChunk > ConcurrentBitSetImpl.BITS_PER_WORD) {
throw new IllegalArgumentException("Bits-to-pack number must be between 1 and " + ConcurrentBitSetImpl.BITS_PER_WORD + ", but got: "+bitsPerChunk);
}
this.bitsPerChunk = bitsPerChunk;
mask = bitsPerChunk == Integer.SIZE ? -1 : (1 << bitsPerChunk) - 1;
chunksPerWord = ConcurrentBitSetImpl.BITS_PER_WORD / bitsPerChunk;
}
/**
* {@inheritDoc}
*/
@Override
public long get(int id) {
assert id >= 0 : id;
int bitIndex = id/chunksPerWord * ConcurrentBitSetImpl.BITS_PER_WORD + (id%chunksPerWord)*bitsPerChunk;
return bits.getWord(bitIndex) >> bitIndex;
}
// stores chunk atomically, returns previous chunk
@Override
public long set(int id, final long flags) {
assert id >= 0 : id;
if ((flags & ~mask) != 0) {
throw new IllegalArgumentException("Flags must be between 0 and "+ mask +" but got:"+flags);
}
final int bitIndex = id/chunksPerWord * ConcurrentBitSetImpl.BITS_PER_WORD + (id%chunksPerWord)*bitsPerChunk;
int prevChunk = bits.changeWord(bitIndex, word -> word & ~(mask << bitIndex) | ((int)flags << bitIndex)) >> bitIndex;
return prevChunk;
}
@Override
public void clear() {
bits.clear();
}
}
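/*
 * Usage sketch (same-package access, since the constructor is package-private): pack a small
 * fixed-width chunk of bits per id, here 2-bit states, so values 0..3 can be stored per slot.
 * The slot id and the values below are made up for the example.
 *
 *   ConcurrentPackedBitsArrayImpl states = new ConcurrentPackedBitsArrayImpl(2);
 *   long previous = states.set(17, 0b10); // atomically store 2 for slot 17, returning the old chunk
 *   long current = states.get(17);        // read the 2-bit chunk stored for slot 17
 *   states.clear();                       // drop everything
 */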
| apache-2.0 |
smmribeiro/intellij-community | platform/analysis-api/src/com/intellij/lang/LanguageAnnotators.java | 719 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.lang;
import com.intellij.lang.annotation.Annotator;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.util.KeyedLazyInstance;
import org.jetbrains.annotations.NonNls;
public final class LanguageAnnotators extends LanguageExtension<Annotator> {
@NonNls public static final ExtensionPointName<KeyedLazyInstance<Annotator>> EP_NAME = ExtensionPointName.create("com.intellij.annotator");
public static final LanguageAnnotators INSTANCE = new LanguageAnnotators();
private LanguageAnnotators() {
super(EP_NAME);
}
}
| apache-2.0 |
smmribeiro/intellij-community | jps/jps-builders/src/org/jetbrains/jps/incremental/artifacts/ArtifactOutputToSourceMapping.java | 4035 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.jps.incremental.artifacts;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.DataExternalizer;
import com.intellij.util.io.IOUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.incremental.relativizer.PathRelativizerService;
import org.jetbrains.jps.incremental.storage.AbstractStateStorage;
import org.jetbrains.jps.incremental.storage.PathStringDescriptor;
import java.io.*;
import java.util.Collections;
import java.util.List;
/**
 * Stores source paths for each output path. If a source file or an output file is located in a jar file, the path to the jar file is stored.
*/
public class ArtifactOutputToSourceMapping extends AbstractStateStorage<String, List<ArtifactOutputToSourceMapping.SourcePathAndRootIndex>> {
private final PathRelativizerService myRelativizer;
public ArtifactOutputToSourceMapping(File storePath, PathRelativizerService relativizer) throws IOException {
super(storePath, PathStringDescriptor.INSTANCE, new SourcePathListExternalizer());
myRelativizer = relativizer;
}
@Override
public void update(String path, @Nullable List<SourcePathAndRootIndex> state) throws IOException {
super.update(normalizePath(path), state != null ? normalizePaths(state) : null);
}
@Override
public void appendData(String path, List<SourcePathAndRootIndex> data) throws IOException {
super.appendData(normalizePath(path), data != null ? normalizePaths(data) : null);
}
public void appendData(String outputPath, int rootIndex, String sourcePath) throws IOException {
super.appendData(normalizePath(outputPath), Collections.singletonList(new SourcePathAndRootIndex(normalizePath(sourcePath), rootIndex)));
}
@Override
public void remove(String path) throws IOException {
super.remove(normalizePath(path));
}
@Nullable
@Override
public List<SourcePathAndRootIndex> getState(String path) throws IOException {
List<SourcePathAndRootIndex> list = super.getState(normalizePath(path));
return list != null ? ContainerUtil.map(list, it -> new SourcePathAndRootIndex(myRelativizer.toFull(it.myPath), it.myRootIndex)) : null;
}
private String normalizePath(@NotNull String path) {
return myRelativizer.toRelative(path);
}
private List<SourcePathAndRootIndex> normalizePaths(@NotNull List<SourcePathAndRootIndex> state) {
List<SourcePathAndRootIndex> normalizePathList = new SmartList<>();
state.forEach(it -> normalizePathList.add(new SourcePathAndRootIndex(normalizePath(it.myPath), it.myRootIndex)));
return normalizePathList;
}
public static final class SourcePathAndRootIndex {
private final String myPath;
private final int myRootIndex;
private SourcePathAndRootIndex(String path, int rootIndex) {
myPath = path;
myRootIndex = rootIndex;
}
public String getPath() {
return myPath;
}
public int getRootIndex() {
return myRootIndex;
}
}
private static class SourcePathListExternalizer implements DataExternalizer<List<SourcePathAndRootIndex>> {
@Override
public void save(@NotNull DataOutput out, List<SourcePathAndRootIndex> value) throws IOException {
for (SourcePathAndRootIndex pair : value) {
IOUtil.writeUTF(out, pair.myPath);
out.writeInt(pair.getRootIndex());
}
}
@Override
public List<SourcePathAndRootIndex> read(@NotNull DataInput in) throws IOException {
List<SourcePathAndRootIndex> result = new SmartList<>();
final DataInputStream stream = (DataInputStream)in;
while (stream.available() > 0) {
final String path = IOUtil.readUTF(stream);
final int index = stream.readInt();
result.add(new SourcePathAndRootIndex(path, index));
}
return result;
}
}
}
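/*
 * Usage sketch: register which source file (and source root index) produced an output path inside
 * an artifact, then read the mapping back. The paths and the root index are illustrative; the
 * storage file and the PathRelativizerService come from the surrounding build data paths.
 *
 *   ArtifactOutputToSourceMapping mapping = new ArtifactOutputToSourceMapping(storageFile, relativizer);
 *   mapping.appendData("/out/artifacts/app/lib/util.jar", 0, "/project/src/util/Strings.java");
 *   List<ArtifactOutputToSourceMapping.SourcePathAndRootIndex> sources =
 *       mapping.getState("/out/artifacts/app/lib/util.jar"); // path + root index pairs, or null
 */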
| apache-2.0 |
rhoml/elasticsearch | core/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresRequest.java | 4533 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.indices.shards;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
import java.util.EnumSet;
/**
* Request for {@link IndicesShardStoresAction}
*/
public class IndicesShardStoresRequest extends MasterNodeReadRequest<IndicesShardStoresRequest> implements IndicesRequest.Replaceable {
private String[] indices = Strings.EMPTY_ARRAY;
private IndicesOptions indicesOptions = IndicesOptions.strictExpand();
private EnumSet<ClusterHealthStatus> statuses = EnumSet.of(ClusterHealthStatus.YELLOW, ClusterHealthStatus.RED);
/**
* Create a request for shard stores info for <code>indices</code>
*/
public IndicesShardStoresRequest(String... indices) {
this.indices = indices;
}
public IndicesShardStoresRequest() {
}
/**
* Set statuses to filter shards to get stores info on.
     * See {@link ClusterHealthStatus} for details.
     * Defaults to "yellow" and "red" statuses.
* @param shardStatuses acceptable values are "green", "yellow", "red" and "all"
*/
public IndicesShardStoresRequest shardStatuses(String... shardStatuses) {
statuses = EnumSet.noneOf(ClusterHealthStatus.class);
for (String statusString : shardStatuses) {
if ("all".equalsIgnoreCase(statusString)) {
statuses = EnumSet.allOf(ClusterHealthStatus.class);
return this;
}
statuses.add(ClusterHealthStatus.fromString(statusString));
}
return this;
}
/**
     * Specifies what types of requested indices to ignore and how to expand wildcard indices expressions.
     * By default, expands wildcards to both open and closed indices.
*/
public IndicesShardStoresRequest indicesOptions(IndicesOptions indicesOptions) {
this.indicesOptions = indicesOptions;
return this;
}
/**
* Sets the indices for the shard stores request
*/
@Override
public IndicesShardStoresRequest indices(String... indices) {
this.indices = indices;
return this;
}
/**
* Returns the shard criteria to get store information on
*/
public EnumSet<ClusterHealthStatus> shardStatuses() {
return statuses;
}
@Override
public String[] indices() {
return indices;
}
@Override
public IndicesOptions indicesOptions() {
return indicesOptions;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeStringArrayNullable(indices);
out.writeVInt(statuses.size());
for (ClusterHealthStatus status : statuses) {
out.writeByte(status.value());
}
indicesOptions.writeIndicesOptions(out);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
indices = in.readStringArray();
int nStatus = in.readVInt();
statuses = EnumSet.noneOf(ClusterHealthStatus.class);
for (int i = 0; i < nStatus; i++) {
statuses.add(ClusterHealthStatus.fromValue(in.readByte()));
}
indicesOptions = IndicesOptions.readIndicesOptions(in);
}
}
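/*
 * Usage sketch: ask for store information of shards in "red" indices only. The index name is
 * illustrative, and executing the request (e.g. via the indices admin client) is outside this class.
 *
 *   IndicesShardStoresRequest request = new IndicesShardStoresRequest("logs-2015-10")
 *       .shardStatuses("red")
 *       .indicesOptions(IndicesOptions.strictExpand());
 */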
| apache-2.0 |
gnodet/camel | components/camel-websocket/src/test/java/org/apache/camel/component/websocket/WebsocketSSLClientAuthRouteExampleTest.java | 8332 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.websocket;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import javax.net.ssl.SSLContext;
import io.netty.handler.ssl.ClientAuth;
import io.netty.handler.ssl.JdkSslContext;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.support.jsse.KeyManagersParameters;
import org.apache.camel.support.jsse.KeyStoreParameters;
import org.apache.camel.support.jsse.SSLContextParameters;
import org.apache.camel.support.jsse.SSLContextServerParameters;
import org.apache.camel.support.jsse.TrustManagersParameters;
import org.apache.camel.test.AvailablePortFinder;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.asynchttpclient.AsyncHttpClient;
import org.asynchttpclient.AsyncHttpClientConfig;
import org.asynchttpclient.DefaultAsyncHttpClient;
import org.asynchttpclient.DefaultAsyncHttpClientConfig;
import org.asynchttpclient.ws.WebSocket;
import org.asynchttpclient.ws.WebSocketListener;
import org.asynchttpclient.ws.WebSocketUpgradeHandler;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class WebsocketSSLClientAuthRouteExampleTest extends CamelTestSupport {
private static List<String> received = new ArrayList<>();
private static CountDownLatch latch = new CountDownLatch(10);
private Properties originalValues = new Properties();
private String pwd = "changeit";
private int port;
private Logger log = LoggerFactory.getLogger(getClass());
@Override
@BeforeEach
public void setUp() throws Exception {
port = AvailablePortFinder.getNextAvailable();
super.setUp();
}
protected AsyncHttpClient createAsyncHttpSSLClient() throws IOException, GeneralSecurityException {
AsyncHttpClient c;
AsyncHttpClientConfig config;
DefaultAsyncHttpClientConfig.Builder builder = new DefaultAsyncHttpClientConfig.Builder();
SSLContextParameters sslContextParameters = new SSLContextParameters();
KeyStoreParameters truststoreParameters = new KeyStoreParameters();
truststoreParameters.setResource("jsse/localhost.p12");
truststoreParameters.setPassword(pwd);
TrustManagersParameters clientSSLTrustManagers = new TrustManagersParameters();
clientSSLTrustManagers.setKeyStore(truststoreParameters);
sslContextParameters.setTrustManagers(clientSSLTrustManagers);
KeyStoreParameters keystoreParameters = new KeyStoreParameters();
keystoreParameters.setResource("jsse/localhost.p12");
keystoreParameters.setPassword(pwd);
KeyManagersParameters clientAuthClientSSLKeyManagers = new KeyManagersParameters();
clientAuthClientSSLKeyManagers.setKeyPassword(pwd);
clientAuthClientSSLKeyManagers.setKeyStore(keystoreParameters);
sslContextParameters.setKeyManagers(clientAuthClientSSLKeyManagers);
SSLContext sslContext = sslContextParameters.createSSLContext(context());
JdkSslContext ssl = new JdkSslContext(sslContext, true, ClientAuth.REQUIRE);
builder.setSslContext(ssl);
builder.setDisableHttpsEndpointIdentificationAlgorithm(true);
config = builder.build();
c = new DefaultAsyncHttpClient(config);
return c;
}
protected SSLContextParameters defineSSLContextParameters() {
KeyStoreParameters ksp = new KeyStoreParameters();
ksp.setResource("jsse/localhost.p12");
ksp.setPassword(pwd);
KeyManagersParameters kmp = new KeyManagersParameters();
kmp.setKeyPassword(pwd);
kmp.setKeyStore(ksp);
TrustManagersParameters tmp = new TrustManagersParameters();
tmp.setKeyStore(ksp);
SSLContextServerParameters scsp = new SSLContextServerParameters();
scsp.setClientAuthentication("REQUIRE");
SSLContextParameters sslContextParameters = new SSLContextParameters();
sslContextParameters.setKeyManagers(kmp);
sslContextParameters.setTrustManagers(tmp);
sslContextParameters.setServerParameters(scsp);
return sslContextParameters;
}
@Test
public void testWSHttpCall() throws Exception {
AsyncHttpClient c = createAsyncHttpSSLClient();
WebSocket websocket = c.prepareGet("wss://127.0.0.1:" + port + "/test").execute(
new WebSocketUpgradeHandler.Builder()
.addWebSocketListener(new WebSocketListener() {
@Override
public void onOpen(WebSocket websocket) {
}
@Override
public void onClose(WebSocket websocket, int code, String reason) {
}
@Override
public void onError(Throwable t) {
log.warn("Unhandled exception: {}", t.getMessage(), t);
}
@Override
public void onBinaryFrame(byte[] payload, boolean finalFragment, int rsv) {
}
@Override
public void onTextFrame(String payload, boolean finalFragment, int rsv) {
received.add(payload);
log.info("received --> " + payload);
latch.countDown();
}
@Override
public void onPingFrame(byte[] payload) {
}
@Override
public void onPongFrame(byte[] payload) {
}
}).build())
.get();
getMockEndpoint("mock:client").expectedBodiesReceived("Hello from WS client");
websocket.sendTextFrame("Hello from WS client");
assertTrue(latch.await(10, TimeUnit.SECONDS));
assertMockEndpointsSatisfied();
assertEquals(10, received.size());
for (int i = 0; i < 10; i++) {
assertEquals(">> Welcome on board!", received.get(i));
}
websocket.sendCloseFrame();
c.close();
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
public void configure() {
WebsocketComponent websocketComponent = (WebsocketComponent) context.getComponent("websocket");
websocketComponent.setSslContextParameters(defineSSLContextParameters());
websocketComponent.setPort(port);
websocketComponent.setMinThreads(1);
websocketComponent.setMaxThreads(25);
from("websocket://test")
.log(">>> Message received from WebSocket Client : ${body}")
.to("mock:client")
.loop(10)
.setBody().constant(">> Welcome on board!")
.to("websocket://test");
}
};
}
}
| apache-2.0 |
jomarko/kie-wb-common | kie-wb-common-stunner/kie-wb-common-stunner-client/kie-wb-common-stunner-widgets/src/main/java/org/kie/workbench/common/stunner/client/widgets/explorer/navigator/item/NavigatorThumbnailItemView.java | 5249 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.client.widgets.explorer.navigator.item;
import javax.enterprise.context.Dependent;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.Style;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.safehtml.shared.SafeUri;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Widget;
import org.gwtbootstrap3.client.ui.Heading;
import org.gwtbootstrap3.client.ui.Image;
import org.gwtbootstrap3.client.ui.Panel;
import org.gwtbootstrap3.client.ui.PanelBody;
import org.gwtbootstrap3.client.ui.PanelFooter;
import org.gwtbootstrap3.client.ui.Popover;
import org.gwtbootstrap3.client.ui.ThumbnailLink;
import org.kie.workbench.common.stunner.client.widgets.explorer.navigator.NavigatorItem;
import org.kie.workbench.common.stunner.client.widgets.explorer.navigator.NavigatorItemView;
@Dependent
public class NavigatorThumbnailItemView
extends Composite implements NavigatorItemView<NavigatorItem> {
interface ViewBinder extends UiBinder<Widget, NavigatorThumbnailItemView> {
}
private static ViewBinder uiBinder = GWT.create(ViewBinder.class);
@UiField
FlowPanel mainPanel;
@UiField
Panel panel;
@UiField
PanelBody body;
@UiField
Heading heading;
@UiField
ThumbnailLink item;
@UiField
Popover popover;
@UiField
Image thumbImage;
@UiField
PanelFooter footer;
private NavigatorItem presenter;
private boolean selected = false;
@Override
public void init(final NavigatorItem presenter) {
this.presenter = presenter;
initWidget(uiBinder.createAndBindUi(this));
item.addClickHandler(clickEvent -> presenter.onItemSelected());
footer.addDomHandler(event -> presenter.onItemSelected(),
ClickEvent.getType());
footer.getElement().getStyle().setCursor(Style.Cursor.POINTER);
popover.addShowHandler(showEvent -> onGotFocus());
popover.addHideHandler(hideEvent -> onLostFocus());
}
public NavigatorThumbnailItemView select() {
this.selected = true;
doSelect();
return this;
}
public NavigatorThumbnailItemView deselect() {
this.selected = false;
doDeselect();
return this;
}
@Override
public NavigatorThumbnailItemView setUUID(final String uuid) {
popover.setContent(uuid);
return this;
}
@Override
public NavigatorThumbnailItemView setItemTitle(final String title) {
heading.setText(title);
heading.setTitle(title);
popover.setTitle(title);
return this;
}
@Override
public NavigatorThumbnailItemView setThumbData(final String thumbData) {
thumbImage.setUrl(thumbData);
return this;
}
@Override
public NavigatorThumbnailItemView setThumbUri(final SafeUri safeUri) {
thumbImage.setUrl(safeUri);
return this;
}
@Override
public NavigatorThumbnailItemView setItemPxSize(final int width,
final int height) {
thumbImage.addLoadHandler((e) -> {
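      // Once the image has loaded, size it to the requested box: an image that overflows the box is
      // scaled down by its larger overflow factor (preserving aspect ratio), otherwise the requested
      // width/height are used as-is.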
final int imgWidth = thumbImage.getWidth();
final int imgHeight = thumbImage.getHeight();
final float wfactor = imgWidth > width ? imgWidth / width : 1;
final float hfactor = imgHeight > height ? imgHeight / height : 1;
final float factor = wfactor >= hfactor ? wfactor : hfactor;
int w = width;
int h = height;
if (factor > 1) {
w = (int) Math.ceil(imgWidth / factor);
h = (int) Math.ceil(imgHeight / factor);
}
thumbImage.setPixelSize(w,
h);
body.setPixelSize(width,
height);
});
return this;
}
private void doSelect() {
panel.getElement().getStyle().setBorderColor("#0000FF");
heading.getElement().getStyle().setFontWeight(Style.FontWeight.BOLD);
}
private void doDeselect() {
if (!selected) {
panel.getElement().getStyle().setBorderColor("#000000");
heading.getElement().getStyle().setFontWeight(Style.FontWeight.NORMAL);
}
}
private void onGotFocus() {
doSelect();
}
private void onLostFocus() {
doDeselect();
}
}
| apache-2.0 |
Sumu-Ning/java-design-patterns | caching/src/main/java/com/iluwatar/caching/AppManager.java | 3549 | /**
* The MIT License
* Copyright (c) 2014 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.caching;
import java.text.ParseException;
/**
*
* AppManager helps to bridge the gap in communication between the main class and the application's
* back-end. DB connection is initialized through this class. The chosen caching strategy/policy is
* also initialized here. Before the cache can be used, the size of the cache has to be set.
* Depending on the chosen caching policy, AppManager will call the appropriate function in the
* CacheStore class.
*
*/
public final class AppManager {
private static CachingPolicy cachingPolicy;
private AppManager() {
}
/**
*
* Developer/Tester is able to choose whether the application should use MongoDB as its underlying
* data storage or a simple Java data structure to (temporarily) store the data/objects during
* runtime.
*/
public static void initDb(boolean useMongoDb) {
if (useMongoDb) {
try {
DbManager.connect();
} catch (ParseException e) {
e.printStackTrace();
}
} else {
DbManager.createVirtualDb();
}
}
/**
* Initialize caching policy
*/
public static void initCachingPolicy(CachingPolicy policy) {
cachingPolicy = policy;
if (cachingPolicy == CachingPolicy.BEHIND) {
Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
@Override
public void run() {
CacheStore.flushCache();
}
}));
}
CacheStore.clearCache();
}
public static void initCacheCapacity(int capacity) {
CacheStore.initCapacity(capacity);
}
/**
* Find user account
*/
public static UserAccount find(String userId) {
if (cachingPolicy == CachingPolicy.THROUGH || cachingPolicy == CachingPolicy.AROUND) {
return CacheStore.readThrough(userId);
} else if (cachingPolicy == CachingPolicy.BEHIND) {
return CacheStore.readThroughWithWriteBackPolicy(userId);
}
return null;
}
/**
* Save user account
*/
public static void save(UserAccount userAccount) {
if (cachingPolicy == CachingPolicy.THROUGH) {
CacheStore.writeThrough(userAccount);
} else if (cachingPolicy == CachingPolicy.AROUND) {
CacheStore.writeAround(userAccount);
} else if (cachingPolicy == CachingPolicy.BEHIND) {
CacheStore.writeBehind(userAccount);
}
}
public static String printCacheContent() {
return CacheStore.print();
}
}
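/*
 * Usage sketch following the order described in the class comment: pick the storage, size the
 * cache, choose a policy, then read and write through AppManager. The account values are
 * illustrative, and the UserAccount constructor arguments are assumed to be id/name/info.
 *
 *   AppManager.initDb(false);                          // simple in-memory storage instead of MongoDB
 *   AppManager.initCacheCapacity(3);
 *   AppManager.initCachingPolicy(CachingPolicy.THROUGH);
 *   AppManager.save(new UserAccount("001", "John", "some additional info"));
 *   UserAccount found = AppManager.find("001");        // answered from the cache when present
 */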
| mit |
WeRockStar/java-design-patterns | layers/src/test/java/com/iluwatar/layers/CakeTest.java | 3757 | /**
* The MIT License
* Copyright (c) 2014 Ilkka Seppälä
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.iluwatar.layers;
import org.junit.Test;
import java.util.HashSet;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
* Date: 12/15/15 - 8:02 PM
*
* @author Jeroen Meulemeester
*/
public class CakeTest {
@Test
public void testSetId() {
final Cake cake = new Cake();
assertNull(cake.getId());
final Long expectedId = Long.valueOf(1234L);
cake.setId(expectedId);
assertEquals(expectedId, cake.getId());
}
@Test
public void testSetTopping() {
final Cake cake = new Cake();
assertNull(cake.getTopping());
final CakeTopping expectedTopping = new CakeTopping("DummyTopping", 1000);
cake.setTopping(expectedTopping);
assertEquals(expectedTopping, cake.getTopping());
}
@Test
public void testSetLayers() {
final Cake cake = new Cake();
assertNotNull(cake.getLayers());
assertTrue(cake.getLayers().isEmpty());
final Set<CakeLayer> expectedLayers = new HashSet<>();
expectedLayers.add(new CakeLayer("layer1", 1000));
expectedLayers.add(new CakeLayer("layer2", 2000));
expectedLayers.add(new CakeLayer("layer3", 3000));
cake.setLayers(expectedLayers);
assertEquals(expectedLayers, cake.getLayers());
}
@Test
public void testAddLayer() {
final Cake cake = new Cake();
assertNotNull(cake.getLayers());
assertTrue(cake.getLayers().isEmpty());
final Set<CakeLayer> initialLayers = new HashSet<>();
initialLayers.add(new CakeLayer("layer1", 1000));
initialLayers.add(new CakeLayer("layer2", 2000));
cake.setLayers(initialLayers);
assertEquals(initialLayers, cake.getLayers());
final CakeLayer newLayer = new CakeLayer("layer3", 3000);
cake.addLayer(newLayer);
final Set<CakeLayer> expectedLayers = new HashSet<>();
expectedLayers.addAll(initialLayers);
    expectedLayers.addAll(initialLayers);
assertEquals(expectedLayers, cake.getLayers());
}
@Test
public void testToString() {
final CakeTopping topping = new CakeTopping("topping", 20);
topping.setId(2345L);
final CakeLayer layer = new CakeLayer("layer", 100);
layer.setId(3456L);
final Cake cake = new Cake();
cake.setId(1234L);
cake.setTopping(topping);
cake.addLayer(layer);
final String expected = "id=1234 topping=id=2345 name=topping calories=20 "
+ "layers=[id=3456 name=layer calories=100]";
assertEquals(expected, cake.toString());
}
}
| mit |
jwren/intellij-community | java/java-tests/testSrc/com/intellij/java/refactoring/InplaceIntroduceFieldTest.java | 2467 | // Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.java.refactoring;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiExpression;
import com.intellij.psi.PsiLocalVariable;
import com.intellij.refactoring.introduceField.IntroduceFieldHandler;
import org.jetbrains.annotations.NotNull;
public class InplaceIntroduceFieldTest extends AbstractJavaInplaceIntroduceTest {
private static final String BASE_PATH = "/refactoring/inplaceIntroduceField/";
public void testAnchor() {
doTest(null);
}
public void testArrayInitializer() {
doTest(null);
}
public void testAnchor1() {
doTest(null);
}
public void testBeforeAssignment() {
doTest(null);
}
public void testTemplateAdjustment() {
doTest(null);
}
public void testBeforeAssignmentReplaceAll() {
doTest(introducer -> introducer.setReplaceAllOccurrences(true));
}
public void testBeforeAssignmentReplaceAllCall() {
doTest(introducer -> introducer.setReplaceAllOccurrences(true));
}
public void testReplaceAll() {
doTest(introducer -> introducer.setReplaceAllOccurrences(true));
}
public void testRestoreNewExpression() {
doTest(introducer -> introducer.setReplaceAllOccurrences(true));
}
public void testLocalResource() {
doTest(introducer -> introducer.setReplaceAllOccurrences(true));
}
public void testNormalizeDeclarationWithDisabledFormatting() {
doTest(null);
}
public void testEscapePosition() {
doTestEscape();
}
public void testEscapePositionOnLocal() {
doTestEscape();
}
public void testExtractNearAnotherDeclaration() {
doTest(null);
}
@Override
protected String getBasePath() {
return BASE_PATH;
}
@Override
protected MyIntroduceHandler createIntroduceHandler() {
return new MyIntroduceFieldHandler();
}
public static class MyIntroduceFieldHandler extends IntroduceFieldHandler implements MyIntroduceHandler {
@Override
public boolean invokeImpl(Project project, @NotNull PsiExpression selectedExpr, Editor editor) {
return super.invokeImpl(project, selectedExpr, editor);
}
@Override
public boolean invokeImpl(Project project, PsiLocalVariable localVariable, Editor editor) {
return super.invokeImpl(project, localVariable, editor);
}
}
} | apache-2.0 |
ascherbakoff/ignite | modules/core/src/test/java/org/apache/ignite/util/TestTcpCommunicationSpi.java | 2512 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.util;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteException;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.internal.managers.communication.GridIoMessage;
import org.apache.ignite.lang.IgniteInClosure;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.spi.IgniteSpiException;
import org.apache.ignite.spi.communication.tcp.TcpCommunicationSpi;
/**
* TcpCommunicationSpi with additional features needed for tests.
*/
public class TestTcpCommunicationSpi extends TcpCommunicationSpi {
/** */
private volatile boolean stopped;
/** */
private Class ignoreMsg;
/** {@inheritDoc} */
@Override public void sendMessage(final ClusterNode node, final Message msg,
IgniteInClosure<IgniteException> ackClosure) throws IgniteSpiException {
if (stopped)
return;
if (ignoreMsg != null && ((GridIoMessage)msg).message().getClass().equals(ignoreMsg))
return;
super.sendMessage(node, msg, ackClosure);
}
/**
*
*/
public void stop() {
stopped = true;
}
/**
*
*/
public void stop(Class ignoreMsg) {
this.ignoreMsg = ignoreMsg;
}
/**
     * Stop the SPI; messages will no longer be sent.
*/
public static void stop(Ignite ignite) {
((TestTcpCommunicationSpi)ignite.configuration().getCommunicationSpi()).stop();
}
/**
     * Skip messages of the given type; they will no longer be sent.
*/
public static void skipMsgType(Ignite ignite, Class clazz) {
((TestTcpCommunicationSpi)ignite.configuration().getCommunicationSpi()).stop(clazz);
}
}
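/*
 * Usage sketch for a test, assuming the node was started with this class configured as its
 * communication SPI; SomeMessage is a placeholder for whatever message type the test wants to drop.
 *
 *   TestTcpCommunicationSpi.skipMsgType(ignite, SomeMessage.class); // silently drop that type
 *   TestTcpCommunicationSpi.stop(ignite);                           // or stop sending entirely
 */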
| apache-2.0 |
sinistersnare/libgdx | gdx/src/com/badlogic/gdx/maps/tiled/TideMapLoader.java | 12123 | /*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.maps.tiled;
import com.badlogic.gdx.assets.AssetDescriptor;
import com.badlogic.gdx.assets.AssetLoaderParameters;
import com.badlogic.gdx.assets.AssetManager;
import com.badlogic.gdx.assets.loaders.FileHandleResolver;
import com.badlogic.gdx.assets.loaders.SynchronousAssetLoader;
import com.badlogic.gdx.assets.loaders.resolvers.InternalFileHandleResolver;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.maps.ImageResolver;
import com.badlogic.gdx.maps.ImageResolver.AssetManagerImageResolver;
import com.badlogic.gdx.maps.ImageResolver.DirectImageResolver;
import com.badlogic.gdx.maps.MapProperties;
import com.badlogic.gdx.maps.tiled.TiledMapTileLayer.Cell;
import com.badlogic.gdx.maps.tiled.tiles.AnimatedTiledMapTile;
import com.badlogic.gdx.maps.tiled.tiles.StaticTiledMapTile;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.GdxRuntimeException;
import com.badlogic.gdx.utils.ObjectMap;
import com.badlogic.gdx.utils.XmlReader;
import com.badlogic.gdx.utils.XmlReader.Element;
import java.io.IOException;
import java.util.StringTokenizer;
public class TideMapLoader extends SynchronousAssetLoader<TiledMap, TideMapLoader.Parameters> {
public static class Parameters extends AssetLoaderParameters<TiledMap> {
}
private XmlReader xml = new XmlReader();
private Element root;
public TideMapLoader () {
super(new InternalFileHandleResolver());
}
public TideMapLoader (FileHandleResolver resolver) {
super(resolver);
}
public TiledMap load (String fileName) {
try {
FileHandle tideFile = resolve(fileName);
root = xml.parse(tideFile);
ObjectMap<String, Texture> textures = new ObjectMap<String, Texture>();
for (FileHandle textureFile : loadTileSheets(root, tideFile)) {
textures.put(textureFile.path(), new Texture(textureFile));
}
DirectImageResolver imageResolver = new DirectImageResolver(textures);
TiledMap map = loadMap(root, tideFile, imageResolver);
map.setOwnedResources(textures.values().toArray());
return map;
} catch (IOException e) {
throw new GdxRuntimeException("Couldn't load tilemap '" + fileName + "'", e);
}
}
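	/*
	 * Usage sketch for the synchronous path above: load a map directly, hand it to a renderer, and
	 * dispose it when done so the owned textures are released. The file name and renderer choice are
	 * illustrative.
	 *
	 *   TideMapLoader loader = new TideMapLoader();
	 *   TiledMap map = loader.load("maps/farm.tide");
	 *   OrthogonalTiledMapRenderer renderer = new OrthogonalTiledMapRenderer(map);
	 *   // ... render each frame, then:
	 *   map.dispose(); // also disposes the textures registered via setOwnedResources()
	 */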
@Override
public TiledMap load (AssetManager assetManager, String fileName, FileHandle tideFile, Parameters parameter) {
try {
return loadMap(root, tideFile, new AssetManagerImageResolver(assetManager));
} catch (Exception e) {
throw new GdxRuntimeException("Couldn't load tilemap '" + fileName + "'", e);
}
}
@Override
public Array<AssetDescriptor> getDependencies (String fileName, FileHandle tmxFile, Parameters parameter) {
Array<AssetDescriptor> dependencies = new Array<AssetDescriptor>();
try {
root = xml.parse(tmxFile);
for (FileHandle image : loadTileSheets(root, tmxFile)) {
dependencies.add(new AssetDescriptor(image.path(), Texture.class));
}
return dependencies;
} catch (IOException e) {
throw new GdxRuntimeException("Couldn't load tilemap '" + fileName + "'", e);
}
}
/** Loads the map data, given the XML root element and an {@link ImageResolver} used to return the tileset Textures
* @param root the XML root element
* @param tmxFile the Filehandle of the tmx file
* @param imageResolver the {@link ImageResolver}
* @return the {@link TiledMap} */
private TiledMap loadMap (Element root, FileHandle tmxFile, ImageResolver imageResolver) {
TiledMap map = new TiledMap();
Element properties = root.getChildByName("properties");
if (properties != null) {
loadProperties(map.getProperties(), properties);
}
Element tilesheets = root.getChildByName("TileSheets");
for (Element tilesheet : tilesheets.getChildrenByName("TileSheet")) {
loadTileSheet(map, tilesheet, tmxFile, imageResolver);
}
Element layers = root.getChildByName("Layers");
for (Element layer : layers.getChildrenByName("Layer")) {
loadLayer(map, layer);
}
return map;
}
/** Loads the tilesets
* @param root the root XML element
* @return a list of filenames for images containing tiles
* @throws IOException */
private Array<FileHandle> loadTileSheets (Element root, FileHandle tideFile) throws IOException {
Array<FileHandle> images = new Array<FileHandle>();
Element tilesheets = root.getChildByName("TileSheets");
for (Element tileset : tilesheets.getChildrenByName("TileSheet")) {
Element imageSource = tileset.getChildByName("ImageSource");
FileHandle image = getRelativeFileHandle(tideFile, imageSource.getText());
images.add(image);
}
return images;
}
private void loadTileSheet (TiledMap map, Element element, FileHandle tideFile, ImageResolver imageResolver) {
if (element.getName().equals("TileSheet")) {
String id = element.getAttribute("Id");
String description = element.getChildByName("Description").getText();
String imageSource = element.getChildByName("ImageSource").getText();
Element alignment = element.getChildByName("Alignment");
String sheetSize = alignment.getAttribute("SheetSize");
String tileSize = alignment.getAttribute("TileSize");
String margin = alignment.getAttribute("Margin");
String spacing = alignment.getAttribute("Spacing");
String[] sheetSizeParts = sheetSize.split(" x ");
int sheetSizeX = Integer.parseInt(sheetSizeParts[0]);
int sheetSizeY = Integer.parseInt(sheetSizeParts[1]);
String[] tileSizeParts = tileSize.split(" x ");
int tileSizeX = Integer.parseInt(tileSizeParts[0]);
int tileSizeY = Integer.parseInt(tileSizeParts[1]);
String[] marginParts = margin.split(" x ");
int marginX = Integer.parseInt(marginParts[0]);
int marginY = Integer.parseInt(marginParts[1]);
			String[] spacingParts = spacing.split(" x ");
int spacingX = Integer.parseInt(spacingParts[0]);
int spacingY = Integer.parseInt(spacingParts[1]);
FileHandle image = getRelativeFileHandle(tideFile, imageSource);
TextureRegion texture = imageResolver.getImage(image.path());
TiledMapTileSets tilesets = map.getTileSets();
int firstgid = 1;
for (TiledMapTileSet tileset : tilesets) {
firstgid += tileset.size();
}
TiledMapTileSet tileset = new TiledMapTileSet();
tileset.setName(id);
tileset.getProperties().put("firstgid", firstgid);
int gid = firstgid;
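			// Slice the sheet into a grid: starting at the margins and stepping by tile size plus spacing,
			// wrap each region in a StaticTiledMapTile registered under the next global tile id.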
int stopWidth = texture.getRegionWidth() - tileSizeX;
int stopHeight = texture.getRegionHeight() - tileSizeY;
for (int y = marginY; y <= stopHeight; y += tileSizeY + spacingY) {
for (int x = marginX; x <= stopWidth; x += tileSizeX + spacingX) {
TiledMapTile tile = new StaticTiledMapTile(new TextureRegion(texture, x, y, tileSizeX, tileSizeY));
tile.setId(gid);
tileset.putTile(gid++, tile);
}
}
Element properties = element.getChildByName("Properties");
if (properties != null) {
loadProperties(tileset.getProperties(), properties);
}
tilesets.addTileSet(tileset);
}
}
private void loadLayer (TiledMap map, Element element) {
if (element.getName().equals("Layer")) {
String id = element.getAttribute("Id");
String visible = element.getAttribute("Visible");
Element dimensions = element.getChildByName("Dimensions");
String layerSize = dimensions.getAttribute("LayerSize");
String tileSize = dimensions.getAttribute("TileSize");
String[] layerSizeParts = layerSize.split(" x ");
int layerSizeX = Integer.parseInt(layerSizeParts[0]);
int layerSizeY = Integer.parseInt(layerSizeParts[1]);
String[] tileSizeParts = tileSize.split(" x ");
int tileSizeX = Integer.parseInt(tileSizeParts[0]);
int tileSizeY = Integer.parseInt(tileSizeParts[1]);
TiledMapTileLayer layer = new TiledMapTileLayer(layerSizeX, layerSizeY, tileSizeX, tileSizeY);
layer.setName(id);
layer.setVisible(visible.equalsIgnoreCase("True"));
Element tileArray = element.getChildByName("TileArray");
Array<Element> rows = tileArray.getChildrenByName("Row");
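// Each <Row> is parsed as a run-length style sequence of child elements, e.g. (hypothetical):
// <TileSheet Ref="sheetId"/><Null Count="3"/><Static Index="5"/><Animated Interval="200">...</Animated>
// <TileSheet> switches the active tileset, <Null> skips empty cells, <Static>/<Animated> place tiles.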
TiledMapTileSets tilesets = map.getTileSets();
TiledMapTileSet currentTileSet = null;
int firstgid = 0;
int x, y;
for (int row = 0, rowCount = rows.size; row < rowCount; row++) {
Element currentRow = rows.get(row);
y = rowCount - 1 - row;
x = 0;
for (int child = 0, childCount = currentRow.getChildCount(); child < childCount; child++) {
Element currentChild = currentRow.getChild(child);
String name = currentChild.getName();
if (name.equals("TileSheet")) {
currentTileSet = tilesets.getTileSet(currentChild.getAttribute("Ref"));
firstgid = currentTileSet.getProperties().get("firstgid", Integer.class);
} else if (name.equals("Null")) {
x += currentChild.getIntAttribute("Count");
} else if (name.equals("Static")) {
Cell cell = new Cell();
cell.setTile(currentTileSet.getTile(firstgid + currentChild.getIntAttribute("Index")));
layer.setCell(x++, y, cell);
} else if (name.equals("Animated")) {
// Create an AnimatedTile
int interval = currentChild.getInt("Interval");
Element frames = currentChild.getChildByName("Frames");
Array<StaticTiledMapTile> frameTiles = new Array<StaticTiledMapTile>();
for (int frameChild = 0, frameChildCount = frames.getChildCount(); frameChild < frameChildCount; frameChild++) {
Element frame = frames.getChild(frameChild);
String frameName = frame.getName();
if (frameName.equals("TileSheet")) {
currentTileSet = tilesets.getTileSet(frame.getAttribute("Ref"));
firstgid = currentTileSet.getProperties().get("firstgid", Integer.class);
} else if (frameName.equals("Static")) {
frameTiles.add((StaticTiledMapTile)currentTileSet.getTile(firstgid + frame.getIntAttribute("Index")));
}
}
Cell cell = new Cell();
cell.setTile(new AnimatedTiledMapTile(interval / 1000f, frameTiles));
layer.setCell(x++, y, cell); // TODO: Reuse existing animated tiles
}
}
}
map.getLayers().add(layer);
}
}
private void loadProperties (MapProperties properties, Element element) {
if (element.getName().equals("Properties")) {
for (Element property : element.getChildrenByName("Property")) {
String key = property.getAttribute("Key", null);
String type = property.getAttribute("Type", null);
String value = property.getText();
if (type.equals("Int32")) {
properties.put(key, Integer.parseInt(value));
} else if (type.equals("String")) {
properties.put(key, value);
} else if (type.equals("Boolean")) {
properties.put(key, value.equalsIgnoreCase("true"));
} else {
properties.put(key, value);
}
}
}
}
private static FileHandle getRelativeFileHandle (FileHandle file, String path) {
StringTokenizer tokenizer = new StringTokenizer(path, "\\/");
FileHandle result = file.parent();
while (tokenizer.hasMoreElements()) {
String token = tokenizer.nextToken();
if (token.equals(".."))
result = result.parent();
else {
result = result.child(token);
}
}
return result;
}
}
| apache-2.0 |
mcgilman/nifi | nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-flowfile-repo-serialization/src/main/java/org/apache/nifi/controller/repository/WriteAheadRepositoryRecordSerde.java | 20439 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.controller.repository;
import org.apache.nifi.controller.repository.claim.ContentClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaim;
import org.apache.nifi.controller.repository.claim.ResourceClaimManager;
import org.apache.nifi.controller.repository.claim.StandardContentClaim;
import org.apache.nifi.flowfile.FlowFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wali.SerDe;
import org.wali.UpdateType;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;
public class WriteAheadRepositoryRecordSerde extends RepositoryRecordSerde implements SerDe<SerializedRepositoryRecord> {
private static final Logger logger = LoggerFactory.getLogger(WriteAheadRepositoryRecordSerde.class);
private static final int CURRENT_ENCODING_VERSION = 9;
public static final byte ACTION_CREATE = 0;
public static final byte ACTION_UPDATE = 1;
public static final byte ACTION_DELETE = 2;
public static final byte ACTION_SWAPPED_OUT = 3;
public static final byte ACTION_SWAPPED_IN = 4;
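// Each serialized edit begins with one of the action bytes above, followed by the 8-byte record id
// and action-specific fields (content claim, attributes, swap location); see serializeEdit/deserializeEdit.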
private long recordsRestored = 0L;
private final ResourceClaimManager claimManager;
public WriteAheadRepositoryRecordSerde(final ResourceClaimManager claimManager) {
this.claimManager = claimManager;
}
@Override
public void serializeEdit(final SerializedRepositoryRecord previousRecordState, final SerializedRepositoryRecord record, final DataOutputStream out) throws IOException {
serializeEdit(previousRecordState, record, out, false);
}
public void serializeEdit(final SerializedRepositoryRecord previousRecordState, final SerializedRepositoryRecord record, final DataOutputStream out, final boolean forceAttributesWritten)
throws IOException {
if (record.isMarkedForAbort()) {
logger.warn("Repository Record {} is marked to be aborted; it will be persisted in the FlowFileRepository as a DELETE record", record);
out.write(ACTION_DELETE);
out.writeLong(getRecordIdentifier(record));
serializeContentClaim(record.getContentClaim(), record.getClaimOffset(), out);
return;
}
final UpdateType updateType = getUpdateType(record);
if (updateType.equals(UpdateType.DELETE)) {
out.write(ACTION_DELETE);
out.writeLong(getRecordIdentifier(record));
serializeContentClaim(record.getContentClaim(), record.getClaimOffset(), out);
return;
}
// If there's a Destination Connection, that's the one that we want to associate with this record.
// However, on restart, we will restore the FlowFile and set this connection to its "originalConnection".
// If we then serialize the FlowFile again before it's transferred, it's important to allow this to happen,
// so we use the originalConnection instead
final String associatedQueueId = record.getQueueIdentifier();
if (updateType.equals(UpdateType.SWAP_OUT)) {
out.write(ACTION_SWAPPED_OUT);
out.writeLong(getRecordIdentifier(record));
out.writeUTF(associatedQueueId);
out.writeUTF(getLocation(record));
return;
}
final FlowFile flowFile = record.getFlowFileRecord();
final ContentClaim claim = record.getContentClaim();
switch (updateType) {
case UPDATE:
out.write(ACTION_UPDATE);
break;
case CREATE:
out.write(ACTION_CREATE);
break;
case SWAP_IN:
out.write(ACTION_SWAPPED_IN);
break;
default:
throw new AssertionError();
}
out.writeLong(getRecordIdentifier(record));
out.writeLong(flowFile.getEntryDate());
out.writeLong(flowFile.getLineageStartDate());
out.writeLong(flowFile.getLineageStartIndex());
final Long queueDate = flowFile.getLastQueueDate();
out.writeLong(queueDate == null ? System.currentTimeMillis() : queueDate);
out.writeLong(flowFile.getQueueDateIndex());
out.writeLong(flowFile.getSize());
if (associatedQueueId == null) {
logger.warn("{} Repository Record {} has no Connection associated with it; it will be destroyed on restart",
new Object[] {this, record});
writeString("", out);
} else {
writeString(associatedQueueId, out);
}
serializeContentClaim(claim, record.getClaimOffset(), out);
if (forceAttributesWritten || record.isAttributesChanged() || updateType == UpdateType.CREATE || updateType == UpdateType.SWAP_IN) {
out.write(1); // indicate attributes changed
final Map<String, String> attributes = flowFile.getAttributes();
out.writeInt(attributes.size());
for (final Map.Entry<String, String> entry : attributes.entrySet()) {
writeString(entry.getKey(), out);
writeString(entry.getValue(), out);
}
} else {
out.write(0); // indicate attributes did not change
}
if (updateType == UpdateType.SWAP_IN) {
out.writeUTF(record.getSwapLocation());
}
}
@Override
public SerializedRepositoryRecord deserializeEdit(final DataInputStream in, final Map<Object, SerializedRepositoryRecord> currentRecordStates, final int version) throws IOException {
final int action = in.read();
final long recordId = in.readLong();
if (action == ACTION_DELETE) {
final StandardFlowFileRecord.Builder ffBuilder = new StandardFlowFileRecord.Builder().id(recordId);
if (version > 4) {
deserializeClaim(in, version, ffBuilder);
}
final FlowFileRecord flowFileRecord = ffBuilder.build();
final SerializedRepositoryRecord record = new ReconstitutedSerializedRepositoryRecord.Builder()
.type(RepositoryRecordType.DELETE)
.flowFileRecord(flowFileRecord)
.build();
return record;
}
if (action == ACTION_SWAPPED_OUT) {
final String queueId = in.readUTF();
final String location = in.readUTF();
final FlowFileRecord flowFileRecord = new StandardFlowFileRecord.Builder()
.id(recordId)
.build();
final SerializedRepositoryRecord record = new ReconstitutedSerializedRepositoryRecord.Builder()
.type(RepositoryRecordType.SWAP_OUT)
.queueIdentifier(queueId)
.swapLocation(location)
.flowFileRecord(flowFileRecord)
.build();
return record;
}
final StandardFlowFileRecord.Builder ffBuilder = new StandardFlowFileRecord.Builder();
final SerializedRepositoryRecord record = currentRecordStates.get(recordId);
ffBuilder.id(recordId);
if (record != null) {
ffBuilder.fromFlowFile(record.getFlowFileRecord());
}
ffBuilder.entryDate(in.readLong());
if (version > 1) {
// read the lineage identifiers and lineage start date, which were added in version 2.
if (version < 9) {
final int numLineageIds = in.readInt();
for (int i = 0; i < numLineageIds; i++) {
in.readUTF(); //skip identifiers
}
}
final long lineageStartDate = in.readLong();
final long lineageStartIndex;
if (version > 7) {
lineageStartIndex = in.readLong();
} else {
lineageStartIndex = 0L;
}
ffBuilder.lineageStart(lineageStartDate, lineageStartIndex);
if (version > 5) {
final long lastQueueDate = in.readLong();
final long queueDateIndex;
if (version > 7) {
queueDateIndex = in.readLong();
} else {
queueDateIndex = 0L;
}
ffBuilder.lastQueued(lastQueueDate, queueDateIndex);
}
}
ffBuilder.size(in.readLong());
final String connectionId = readString(in);
logger.debug("{} -> {}", new Object[] {recordId, connectionId});
deserializeClaim(in, version, ffBuilder);
// recover new attributes, if they changed
final int attributesChanged = in.read();
if (attributesChanged == -1) {
throw new EOFException();
} else if (attributesChanged == 1) {
final int numAttributes = in.readInt();
final Map<String, String> attributes = new HashMap<>();
for (int i = 0; i < numAttributes; i++) {
final String key = readString(in);
final String value = readString(in);
attributes.put(key, value);
}
ffBuilder.addAttributes(attributes);
} else if (attributesChanged != 0) {
throw new IOException("Attribute Change Qualifier not found in stream; found value: "
+ attributesChanged + " after successfully restoring " + recordsRestored + " records. The FlowFile Repository appears to be corrupt!");
}
final FlowFileRecord flowFile = ffBuilder.build();
String swapLocation = null;
if (action == ACTION_SWAPPED_IN) {
swapLocation = in.readUTF();
}
final RepositoryRecordType recordType = getRecordType(action);
final SerializedRepositoryRecord repositoryRecord = new ReconstitutedSerializedRepositoryRecord.Builder()
.flowFileRecord(flowFile)
.queueIdentifier(connectionId)
.swapLocation(swapLocation)
.type(recordType)
.build();
recordsRestored++;
return repositoryRecord;
}
@Override
public SerializedRepositoryRecord deserializeRecord(final DataInputStream in, final int version) throws IOException {
final int action = in.read();
if (action == -1) {
return null;
}
final long recordId = in.readLong();
if (action == ACTION_DELETE) {
final StandardFlowFileRecord.Builder ffBuilder = new StandardFlowFileRecord.Builder().id(recordId);
if (version > 4) {
deserializeClaim(in, version, ffBuilder);
}
final FlowFileRecord flowFileRecord = ffBuilder.build();
final SerializedRepositoryRecord record = new ReconstitutedSerializedRepositoryRecord.Builder()
.type(RepositoryRecordType.DELETE)
.flowFileRecord(flowFileRecord)
.build();
return record;
}
// if action was not delete, it must be create/swap in
final StandardFlowFileRecord.Builder ffBuilder = new StandardFlowFileRecord.Builder();
final long entryDate = in.readLong();
if (version > 1) {
// read the lineage identifiers and lineage start date, which were added in version 2.
if (version < 9) {
final int numLineageIds = in.readInt();
for (int i = 0; i < numLineageIds; i++) {
in.readUTF(); //skip identifiers
}
}
final long lineageStartDate = in.readLong();
final long lineageStartIndex;
if (version > 7) {
lineageStartIndex = in.readLong();
} else {
lineageStartIndex = 0L;
}
ffBuilder.lineageStart(lineageStartDate, lineageStartIndex);
if (version > 5) {
final long lastQueueDate = in.readLong();
final long queueDateIndex;
if (version > 7) {
queueDateIndex = in.readLong();
} else {
queueDateIndex = 0L;
}
ffBuilder.lastQueued(lastQueueDate, queueDateIndex);
}
}
final long size = in.readLong();
final String connectionId = readString(in);
logger.debug("{} -> {}", new Object[] {recordId, connectionId});
ffBuilder.id(recordId);
ffBuilder.entryDate(entryDate);
ffBuilder.size(size);
deserializeClaim(in, version, ffBuilder);
final int attributesChanged = in.read();
if (attributesChanged == 1) {
final int numAttributes = in.readInt();
final Map<String, String> attributes = new HashMap<>();
for (int i = 0; i < numAttributes; i++) {
final String key = readString(in);
final String value = readString(in);
attributes.put(key, value);
}
ffBuilder.addAttributes(attributes);
} else if (attributesChanged == -1) {
throw new EOFException();
} else if (attributesChanged != 0) {
throw new IOException("Attribute Change Qualifier not found in stream; found value: "
+ attributesChanged + " after successfully restoring " + recordsRestored + " records");
}
final FlowFileRecord flowFile = ffBuilder.build();
String swapLocation = null;
if (action == ACTION_SWAPPED_IN) {
swapLocation = in.readUTF();
}
final SerializedRepositoryRecord record = new ReconstitutedSerializedRepositoryRecord.Builder()
.queueIdentifier(connectionId)
.flowFileRecord(flowFile)
.swapLocation(swapLocation)
.type(getRecordType(action))
.build();
recordsRestored++;
return record;
}
private RepositoryRecordType getRecordType(final int serializedUpdateType) {
switch (serializedUpdateType) {
case ACTION_CREATE:
return RepositoryRecordType.CREATE;
case ACTION_SWAPPED_IN:
return RepositoryRecordType.SWAP_IN;
case ACTION_SWAPPED_OUT:
return RepositoryRecordType.SWAP_OUT;
case ACTION_UPDATE:
default:
return RepositoryRecordType.UPDATE;
}
}
@Override
public void serializeRecord(final SerializedRepositoryRecord record, final DataOutputStream out) throws IOException {
serializeEdit(null, record, out, true);
}
private void serializeContentClaim(final ContentClaim claim, final long offset, final DataOutputStream out) throws IOException {
if (claim == null) {
out.write(0);
} else {
out.write(1);
final ResourceClaim resourceClaim = claim.getResourceClaim();
writeString(resourceClaim.getId(), out);
writeString(resourceClaim.getContainer(), out);
writeString(resourceClaim.getSection(), out);
out.writeLong(claim.getOffset());
out.writeLong(claim.getLength());
out.writeLong(offset);
out.writeBoolean(resourceClaim.isLossTolerant());
}
}
private void deserializeClaim(final DataInputStream in, final int serializationVersion, final StandardFlowFileRecord.Builder ffBuilder) throws IOException {
// determine current Content Claim.
final int claimExists = in.read();
if (claimExists == 1) {
final String claimId;
if (serializationVersion < 4) {
claimId = String.valueOf(in.readLong());
} else {
claimId = readString(in);
}
final String container = readString(in);
final String section = readString(in);
final long resourceOffset;
final long resourceLength;
if (serializationVersion < 7) {
resourceOffset = 0L;
resourceLength = -1L;
} else {
resourceOffset = in.readLong();
resourceLength = in.readLong();
}
final long claimOffset = in.readLong();
final boolean lossTolerant;
if (serializationVersion >= 3) {
lossTolerant = in.readBoolean();
} else {
lossTolerant = false;
}
final ResourceClaim resourceClaim = claimManager.newResourceClaim(container, section, claimId, lossTolerant, false);
final StandardContentClaim contentClaim = new StandardContentClaim(resourceClaim, resourceOffset);
contentClaim.setLength(resourceLength);
ffBuilder.contentClaim(contentClaim);
ffBuilder.contentClaimOffset(claimOffset);
} else if (claimExists == -1) {
throw new EOFException();
} else if (claimExists != 0) {
throw new IOException("Claim Existence Qualifier not found in stream; found value: "
+ claimExists + " after successfully restoring " + recordsRestored + " records");
}
}
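// Strings are length-prefixed: a 2-byte big-endian length for values under 65535 bytes, or the
// escape 0xFF 0xFF followed by a 4-byte length for longer values. For example, "abc" is written
// as 0x00 0x03 'a' 'b' 'c' (a sketch of the layout derived from the code below, not from the NiFi docs).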
private void writeString(final String toWrite, final OutputStream out) throws IOException {
final byte[] bytes = toWrite.getBytes(StandardCharsets.UTF_8);
final int utflen = bytes.length;
if (utflen < 65535) {
out.write(utflen >>> 8);
out.write(utflen);
out.write(bytes);
} else {
out.write(255);
out.write(255);
out.write(utflen >>> 24);
out.write(utflen >>> 16);
out.write(utflen >>> 8);
out.write(utflen);
out.write(bytes);
}
}
private String readString(final InputStream in) throws IOException {
final Integer numBytes = readFieldLength(in);
if (numBytes == null) {
throw new EOFException();
}
final byte[] bytes = new byte[numBytes];
fillBuffer(in, bytes, numBytes);
return new String(bytes, StandardCharsets.UTF_8);
}
private Integer readFieldLength(final InputStream in) throws IOException {
final int firstValue = in.read();
final int secondValue = in.read();
if (firstValue < 0) {
return null;
}
if (secondValue < 0) {
throw new EOFException();
}
if (firstValue == 0xff && secondValue == 0xff) {
final int ch1 = in.read();
final int ch2 = in.read();
final int ch3 = in.read();
final int ch4 = in.read();
if ((ch1 | ch2 | ch3 | ch4) < 0) {
throw new EOFException();
}
return (ch1 << 24) + (ch2 << 16) + (ch3 << 8) + ch4;
} else {
return (firstValue << 8) + secondValue;
}
}
private void fillBuffer(final InputStream in, final byte[] buffer, final int length) throws IOException {
int bytesRead;
int totalBytesRead = 0;
while ((bytesRead = in.read(buffer, totalBytesRead, length - totalBytesRead)) > 0) {
totalBytesRead += bytesRead;
}
if (totalBytesRead != length) {
throw new EOFException();
}
}
@Override
public int getVersion() {
return CURRENT_ENCODING_VERSION;
}
} | apache-2.0 |
donNewtonAlpha/onos | web/api/src/test/java/org/onosproject/rest/resources/StatisticsResourceTest.java | 6923 | /*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.rest.resources;
import com.eclipsesource.json.Json;
import com.eclipsesource.json.JsonArray;
import com.eclipsesource.json.JsonObject;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.junit.Before;
import org.junit.Test;
import org.onlab.osgi.ServiceDirectory;
import org.onlab.osgi.TestServiceDirectory;
import org.onlab.rest.BaseResource;
import org.onosproject.codec.CodecService;
import org.onosproject.codec.impl.CodecManager;
import org.onosproject.net.Link;
import org.onosproject.net.link.LinkService;
import org.onosproject.net.statistic.DefaultLoad;
import org.onosproject.net.statistic.StatisticService;
import javax.ws.rs.client.WebTarget;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.HashMap;
import java.util.stream.IntStream;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.onosproject.net.NetTestTools.connectPoint;
import static org.onosproject.net.NetTestTools.link;
/**
* Unit tests for statistics REST APIs.
*/
public class StatisticsResourceTest extends ResourceTest {
Link link1 = link("src1", 1, "dst1", 1);
Link link2 = link("src2", 2, "dst2", 2);
Link link3 = link("src3", 3, "dst3", 3);
LinkService mockLinkService;
StatisticService mockStatisticService;
/**
* Initializes test mocks and environment.
*/
@Before
public void setUpTest() {
mockLinkService = createMock(LinkService.class);
expect(mockLinkService.getLinks())
.andReturn(ImmutableList.of(link1, link2, link3));
expect(mockLinkService.getLinks(connectPoint("0000000000000001", 2)))
.andReturn(ImmutableSet.of(link3));
mockStatisticService = createMock(StatisticService.class);
expect(mockStatisticService.load(link1))
.andReturn(new DefaultLoad(2, 1, 1));
expect(mockStatisticService.load(link2))
.andReturn(new DefaultLoad(22, 11, 1));
expect(mockStatisticService.load(link3))
.andReturn(new DefaultLoad(222, 111, 1));
replay(mockLinkService, mockStatisticService);
// Register the services needed for the test
CodecManager codecService = new CodecManager();
codecService.activate();
ServiceDirectory testDirectory =
new TestServiceDirectory()
.add(LinkService.class, mockLinkService)
.add(StatisticService.class, mockStatisticService)
.add(CodecService.class, codecService);
BaseResource.setServiceDirectory(testDirectory);
}
/**
* Checks that the values in a JSON representation of a Load are
* correct.
*
* @param load JSON for the Load object
* @param rate expected value for rate
* @param latest expected value for latest
* @param valid expected value for valid flag
* @param device expected device ID
*/
private void checkValues(JsonObject load, int rate, int latest,
boolean valid, String device) throws UnsupportedEncodingException {
assertThat(load, notNullValue());
assertThat(load.get("rate").asInt(), is(rate));
assertThat(load.get("latest").asInt(), is(latest));
assertThat(load.get("valid").asBoolean(), is(valid));
assertThat(load.get("time").asLong(),
lessThanOrEqualTo((System.currentTimeMillis())));
assertThat(URLDecoder.decode(load.get("link").asString(), "UTF-8"),
containsString("device=of:" + device));
}
/**
* Tests GET of a single Load statistics object.
*/
@Test
public void testSingleLoadGet() throws UnsupportedEncodingException {
final WebTarget wt = target();
final String response = wt.path("statistics/flows/link")
.queryParam("device", "of:0000000000000001")
.queryParam("port", "2")
.request()
.get(String.class);
final JsonObject result = Json.parse(response).asObject();
assertThat(result, notNullValue());
assertThat(result.names(), hasSize(1));
assertThat(result.names().get(0), is("loads"));
final JsonArray jsonLoads = result.get("loads").asArray();
assertThat(jsonLoads, notNullValue());
assertThat(jsonLoads.size(), is(1));
JsonObject load1 = jsonLoads.get(0).asObject();
checkValues(load1, 111, 222, true, "src3");
}
/**
* Tests GET of all Load statistics objects.
*/
@Test
public void testLoadsGet() throws UnsupportedEncodingException {
final WebTarget wt = target();
final String response = wt.path("statistics/flows/link/").request().get(String.class);
final JsonObject result = Json.parse(response).asObject();
assertThat(result, notNullValue());
assertThat(result.names(), hasSize(1));
assertThat(result.names().get(0), is("loads"));
final JsonArray jsonLoads = result.get("loads").asArray();
assertThat(jsonLoads, notNullValue());
assertThat(jsonLoads.size(), is(3));
// Hash the loads by their "latest" value to allow easy lookup if the
// order changes.
HashMap<Integer, JsonObject> currentMap = new HashMap<>();
IntStream.range(0, jsonLoads.size())
.forEach(index -> currentMap.put(
jsonLoads.get(index).asObject().get("latest").asInt(),
jsonLoads.get(index).asObject()));
JsonObject load1 = currentMap.get(2);
checkValues(load1, 1, 2, true, "src1");
JsonObject load2 = currentMap.get(22);
checkValues(load2, 11, 22, true, "src2");
JsonObject load3 = currentMap.get(222);
checkValues(load3, 111, 222, true, "src3");
}
}
| apache-2.0 |
apetro/uPortal | uportal-war/src/test/java/org/springframework/web/client/interceptors/BasicAuthInterceptorTest.java | 4747 | /**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.springframework.web.client.interceptors;
import java.io.ByteArrayOutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.junit.Test;
import org.springframework.core.env.PropertyResolver;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.client.ClientHttpRequest;
import org.springframework.http.client.ClientHttpRequestFactory;
import org.springframework.http.client.ClientHttpRequestInterceptor;
import org.springframework.http.client.ClientHttpResponse;
import org.springframework.web.client.RestTemplate;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.hasKey;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* @author Josh Helmer, jhelmer@unicon.net
*/
public class BasicAuthInterceptorTest {
@Test
public void testInterceptorWithUsernamePassword() throws Exception {
final String id = "test";
final String username = "test";
final String password = "test";
PropertyResolver resolver = mock(PropertyResolver.class);
when(resolver.getProperty(eq("org.jasig.rest.interceptor.basic-auth." + id + ".username"))).thenReturn(username);
when(resolver.getProperty(eq("org.jasig.rest.interceptor.basic-auth." + id + ".password"))).thenReturn(password);
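// "dGVzdDp0ZXN0" is Base64("test:test"), i.e. the Basic auth token the interceptor should derive
// from the username/password pair configured above.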
doInterceptorTest(resolver, id, "dGVzdDp0ZXN0");
}
@Test
public void testInterceptorWithAuthCode() throws Exception {
final String id = "test";
final String authCode = "c29tZUxvbmdVc2VybmFtZTpzb21lTG9uZ1Bhc3N3b3Jk";
PropertyResolver resolver = mock(PropertyResolver.class);
when(resolver.getProperty(eq("org.jasig.rest.interceptor.basic-auth." + id + ".authCode"))).thenReturn(authCode);
doInterceptorTest(resolver, id, authCode);
}
private void doInterceptorTest(PropertyResolver resolver, String id, String expectedAuthCode) throws Exception {
final String url = "http://www.test.com/lrs";
final String data = "test";
final String expectedHeader = "Basic " + expectedAuthCode;
// holder for the headers...
HttpHeaders headers = new HttpHeaders();
// Mock guts of RestTemplate so no need to actually hit the web...
ClientHttpResponse resp = mock(ClientHttpResponse.class);
when(resp.getStatusCode()).thenReturn(HttpStatus.ACCEPTED);
when(resp.getHeaders()).thenReturn(new HttpHeaders());
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
ClientHttpRequest client = mock(ClientHttpRequest.class);
when(client.getHeaders()).thenReturn(headers);
when(client.getBody()).thenReturn(buffer);
when(client.execute()).thenReturn(resp);
ClientHttpRequestFactory factory = mock(ClientHttpRequestFactory.class);
when(factory.createRequest(any(URI.class), any(HttpMethod.class))).thenReturn(client);
// add the new interceptor...
BasicAuthInterceptor interceptor = new BasicAuthInterceptor();
interceptor.setPropertyResolver(resolver);
interceptor.setId(id);
List<ClientHttpRequestInterceptor> interceptors = new ArrayList<ClientHttpRequestInterceptor>();
interceptors.add(interceptor);
RestTemplate rest = new RestTemplate(factory);
rest.setInterceptors(interceptors);
// do it...
rest.postForLocation(url, data, Collections.emptyMap());
// make sure auth header is correctly set...
assertThat(headers, hasKey(Headers.Authorization.name()));
assertThat(headers.get(Headers.Authorization.name()), contains(expectedHeader));
}
}
| apache-2.0 |
rpeleias/vraptor4 | vraptor-core/src/test/java/br/com/caelum/vraptor/interceptor/AspectStyleInterceptorHandlerTest.java | 10697 | /***
* Copyright (c) 2009 Caelum - www.caelum.com.br/opensource All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package br.com.caelum.vraptor.interceptor;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import br.com.caelum.vraptor.controller.ControllerInstance;
import br.com.caelum.vraptor.controller.ControllerMethod;
import br.com.caelum.vraptor.core.DefaultReflectionProvider;
import br.com.caelum.vraptor.core.InterceptorStack;
import br.com.caelum.vraptor.interceptor.example.AcceptsInterceptor;
import br.com.caelum.vraptor.interceptor.example.AcceptsWithoutArgsInterceptor;
import br.com.caelum.vraptor.interceptor.example.AlwaysAcceptsAspectInterceptor;
import br.com.caelum.vraptor.interceptor.example.ExampleOfSimpleStackInterceptor;
import br.com.caelum.vraptor.interceptor.example.InterceptorWithCustomizedAccepts;
import br.com.caelum.vraptor.interceptor.example.MethodLevelAcceptsController;
import br.com.caelum.vraptor.interceptor.example.WithoutAroundInterceptor;
import br.com.caelum.vraptor.ioc.Container;
import br.com.caelum.vraptor.util.test.MockInstanceImpl;
public class AspectStyleInterceptorHandlerTest {
private StepInvoker stepInvoker;
private @Mock InterceptorStack stack;
private @Mock ControllerMethod controllerMethod;
private @Mock Object currentController;
private @Mock WithAnnotationAcceptor withAnnotationAcceptor;
private @Mock ControllerInstance controllerInstance;
private @Mock SimpleInterceptorStack simpleInterceptorStack;
private InterceptorAcceptsExecutor acceptsExecutor;
private CustomAcceptsExecutor customAcceptsExecutor;
private Container container;
private InterceptorExecutor interceptorExecutor;
@Before
public void setup() {
MockitoAnnotations.initMocks(this);
stepInvoker = new StepInvoker(new DefaultReflectionProvider());
}
@Test
public void shouldAlwaysCallAround() {
AlwaysAcceptsAspectInterceptor interceptor = spy(new AlwaysAcceptsAspectInterceptor());
AspectStyleInterceptorHandler handler = newAspectStyleInterceptorHandler(
AlwaysAcceptsAspectInterceptor.class, interceptor);
handler.execute(stack, controllerMethod, currentController);
verify(interceptor).intercept(Mockito.same(stack),
Mockito.same(controllerMethod),
Mockito.any(ControllerInstance.class));
}
@Test
public void shouldInvokeUsingBeforeAndAfter() {
AlwaysAcceptsAspectInterceptor interceptor = spy(new AlwaysAcceptsAspectInterceptor());
AspectStyleInterceptorHandler handler = newAspectStyleInterceptorHandler(
AlwaysAcceptsAspectInterceptor.class, interceptor);
handler.execute(stack, controllerMethod, currentController);
InOrder order = inOrder(interceptor);
order.verify(interceptor).begin();
order.verify(interceptor).intercept(
Mockito.same(stack),
Mockito.same(controllerMethod),
Mockito.any(ControllerInstance.class));
order.verify(interceptor).after();
}
@Test
public void shouldInvokeIfAccepts() {
AcceptsInterceptor acceptsInterceptor = spy(new AcceptsInterceptor(true));
AspectStyleInterceptorHandler aspectHandler = newAspectStyleInterceptorHandler(
AcceptsInterceptor.class, acceptsInterceptor);
aspectHandler.execute(stack, controllerMethod, currentController);
InOrder order = inOrder(acceptsInterceptor);
order.verify(acceptsInterceptor).accepts(controllerMethod);
order.verify(acceptsInterceptor).before();
order.verify(acceptsInterceptor).around(
Mockito.same(stack),
Mockito.same(controllerMethod),
Mockito.any(ControllerInstance.class));
order.verify(acceptsInterceptor).after();
}
@Test
public void shouldNotInvokeIfDoesNotAccept() {
AcceptsInterceptor acceptsInterceptor = spy(new AcceptsInterceptor(
false));
AspectStyleInterceptorHandler aspectHandler = newAspectStyleInterceptorHandler(
AcceptsInterceptor.class, acceptsInterceptor);
aspectHandler.execute(stack, controllerMethod, currentController);
verify(acceptsInterceptor).accepts(controllerMethod);
verify(acceptsInterceptor, never()).before();
verify(acceptsInterceptor, never()).around(Mockito.same(stack),
Mockito.same(controllerMethod),
Mockito.any(ControllerInstance.class));
verify(acceptsInterceptor, never()).after();
}
@Test
public void shouldInvokeAcceptsWithoutArgs() {
AcceptsWithoutArgsInterceptor acceptsWithoutArgsInterceptor = spy(new AcceptsWithoutArgsInterceptor());
AspectStyleInterceptorHandler aspectHandler = newAspectStyleInterceptorHandler(
AcceptsWithoutArgsInterceptor.class,
acceptsWithoutArgsInterceptor);
aspectHandler.execute(stack, controllerMethod, currentController);
InOrder order = inOrder(acceptsWithoutArgsInterceptor);
order.verify(acceptsWithoutArgsInterceptor).accepts();
order.verify(acceptsWithoutArgsInterceptor).before();
order.verify(acceptsWithoutArgsInterceptor).around(
Mockito.same(stack),
Mockito.same(controllerMethod),
Mockito.any(ControllerInstance.class));
order.verify(acceptsWithoutArgsInterceptor).after();
}
@Test
public void shouldInvokeAroundWithSimpleStack() {
ExampleOfSimpleStackInterceptor simpleStackInterceptor = spy(new ExampleOfSimpleStackInterceptor());
AspectStyleInterceptorHandler aspectHandler = newAspectStyleInterceptorHandler(
ExampleOfSimpleStackInterceptor.class, simpleStackInterceptor);
aspectHandler.execute(stack, controllerMethod, currentController);
verify(simpleStackInterceptor).around(
Mockito.any(SimpleInterceptorStack.class));
}
@Test
public void shouldInvokeNextIfNotAccepts() throws Exception {
AcceptsInterceptor interceptor = spy(new AcceptsInterceptor(false));
AspectStyleInterceptorHandler aspectHandler = newAspectStyleInterceptorHandler(
AcceptsInterceptor.class, interceptor);
aspectHandler.execute(stack, controllerMethod, null);
verify(interceptor, never()).around(
Mockito.any(InterceptorStack.class),
Mockito.same(controllerMethod),
Mockito.any(ControllerInstance.class));
verify(stack).next(Mockito.same(controllerMethod),
Mockito.any(ControllerInstance.class));
}
@Test
public void shouldNotInvokeIfDoesNotHaveAround() throws Exception {
WithoutAroundInterceptor interceptor = spy(new WithoutAroundInterceptor());
AspectStyleInterceptorHandler aspectHandler = newAspectStyleInterceptorHandler(
WithoutAroundInterceptor.class, interceptor);
aspectHandler.execute(stack, controllerMethod, null);
verify(simpleInterceptorStack).next();
}
@Test
public void shouldAcceptCustomizedAccepts() throws Exception {
InterceptorWithCustomizedAccepts interceptor = new InterceptorWithCustomizedAccepts();
AspectStyleInterceptorHandler aspectHandler = newAspectStyleInterceptorHandler(
InterceptorWithCustomizedAccepts.class, interceptor,
withAnnotationAcceptor);
when(withAnnotationAcceptor.validate(Mockito.same(controllerMethod),
Mockito.any(ControllerInstance.class))).thenReturn(true);
aspectHandler.execute(stack, controllerMethod, new MethodLevelAcceptsController());
assertTrue(interceptor.isBeforeCalled());
assertTrue(interceptor.isInterceptCalled());
assertTrue(interceptor.isAfterCalled());
}
@Test
public void shouldNotAcceptCustomizedAccepts() throws Exception {
InterceptorWithCustomizedAccepts interceptor = new InterceptorWithCustomizedAccepts();
AspectStyleInterceptorHandler aspectHandler = newAspectStyleInterceptorHandler(
InterceptorWithCustomizedAccepts.class, interceptor,withAnnotationAcceptor);
when(withAnnotationAcceptor.validate(Mockito.same(controllerMethod),
Mockito.any(ControllerInstance.class))).thenReturn(false);
aspectHandler.execute(stack, controllerMethod, new MethodLevelAcceptsController());
assertFalse(interceptor.isBeforeCalled());
assertFalse(interceptor.isInterceptCalled());
assertFalse(interceptor.isAfterCalled());
}
@Test
public void shouldInvokeCustomAcceptsFailCallback() {
InterceptorWithCustomizedAccepts interceptor = spy(new InterceptorWithCustomizedAccepts());
AspectStyleInterceptorHandler aspectHandler = newAspectStyleInterceptorHandler(
InterceptorWithCustomizedAccepts.class, interceptor,
withAnnotationAcceptor);
when(withAnnotationAcceptor.validate(Mockito.same(controllerMethod),
Mockito.any(ControllerInstance.class))).thenReturn(false);
aspectHandler.execute(stack, controllerMethod, aspectHandler);
verify(interceptor).customAcceptsFailCallback();
}
private AspectStyleInterceptorHandler newAspectStyleInterceptorHandler(Class<?> interceptorClass, Object... dependencies) {
List<Object> deps = new ArrayList<>(Arrays.asList(dependencies));
boolean hasControllerInstance = false;
for (Object object : deps) {
if(ControllerInstance.class.isAssignableFrom(object.getClass())){
hasControllerInstance = true;
break;
}
}
if(!hasControllerInstance){
deps.add(controllerInstance);
}
deps.add(stack);
deps.add(controllerMethod);
deps.add(simpleInterceptorStack);
container = new InstanceContainer(deps.toArray());
InterceptorMethodParametersResolver parametersResolver = new InterceptorMethodParametersResolver(container);
acceptsExecutor = new InterceptorAcceptsExecutor(parametersResolver, stepInvoker);
customAcceptsExecutor = new CustomAcceptsExecutor(
new MockInstanceImpl<>(controllerMethod),
new MockInstanceImpl<>(controllerInstance),
stepInvoker, new CustomAcceptsVerifier(container));
interceptorExecutor = new InterceptorExecutor(stepInvoker, parametersResolver,
new MockInstanceImpl<>(simpleInterceptorStack));
return new AspectStyleInterceptorHandler(interceptorClass, stepInvoker, container, customAcceptsExecutor,
acceptsExecutor, interceptorExecutor);
}
} | apache-2.0 |
donNewtonAlpha/onos | protocols/isis/isisio/src/main/java/org/onosproject/isis/io/isispacket/package-info.java | 710 | /*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Implementation of the ISIS protocol.
*/
package org.onosproject.isis.io.isispacket; | apache-2.0 |
lejingw/SuperToasts | demo/src/com/supertoastsdemo/ActivityTwo.java | 367 | package com.supertoastsdemo;
import android.app.Activity;
import android.os.Bundle;
/* This class does nothing; it is used for demonstration purposes. */
public class ActivityTwo extends Activity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_two);
}
} | apache-2.0 |
mdeinum/spring-boot | spring-boot-project/spring-boot/src/main/java/org/springframework/boot/context/properties/ConfigurationPropertiesScanRegistrar.java | 4911 | /*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.context.properties;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.boot.context.TypeExcludeFilter;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.core.annotation.MergedAnnotations;
import org.springframework.core.annotation.MergedAnnotations.SearchStrategy;
import org.springframework.core.env.Environment;
import org.springframework.core.io.ResourceLoader;
import org.springframework.core.type.AnnotationMetadata;
import org.springframework.core.type.filter.AnnotationTypeFilter;
import org.springframework.stereotype.Component;
import org.springframework.util.ClassUtils;
import org.springframework.util.StringUtils;
/**
* {@link ImportBeanDefinitionRegistrar} for registering
* {@link ConfigurationProperties @ConfigurationProperties} bean definitions via scanning.
*
* @author Madhura Bhave
* @author Phillip Webb
*/
class ConfigurationPropertiesScanRegistrar implements ImportBeanDefinitionRegistrar {
private final Environment environment;
private final ResourceLoader resourceLoader;
ConfigurationPropertiesScanRegistrar(Environment environment, ResourceLoader resourceLoader) {
this.environment = environment;
this.resourceLoader = resourceLoader;
}
@Override
public void registerBeanDefinitions(AnnotationMetadata importingClassMetadata, BeanDefinitionRegistry registry) {
Set<String> packagesToScan = getPackagesToScan(importingClassMetadata);
scan(registry, packagesToScan);
}
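// For example (hypothetical): @ConfigurationPropertiesScan(basePackages = "com.example.config")
// on class com.example.App yields {"com.example.config"}; with no attributes declared, the
// package of the annotated class ("com.example") is used as the fallback.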
private Set<String> getPackagesToScan(AnnotationMetadata metadata) {
AnnotationAttributes attributes = AnnotationAttributes
.fromMap(metadata.getAnnotationAttributes(ConfigurationPropertiesScan.class.getName()));
String[] basePackages = attributes.getStringArray("basePackages");
Class<?>[] basePackageClasses = attributes.getClassArray("basePackageClasses");
Set<String> packagesToScan = new LinkedHashSet<>(Arrays.asList(basePackages));
for (Class<?> basePackageClass : basePackageClasses) {
packagesToScan.add(ClassUtils.getPackageName(basePackageClass));
}
if (packagesToScan.isEmpty()) {
packagesToScan.add(ClassUtils.getPackageName(metadata.getClassName()));
}
packagesToScan.removeIf((candidate) -> !StringUtils.hasText(candidate));
return packagesToScan;
}
private void scan(BeanDefinitionRegistry registry, Set<String> packages) {
ConfigurationPropertiesBeanRegistrar registrar = new ConfigurationPropertiesBeanRegistrar(registry);
ClassPathScanningCandidateComponentProvider scanner = getScanner(registry);
for (String basePackage : packages) {
for (BeanDefinition candidate : scanner.findCandidateComponents(basePackage)) {
register(registrar, candidate.getBeanClassName());
}
}
}
private ClassPathScanningCandidateComponentProvider getScanner(BeanDefinitionRegistry registry) {
ClassPathScanningCandidateComponentProvider scanner = new ClassPathScanningCandidateComponentProvider(false);
scanner.setEnvironment(this.environment);
scanner.setResourceLoader(this.resourceLoader);
scanner.addIncludeFilter(new AnnotationTypeFilter(ConfigurationProperties.class));
TypeExcludeFilter typeExcludeFilter = new TypeExcludeFilter();
typeExcludeFilter.setBeanFactory((BeanFactory) registry);
scanner.addExcludeFilter(typeExcludeFilter);
return scanner;
}
private void register(ConfigurationPropertiesBeanRegistrar registrar, String className) throws LinkageError {
try {
register(registrar, ClassUtils.forName(className, null));
}
catch (ClassNotFoundException ex) {
// Ignore
}
}
private void register(ConfigurationPropertiesBeanRegistrar registrar, Class<?> type) {
if (!isComponent(type)) {
registrar.register(type);
}
}
private boolean isComponent(Class<?> type) {
return MergedAnnotations.from(type, SearchStrategy.TYPE_HIERARCHY).isPresent(Component.class);
}
}
| apache-2.0 |
richardgutkowski/ansible-roles | stash/files/mysql-connector-java-5.1.35/src/com/mysql/fabric/ServerRole.java | 1323 | /*
Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
The MySQL Connector/J is licensed under the terms of the GPLv2
<http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most MySQL Connectors.
There are special exceptions to the terms and conditions of the GPLv2 as it is applied to
this software, see the FLOSS License Exception
<http://www.mysql.com/about/legal/licensing/foss-exception.html>.
This program is free software; you can redistribute it and/or modify it under the terms
of the GNU General Public License as published by the Free Software Foundation; version 2
of the License.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with this
program; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth
Floor, Boston, MA 02110-1301 USA
*/
package com.mysql.fabric;
/**
* Server roles.
*/
public enum ServerRole {
FAULTY, SPARE, SECONDARY, PRIMARY;
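// Ordinal-based lookup: getFromConstant(0) returns FAULTY, getFromConstant(3) returns PRIMARY.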
public static ServerRole getFromConstant(Integer constant) {
return values()[constant];
}
}
| mit |
ZephyrSurfer/dolphin | Source/Android/app/src/main/java/org/dolphinemu/dolphinemu/features/riivolution/ui/RiivolutionItem.java | 820 | // SPDX-License-Identifier: GPL-2.0-or-later
package org.dolphinemu.dolphinemu.features.riivolution.ui;
public class RiivolutionItem
{
public final int mDiscIndex;
public final int mSectionIndex;
public final int mOptionIndex;
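// An index of -1 means "not applicable" at that level; e.g. a disc-level item carries no
// section or option index.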
/**
* Constructor for a disc.
*/
public RiivolutionItem(int discIndex)
{
mDiscIndex = discIndex;
mSectionIndex = -1;
mOptionIndex = -1;
}
/**
* Constructor for a section.
*/
public RiivolutionItem(int discIndex, int sectionIndex)
{
mDiscIndex = discIndex;
mSectionIndex = sectionIndex;
mOptionIndex = -1;
}
/**
* Constructor for an option.
*/
public RiivolutionItem(int discIndex, int sectionIndex, int optionIndex)
{
mDiscIndex = discIndex;
mSectionIndex = sectionIndex;
mOptionIndex = optionIndex;
}
}
| gpl-2.0 |
curso007/camel | components/camel-optaplanner/src/main/java/org/apache/camel/component/optaplanner/OptaPlannerProducer.java | 6011 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.optaplanner;
import java.util.concurrent.ExecutorService;
import org.apache.camel.Exchange;
import org.apache.camel.impl.DefaultProducer;
import org.optaplanner.core.api.domain.solution.PlanningSolution;
import org.optaplanner.core.api.domain.solution.Solution;
import org.optaplanner.core.api.solver.Solver;
import org.optaplanner.core.impl.solver.ProblemFactChange;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class OptaPlannerProducer extends DefaultProducer {
private static final transient Logger LOGGER = LoggerFactory.getLogger(OptaPlannerProducer.class);
private ExecutorService executor;
private final OptaPlannerEndpoint endpoint;
private final OptaPlannerConfiguration configuration;
public OptaPlannerProducer(OptaPlannerEndpoint endpoint, OptaPlannerConfiguration configuration) {
super(endpoint);
this.endpoint = endpoint;
this.configuration = configuration;
}
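// A rough usage sketch (hypothetical endpoint URI and solver id, not taken from the component docs):
//   from("direct:solve").to("optaplanner:solverConfig.xml?solverId=cloudBalance&async=true");
// The IN body may be a @PlanningSolution problem, a ProblemFactChange, or empty to fetch the best solution.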
@Override
protected void doStart() throws Exception {
if (configuration.isAsync()) {
executor = endpoint.getCamelContext().getExecutorServiceManager().newFixedThreadPool(this, endpoint.getEndpointUri(), configuration.getThreadPoolSize());
}
super.doStart();
}
@Override
protected void doStop() throws Exception {
if (executor != null) {
endpoint.getCamelContext().getExecutorServiceManager().shutdown(executor);
executor = null;
}
super.doStop();
}
@SuppressWarnings("unchecked")
@Override
public synchronized void process(Exchange exchange) throws Exception {
final Object body = exchange.getIn().getMandatoryBody();
final String solverId = getSolverId(exchange);
/*
 * Kept for backward compatibility until OptaPlanner 8.0.0 is released.
 * After that, the '|| body instanceof Solution' check needs to be removed.
 */
if (body.getClass().isAnnotationPresent(PlanningSolution.class) || body instanceof Solution) {
if (isAsync(exchange)) {
LOGGER.debug("Asynchronously solving problem: [{}] with id [{}]", body, solverId);
final Solver<Object> solver = endpoint.getOrCreateSolver(solverId);
executor.submit(new Runnable() {
@Override
public void run() {
try {
solver.solve(body);
} catch (Throwable e) {
LOGGER.error("Asynchronously solving failed for solverId ({})", solverId, e);
}
}
});
} else {
LOGGER.debug("Synchronously solving problem: [{}] with id [{}]", body, solverId);
Solver<Object> solver = endpoint.getSolver(solverId);
if (solver == null) {
solver = endpoint.createSolver();
}
solver.solve(body);
populateResult(exchange, solver);
}
} else if (body instanceof ProblemFactChange) {
LOGGER.debug("Adding ProblemFactChange to solver: [{}] with id [{}]", body, solverId);
Solver<Object> solver = endpoint.getOrCreateSolver(solverId);
solver.addProblemFactChange((ProblemFactChange<Object>)body);
if (!isAsync(exchange)) {
while (!solver.isEveryProblemFactChangeProcessed()) {
Thread.sleep(OptaPlannerConstants.IS_EVERY_PROBLEM_FACT_CHANGE_DELAY);
}
}
populateResult(exchange, solver);
} else {
LOGGER.debug("Retrieving best score for solver: [{}]", solverId);
Solver<Object> solver = endpoint.getSolver(solverId);
if (solver == null) {
throw new RuntimeException("Solver not found: " + solverId);
}
populateResult(exchange, solver);
}
}
private void populateResult(Exchange exchange, Solver<Object> solver) {
exchange.getIn().setBody(solver.getBestSolution());
exchange.getIn().setHeader(OptaPlannerConstants.TIME_SPENT, solver.getTimeMillisSpent());
exchange.getIn().setHeader(OptaPlannerConstants.IS_EVERY_PROBLEM_FACT_CHANGE_PROCESSED, solver.isEveryProblemFactChangeProcessed());
exchange.getIn().setHeader(OptaPlannerConstants.IS_TERMINATE_EARLY, solver.isTerminateEarly());
exchange.getIn().setHeader(OptaPlannerConstants.IS_SOLVING, solver.isSolving());
}
private String getSolverId(Exchange exchange) throws Exception {
String solverId = exchange.getIn().getHeader(OptaPlannerConstants.SOLVER_ID, String.class);
if (solverId == null) {
solverId = configuration.getSolverId();
}
LOGGER.debug("SolverId: [{}]", solverId);
return solverId;
}
private boolean isAsync(Exchange exchange) {
Boolean isAsync = exchange.getIn().getHeader(OptaPlannerConstants.IS_ASYNC, Boolean.class);
return isAsync != null ? isAsync : configuration.isAsync();
}
}
| apache-2.0 |
tiarebalbi/spring-boot | spring-boot-project/spring-boot-actuator-autoconfigure/src/test/java/org/springframework/boot/actuate/autoconfigure/endpoint/web/documentation/MappingsEndpointServletDocumentationTests.java | 10061 | /*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.autoconfigure.endpoint.web.documentation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.boot.actuate.web.mappings.MappingDescriptionProvider;
import org.springframework.boot.actuate.web.mappings.MappingsEndpoint;
import org.springframework.boot.actuate.web.mappings.servlet.DispatcherServletsMappingDescriptionProvider;
import org.springframework.boot.actuate.web.mappings.servlet.FiltersMappingDescriptionProvider;
import org.springframework.boot.actuate.web.mappings.servlet.ServletsMappingDescriptionProvider;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory;
import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.http.MediaType;
import org.springframework.restdocs.RestDocumentationContextProvider;
import org.springframework.restdocs.RestDocumentationExtension;
import org.springframework.restdocs.payload.FieldDescriptor;
import org.springframework.restdocs.payload.JsonFieldType;
import org.springframework.restdocs.payload.ResponseFieldsSnippet;
import org.springframework.test.web.reactive.server.WebTestClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RestController;
import static org.springframework.restdocs.payload.PayloadDocumentation.beneathPath;
import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath;
import static org.springframework.restdocs.payload.PayloadDocumentation.responseFields;
import static org.springframework.restdocs.payload.PayloadDocumentation.subsectionWithPath;
import static org.springframework.restdocs.webtestclient.WebTestClientRestDocumentation.document;
import static org.springframework.restdocs.webtestclient.WebTestClientRestDocumentation.documentationConfiguration;
/**
* Tests for generating documentation describing {@link MappingsEndpoint}.
*
* @author Andy Wilkinson
*/
@ExtendWith(RestDocumentationExtension.class)
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
class MappingsEndpointServletDocumentationTests extends AbstractEndpointDocumentationTests {
@LocalServerPort
private int port;
private WebTestClient client;
@BeforeEach
void webTestClient(RestDocumentationContextProvider restDocumentation) {
this.client = WebTestClient.bindToServer().filter(documentationConfiguration(restDocumentation))
.baseUrl("http://localhost:" + this.port).build();
}
@Test
void mappings() throws Exception {
ResponseFieldsSnippet commonResponseFields = responseFields(
fieldWithPath("contexts").description("Application contexts keyed by id."),
fieldWithPath("contexts.*.mappings").description("Mappings in the context, keyed by mapping type."),
subsectionWithPath("contexts.*.mappings.dispatcherServlets")
.description("Dispatcher servlet mappings, if any."),
subsectionWithPath("contexts.*.mappings.servletFilters")
.description("Servlet filter mappings, if any."),
subsectionWithPath("contexts.*.mappings.servlets").description("Servlet mappings, if any."),
subsectionWithPath("contexts.*.mappings.dispatcherHandlers")
.description("Dispatcher handler mappings, if any.").optional().type(JsonFieldType.OBJECT),
parentIdField());
List<FieldDescriptor> dispatcherServletFields = new ArrayList<>(Arrays.asList(
fieldWithPath("*")
.description("Dispatcher servlet mappings, if any, keyed by dispatcher servlet bean name."),
fieldWithPath("*.[].details").optional().type(JsonFieldType.OBJECT)
.description("Additional implementation-specific details about the mapping. Optional."),
fieldWithPath("*.[].handler").description("Handler for the mapping."),
fieldWithPath("*.[].predicate").description("Predicate for the mapping.")));
List<FieldDescriptor> requestMappingConditions = Arrays.asList(
requestMappingConditionField("").description("Details of the request mapping conditions.").optional(),
requestMappingConditionField(".consumes").description("Details of the consumes condition"),
requestMappingConditionField(".consumes.[].mediaType").description("Consumed media type."),
requestMappingConditionField(".consumes.[].negated").description("Whether the media type is negated."),
requestMappingConditionField(".headers").description("Details of the headers condition."),
requestMappingConditionField(".headers.[].name").description("Name of the header."),
requestMappingConditionField(".headers.[].value").description("Required value of the header, if any."),
requestMappingConditionField(".headers.[].negated").description("Whether the value is negated."),
requestMappingConditionField(".methods").description("HTTP methods that are handled."),
requestMappingConditionField(".params").description("Details of the params condition."),
requestMappingConditionField(".params.[].name").description("Name of the parameter."),
requestMappingConditionField(".params.[].value")
.description("Required value of the parameter, if any."),
requestMappingConditionField(".params.[].negated").description("Whether the value is negated."),
requestMappingConditionField(".patterns")
.description("Patterns identifying the paths handled by the mapping."),
requestMappingConditionField(".produces").description("Details of the produces condition."),
requestMappingConditionField(".produces.[].mediaType").description("Produced media type."),
requestMappingConditionField(".produces.[].negated").description("Whether the media type is negated."));
List<FieldDescriptor> handlerMethod = Arrays.asList(
fieldWithPath("*.[].details.handlerMethod").optional().type(JsonFieldType.OBJECT)
.description("Details of the method, if any, that will handle requests to this mapping."),
fieldWithPath("*.[].details.handlerMethod.className")
.description("Fully qualified name of the class of the method."),
fieldWithPath("*.[].details.handlerMethod.name").description("Name of the method."),
fieldWithPath("*.[].details.handlerMethod.descriptor")
.description("Descriptor of the method as specified in the Java Language Specification."));
dispatcherServletFields.addAll(handlerMethod);
dispatcherServletFields.addAll(requestMappingConditions);
this.client.get().uri("/actuator/mappings").exchange().expectBody()
.consumeWith(document("mappings", commonResponseFields,
responseFields(beneathPath("contexts.*.mappings.dispatcherServlets")
.withSubsectionId("dispatcher-servlets"), dispatcherServletFields),
responseFields(
beneathPath("contexts.*.mappings.servletFilters").withSubsectionId("servlet-filters"),
fieldWithPath("[].servletNameMappings")
.description("Names of the servlets to which the filter is mapped."),
fieldWithPath("[].urlPatternMappings")
.description("URL pattern to which the filter is mapped."),
fieldWithPath("[].name").description("Name of the filter."),
fieldWithPath("[].className").description("Class name of the filter")),
responseFields(beneathPath("contexts.*.mappings.servlets").withSubsectionId("servlets"),
fieldWithPath("[].mappings").description("Mappings of the servlet."),
fieldWithPath("[].name").description("Name of the servlet."),
fieldWithPath("[].className").description("Class name of the servlet"))));
}
private FieldDescriptor requestMappingConditionField(String path) {
return fieldWithPath("*.[].details.requestMappingConditions" + path);
}
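	// Editor's note (illustrative): the helper above simply prefixes the common
	// JSON path, so for example requestMappingConditionField(".consumes.[].mediaType")
	// resolves to "*.[].details.requestMappingConditions.consumes.[].mediaType" in
	// the documented payload.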
@Configuration(proxyBeanMethods = false)
@Import(BaseDocumentationConfiguration.class)
static class TestConfiguration {
@Bean
TomcatServletWebServerFactory tomcat() {
return new TomcatServletWebServerFactory(0);
}
@Bean
DispatcherServletsMappingDescriptionProvider dispatcherServletsMappingDescriptionProvider() {
return new DispatcherServletsMappingDescriptionProvider();
}
@Bean
ServletsMappingDescriptionProvider servletsMappingDescriptionProvider() {
return new ServletsMappingDescriptionProvider();
}
@Bean
FiltersMappingDescriptionProvider filtersMappingDescriptionProvider() {
return new FiltersMappingDescriptionProvider();
}
@Bean
MappingsEndpoint mappingsEndpoint(Collection<MappingDescriptionProvider> descriptionProviders,
ConfigurableApplicationContext context) {
return new MappingsEndpoint(descriptionProviders, context);
}
@Bean
ExampleController exampleController() {
return new ExampleController();
}
}
@RestController
static class ExampleController {
@PostMapping(path = "/", consumes = { MediaType.APPLICATION_JSON_VALUE, "!application/xml" },
produces = MediaType.TEXT_PLAIN_VALUE, headers = "X-Custom=Foo", params = "a!=alpha")
String example() {
return "Hello World";
}
}
}
| apache-2.0 |
jhrcek/kie-wb-common | kie-wb-common-screens/kie-wb-common-data-modeller/kie-wb-common-data-modeller-client/src/main/java/org/kie/workbench/common/screens/datamodeller/client/model/DataModelerPropertyEditorType.java | 765 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.screens.datamodeller.client.model;
import org.uberfire.ext.properties.editor.model.PropertyEditorType;
public enum DataModelerPropertyEditorType {
}
| apache-2.0 |
sarkanyi/libgdx | extensions/gdx-bullet/jni/swig-src/collision/com/badlogic/gdx/physics/bullet/collision/btOptimizedBvh.java | 4604 | /* ----------------------------------------------------------------------------
* This file was automatically generated by SWIG (http://www.swig.org).
* Version 3.0.11
*
* Do not make changes to this file unless you know what you are doing--modify
* the SWIG interface file instead.
* ----------------------------------------------------------------------------- */
package com.badlogic.gdx.physics.bullet.collision;
import com.badlogic.gdx.physics.bullet.BulletBase;
import com.badlogic.gdx.physics.bullet.linearmath.*;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.math.Quaternion;
import com.badlogic.gdx.math.Matrix3;
import com.badlogic.gdx.math.Matrix4;
public class btOptimizedBvh extends btQuantizedBvh {
private long swigCPtr;
protected btOptimizedBvh(final String className, long cPtr, boolean cMemoryOwn) {
super(className, CollisionJNI.btOptimizedBvh_SWIGUpcast(cPtr), cMemoryOwn);
swigCPtr = cPtr;
}
	/** Construct a new btOptimizedBvh; normally you should not need this constructor, it's intended for low-level usage. */

public btOptimizedBvh(long cPtr, boolean cMemoryOwn) {
this("btOptimizedBvh", cPtr, cMemoryOwn);
construct();
}
@Override
protected void reset(long cPtr, boolean cMemoryOwn) {
if (!destroyed)
destroy();
super.reset(CollisionJNI.btOptimizedBvh_SWIGUpcast(swigCPtr = cPtr), cMemoryOwn);
}
public static long getCPtr(btOptimizedBvh obj) {
return (obj == null) ? 0 : obj.swigCPtr;
}
@Override
protected void finalize() throws Throwable {
if (!destroyed)
destroy();
super.finalize();
}
@Override protected synchronized void delete() {
if (swigCPtr != 0) {
if (swigCMemOwn) {
swigCMemOwn = false;
CollisionJNI.delete_btOptimizedBvh(swigCPtr);
}
swigCPtr = 0;
}
super.delete();
}
public long operatorNew(long sizeInBytes) {
return CollisionJNI.btOptimizedBvh_operatorNew__SWIG_0(swigCPtr, this, sizeInBytes);
}
public void operatorDelete(long ptr) {
CollisionJNI.btOptimizedBvh_operatorDelete__SWIG_0(swigCPtr, this, ptr);
}
public long operatorNew(long arg0, long ptr) {
return CollisionJNI.btOptimizedBvh_operatorNew__SWIG_1(swigCPtr, this, arg0, ptr);
}
public void operatorDelete(long arg0, long arg1) {
CollisionJNI.btOptimizedBvh_operatorDelete__SWIG_1(swigCPtr, this, arg0, arg1);
}
public long operatorNewArray(long sizeInBytes) {
return CollisionJNI.btOptimizedBvh_operatorNewArray__SWIG_0(swigCPtr, this, sizeInBytes);
}
public void operatorDeleteArray(long ptr) {
CollisionJNI.btOptimizedBvh_operatorDeleteArray__SWIG_0(swigCPtr, this, ptr);
}
public long operatorNewArray(long arg0, long ptr) {
return CollisionJNI.btOptimizedBvh_operatorNewArray__SWIG_1(swigCPtr, this, arg0, ptr);
}
public void operatorDeleteArray(long arg0, long arg1) {
CollisionJNI.btOptimizedBvh_operatorDeleteArray__SWIG_1(swigCPtr, this, arg0, arg1);
}
public btOptimizedBvh() {
this(CollisionJNI.new_btOptimizedBvh(), true);
}
public void build(btStridingMeshInterface triangles, boolean useQuantizedAabbCompression, Vector3 bvhAabbMin, Vector3 bvhAabbMax) {
CollisionJNI.btOptimizedBvh_build(swigCPtr, this, btStridingMeshInterface.getCPtr(triangles), triangles, useQuantizedAabbCompression, bvhAabbMin, bvhAabbMax);
}
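	/*
	 * Illustrative sketch (editor's addition): building the BVH over an existing
	 * triangle mesh. "meshInterface" stands for any btStridingMeshInterface
	 * prepared elsewhere (e.g. a btTriangleIndexVertexArray), and the AABB
	 * bounds are placeholder values.
	 *
	 *   btOptimizedBvh bvh = new btOptimizedBvh();
	 *   bvh.build(meshInterface, true, new Vector3(-100f, -100f, -100f), new Vector3(100f, 100f, 100f));
	 */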
public void refit(btStridingMeshInterface triangles, Vector3 aabbMin, Vector3 aabbMax) {
CollisionJNI.btOptimizedBvh_refit(swigCPtr, this, btStridingMeshInterface.getCPtr(triangles), triangles, aabbMin, aabbMax);
}
public void refitPartial(btStridingMeshInterface triangles, Vector3 aabbMin, Vector3 aabbMax) {
CollisionJNI.btOptimizedBvh_refitPartial(swigCPtr, this, btStridingMeshInterface.getCPtr(triangles), triangles, aabbMin, aabbMax);
}
public void updateBvhNodes(btStridingMeshInterface meshInterface, int firstNode, int endNode, int index) {
CollisionJNI.btOptimizedBvh_updateBvhNodes(swigCPtr, this, btStridingMeshInterface.getCPtr(meshInterface), meshInterface, firstNode, endNode, index);
}
public boolean serializeInPlace(long o_alignedDataBuffer, long i_dataBufferSize, boolean i_swapEndian) {
return CollisionJNI.btOptimizedBvh_serializeInPlace(swigCPtr, this, o_alignedDataBuffer, i_dataBufferSize, i_swapEndian);
}
public static btOptimizedBvh deSerializeInPlace(long i_alignedDataBuffer, long i_dataBufferSize, boolean i_swapEndian) {
long cPtr = CollisionJNI.btOptimizedBvh_deSerializeInPlace(i_alignedDataBuffer, i_dataBufferSize, i_swapEndian);
return (cPtr == 0) ? null : new btOptimizedBvh(cPtr, false);
}
}
| apache-2.0 |
JSDemos/android-sdk-20 | src/org/apache/harmony/security/pkcs8/PrivateKeyInfo.java | 4403 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.security.pkcs8;
import java.util.List;
import org.apache.harmony.security.asn1.ASN1Implicit;
import org.apache.harmony.security.asn1.ASN1Integer;
import org.apache.harmony.security.asn1.ASN1OctetString;
import org.apache.harmony.security.asn1.ASN1Sequence;
import org.apache.harmony.security.asn1.ASN1SetOf;
import org.apache.harmony.security.asn1.ASN1Type;
import org.apache.harmony.security.asn1.BerInputStream;
import org.apache.harmony.security.x501.AttributeTypeAndValue;
import org.apache.harmony.security.x509.AlgorithmIdentifier;
/**
* The class implements the ASN.1 DER encoding and decoding of the PKCS#8
* PrivateKeyInfo having the following ASN.1 notation:
*
* PrivateKeyInfo ::= SEQUENCE {
* version Version,
* privateKeyAlgorithm PrivateKeyAlgorithmIdentifier,
* privateKey PrivateKey,
* attributes [0] IMPLICIT Attributes OPTIONAL }
*
* Version ::= INTEGER
*
* PrivateKeyAlgorithmIdentifier ::= AlgorithmIdentifier
*
* PrivateKey ::= OCTET STRING
*
* Attributes ::= SET OF Attribute
*/
public final class PrivateKeyInfo {
private final int version;
private final AlgorithmIdentifier privateKeyAlgorithm;
private final byte[] privateKey;
private final List<?> attributes;
private byte[] encoding;
public PrivateKeyInfo(int version, AlgorithmIdentifier privateKeyAlgorithm,
byte[] privateKey, List attributes) {
this.version = version;
this.privateKeyAlgorithm = privateKeyAlgorithm;
this.privateKey = privateKey;
this.attributes = attributes;
}
private PrivateKeyInfo(int version,
AlgorithmIdentifier privateKeyAlgorithm, byte[] privateKey,
List attributes, byte[] encoding) {
this(version, privateKeyAlgorithm, privateKey, attributes);
this.encoding = encoding;
}
public int getVersion() {
return version;
}
public AlgorithmIdentifier getAlgorithmIdentifier() {
return privateKeyAlgorithm;
}
public List getAttributes() {
return attributes;
}
/**
* Returns the OCTET STRING.
*/
public byte[] getPrivateKey() {
return privateKey;
}
/**
* Returns ASN.1 encoded form of this PrivateKeyInfo.
*/
public byte[] getEncoded() {
if (encoding == null) {
encoding = ASN1.encode(this);
}
return encoding;
}
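    /*
     * Illustrative sketch (editor's addition): decoding a DER-encoded PKCS#8
     * blob with the ASN1 template declared below and reading its parts. The
     * byte[] variable "der" is a hypothetical placeholder; decode(...) throws
     * IOException on malformed input.
     *
     *   PrivateKeyInfo info = (PrivateKeyInfo) PrivateKeyInfo.ASN1.decode(der);
     *   AlgorithmIdentifier algorithm = info.getAlgorithmIdentifier();
     *   byte[] rawKey = info.getPrivateKey();
     */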
public static final ASN1Sequence ASN1 = new ASN1Sequence(new ASN1Type[] {
ASN1Integer.getInstance(), // version
AlgorithmIdentifier.ASN1, // AlgorithmIdentifier
ASN1OctetString.getInstance(), // privateKey
new ASN1Implicit(0, new ASN1SetOf(AttributeTypeAndValue.ASN1)) // attributes
}) {
{
setOptional(3); // attributes are OPTIONAL
}
protected Object getDecodedObject(BerInputStream in) {
Object[] values = (Object[]) in.content;
return new PrivateKeyInfo(ASN1Integer.toIntValue(values[0]),
(AlgorithmIdentifier) values[1], (byte[]) values[2],
(List) values[3], in.getEncoded());
}
protected void getValues(Object object, Object[] values) {
PrivateKeyInfo privateKeyInfo = (PrivateKeyInfo) object;
values[0] = ASN1Integer.fromIntValue(privateKeyInfo.version);
values[1] = privateKeyInfo.privateKeyAlgorithm;
values[2] = privateKeyInfo.privateKey;
values[3] = privateKeyInfo.attributes;
}
};
}
| apache-2.0 |
priyatransbit/aws-sdk-java | aws-java-sdk-iam/src/main/java/com/amazonaws/services/identitymanagement/model/GetUserPolicyRequest.java | 7287 | /*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.identitymanagement.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Container for the parameters to the {@link com.amazonaws.services.identitymanagement.AmazonIdentityManagement#getUserPolicy(GetUserPolicyRequest) GetUserPolicy operation}.
* <p>
* Retrieves the specified inline policy document that is embedded in the
* specified user.
* </p>
* <p>
* A user can also have managed policies attached to it. To retrieve a
* managed policy document that is attached to a user, use GetPolicy to
* determine the policy's default version, then use GetPolicyVersion to
* retrieve the policy document.
* </p>
* <p>
* For more information about policies, refer to
* <a href="http://docs.aws.amazon.com/IAM/latest/UserGuide/policies-managed-vs-inline.html"> Managed Policies and Inline Policies </a>
* in the <i>IAM User Guide</i> .
* </p>
*
* @see com.amazonaws.services.identitymanagement.AmazonIdentityManagement#getUserPolicy(GetUserPolicyRequest)
*/
public class GetUserPolicyRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* The name of the user who the policy is associated with.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 128<br/>
* <b>Pattern: </b>[\w+=,.@-]+<br/>
*/
private String userName;
/**
* The name of the policy document to get.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 128<br/>
* <b>Pattern: </b>[\w+=,.@-]+<br/>
*/
private String policyName;
/**
* Default constructor for a new GetUserPolicyRequest object. Callers should use the
* setter or fluent setter (with...) methods to initialize this object after creating it.
*/
public GetUserPolicyRequest() {}
/**
* Constructs a new GetUserPolicyRequest object.
* Callers should use the setter or fluent setter (with...) methods to
* initialize any additional object members.
*
* @param userName The name of the user who the policy is associated
* with.
* @param policyName The name of the policy document to get.
*/
public GetUserPolicyRequest(String userName, String policyName) {
setUserName(userName);
setPolicyName(policyName);
}
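    /*
     * Illustrative sketch (editor's addition): a typical call through an IAM
     * client. The "iam" client and the "Bob"/"ReadOnlyAccess" values are
     * hypothetical placeholders; note that the service returns the policy
     * document URL-encoded.
     *
     *   GetUserPolicyRequest request = new GetUserPolicyRequest("Bob", "ReadOnlyAccess");
     *   GetUserPolicyResult result = iam.getUserPolicy(request);
     *   String policyDocument = result.getPolicyDocument();
     */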
/**
* The name of the user who the policy is associated with.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 128<br/>
* <b>Pattern: </b>[\w+=,.@-]+<br/>
*
* @return The name of the user who the policy is associated with.
*/
public String getUserName() {
return userName;
}
/**
* The name of the user who the policy is associated with.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 128<br/>
* <b>Pattern: </b>[\w+=,.@-]+<br/>
*
* @param userName The name of the user who the policy is associated with.
*/
public void setUserName(String userName) {
this.userName = userName;
}
/**
* The name of the user who the policy is associated with.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 128<br/>
* <b>Pattern: </b>[\w+=,.@-]+<br/>
*
* @param userName The name of the user who the policy is associated with.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GetUserPolicyRequest withUserName(String userName) {
this.userName = userName;
return this;
}
/**
* The name of the policy document to get.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 128<br/>
* <b>Pattern: </b>[\w+=,.@-]+<br/>
*
* @return The name of the policy document to get.
*/
public String getPolicyName() {
return policyName;
}
/**
* The name of the policy document to get.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 128<br/>
* <b>Pattern: </b>[\w+=,.@-]+<br/>
*
* @param policyName The name of the policy document to get.
*/
public void setPolicyName(String policyName) {
this.policyName = policyName;
}
/**
* The name of the policy document to get.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>1 - 128<br/>
* <b>Pattern: </b>[\w+=,.@-]+<br/>
*
* @param policyName The name of the policy document to get.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GetUserPolicyRequest withPolicyName(String policyName) {
this.policyName = policyName;
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getUserName() != null) sb.append("UserName: " + getUserName() + ",");
if (getPolicyName() != null) sb.append("PolicyName: " + getPolicyName() );
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getUserName() == null) ? 0 : getUserName().hashCode());
hashCode = prime * hashCode + ((getPolicyName() == null) ? 0 : getPolicyName().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof GetUserPolicyRequest == false) return false;
GetUserPolicyRequest other = (GetUserPolicyRequest)obj;
if (other.getUserName() == null ^ this.getUserName() == null) return false;
if (other.getUserName() != null && other.getUserName().equals(this.getUserName()) == false) return false;
if (other.getPolicyName() == null ^ this.getPolicyName() == null) return false;
if (other.getPolicyName() != null && other.getPolicyName().equals(this.getPolicyName()) == false) return false;
return true;
}
@Override
public GetUserPolicyRequest clone() {
return (GetUserPolicyRequest) super.clone();
}
}
| apache-2.0 |
MetSystem/jbpm | jbpm-flow-builder/src/main/java/org/jbpm/compiler/xml/XmlWorkflowProcessDumper.java | 12425 | /*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.compiler.xml;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.drools.compiler.compiler.xml.XmlDumper;
import org.kie.api.definition.process.Connection;
import org.kie.api.definition.process.Node;
import org.kie.api.definition.process.WorkflowProcess;
import org.drools.core.process.core.datatype.DataType;
import org.drools.core.process.core.datatype.impl.type.ObjectDataType;
import org.drools.core.xml.Handler;
import org.drools.core.xml.SemanticModule;
import org.jbpm.compiler.xml.processes.AbstractNodeHandler;
import org.jbpm.process.core.context.exception.ActionExceptionHandler;
import org.jbpm.process.core.context.exception.ExceptionHandler;
import org.jbpm.process.core.context.exception.ExceptionScope;
import org.jbpm.process.core.context.swimlane.Swimlane;
import org.jbpm.process.core.context.swimlane.SwimlaneContext;
import org.jbpm.process.core.context.variable.Variable;
import org.jbpm.process.core.context.variable.VariableScope;
import org.jbpm.workflow.core.DroolsAction;
import org.jbpm.workflow.core.impl.NodeImpl;
public class XmlWorkflowProcessDumper {
private final static String EOL = System.getProperty( "line.separator" );
private String type;
private String namespace;
private String schemaLocation;
private SemanticModule semanticModule;
public XmlWorkflowProcessDumper(String type, String namespace, String schemaLocation, SemanticModule semanticModule) {
this.type = type;
this.namespace = namespace;
this.schemaLocation = schemaLocation;
this.semanticModule = semanticModule;
}
public String dump(WorkflowProcess process) {
return dump(process, true);
}
public String dump(WorkflowProcess process, boolean includeMeta) {
StringBuilder xmlDump = new StringBuilder();
visitProcess(process, xmlDump, includeMeta);
return xmlDump.toString();
}
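    /*
     * Illustrative sketch (editor's addition): turning a process definition into
     * XML. The constructor arguments and the "process"/"semanticModule" variables
     * are hypothetical placeholders; in practice a pre-configured dumper subclass
     * (e.g. XmlRuleFlowProcessDumper) is normally used instead of wiring this by hand.
     *
     *   XmlWorkflowProcessDumper dumper = new XmlWorkflowProcessDumper(
     *       "RuleFlow", "http://drools.org/drools-5.0/process",
     *       "drools-processes-5.0.xsd", semanticModule);
     *   String xml = dumper.dump(process, false);
     */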
protected void visitProcess(WorkflowProcess process, StringBuilder xmlDump, boolean includeMeta) {
xmlDump.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?> " + EOL
+ "<process xmlns=\"" + namespace + "\"" + EOL
+ " xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"" + EOL
+ " xs:schemaLocation=\"" + namespace + " " + schemaLocation + "\"" + EOL
+ " type=\"" + type + "\" ");
if (process.getName() != null) {
xmlDump.append("name=\"" + process.getName() + "\" ");
}
if (process.getId() != null) {
xmlDump.append("id=\"" + process.getId() + "\" ");
}
if (process.getPackageName() != null) {
xmlDump.append("package-name=\"" + process.getPackageName() + "\" ");
}
if (process.getVersion() != null) {
xmlDump.append("version=\"" + process.getVersion() + "\" ");
}
if (includeMeta) {
Integer routerLayout = (Integer) process.getMetaData().get("routerLayout");
if (routerLayout != null && routerLayout != 0) {
xmlDump.append("routerLayout=\"" + routerLayout + "\" ");
}
}
xmlDump.append(">" + EOL + EOL);
visitHeader(process, xmlDump, includeMeta);
visitNodes(process, xmlDump, includeMeta);
visitConnections(process.getNodes(), xmlDump, includeMeta);
xmlDump.append("</process>");
}
protected void visitHeader(WorkflowProcess process, StringBuilder xmlDump, boolean includeMeta) {
xmlDump.append(" <header>" + EOL);
visitImports(((org.jbpm.process.core.Process) process).getImports(), xmlDump);
visitGlobals(((org.jbpm.process.core.Process) process).getGlobals(), xmlDump);
visitFunctionImports(((org.jbpm.process.core.Process) process).getFunctionImports(), xmlDump);
VariableScope variableScope = (VariableScope)
((org.jbpm.process.core.Process) process).getDefaultContext(VariableScope.VARIABLE_SCOPE);
if (variableScope != null) {
visitVariables(variableScope.getVariables(), xmlDump);
}
SwimlaneContext swimlaneContext = (SwimlaneContext)
((org.jbpm.process.core.Process) process).getDefaultContext(SwimlaneContext.SWIMLANE_SCOPE);
if (swimlaneContext != null) {
visitSwimlanes(swimlaneContext.getSwimlanes(), xmlDump);
}
ExceptionScope exceptionScope = (ExceptionScope)
((org.jbpm.process.core.Process) process).getDefaultContext(ExceptionScope.EXCEPTION_SCOPE);
if (exceptionScope != null) {
visitExceptionHandlers(exceptionScope.getExceptionHandlers(), xmlDump);
}
xmlDump.append(" </header>" + EOL + EOL);
}
private void visitImports(Collection<String> imports, StringBuilder xmlDump) {
if (imports != null && imports.size() > 0) {
xmlDump.append(" <imports>" + EOL);
for (String importString: imports) {
xmlDump.append(" <import name=\"" + importString + "\" />" + EOL);
}
xmlDump.append(" </imports>" + EOL);
}
}
private void visitFunctionImports(List<String> imports, StringBuilder xmlDump) {
if (imports != null && imports.size() > 0) {
xmlDump.append(" <functionImports>" + EOL);
for (String importString: imports) {
xmlDump.append(" <functionImport name=\"" + importString + "\" />" + EOL);
}
xmlDump.append(" </functionImports>" + EOL);
}
}
private void visitGlobals(Map<String, String> globals, StringBuilder xmlDump) {
if (globals != null && globals.size() > 0) {
xmlDump.append(" <globals>" + EOL);
for (Map.Entry<String, String> global: globals.entrySet()) {
xmlDump.append(" <global identifier=\"" + global.getKey() + "\" type=\"" + global.getValue() + "\" />" + EOL);
}
xmlDump.append(" </globals>" + EOL);
}
}
public static void visitVariables(List<Variable> variables, StringBuilder xmlDump) {
if (variables != null && variables.size() > 0) {
xmlDump.append(" <variables>" + EOL);
for (Variable variable: variables) {
xmlDump.append(" <variable name=\"" + variable.getName() + "\" >" + EOL);
visitDataType(variable.getType(), xmlDump);
Object value = variable.getValue();
if (value != null) {
visitValue(variable.getValue(), variable.getType(), xmlDump);
}
xmlDump.append(" </variable>" + EOL);
}
xmlDump.append(" </variables>" + EOL);
}
}
private void visitSwimlanes(Collection<Swimlane> swimlanes, StringBuilder xmlDump) {
if (swimlanes != null && swimlanes.size() > 0) {
xmlDump.append(" <swimlanes>" + EOL);
for (Swimlane swimlane: swimlanes) {
xmlDump.append(" <swimlane name=\"" + swimlane.getName() + "\" />" + EOL);
}
xmlDump.append(" </swimlanes>" + EOL);
}
}
public static void visitExceptionHandlers(Map<String, ExceptionHandler> exceptionHandlers, StringBuilder xmlDump) {
if (exceptionHandlers != null && exceptionHandlers.size() > 0) {
xmlDump.append(" <exceptionHandlers>" + EOL);
for (Map.Entry<String, ExceptionHandler> entry: exceptionHandlers.entrySet()) {
ExceptionHandler exceptionHandler = entry.getValue();
if (exceptionHandler instanceof ActionExceptionHandler) {
ActionExceptionHandler actionExceptionHandler = (ActionExceptionHandler) exceptionHandler;
xmlDump.append(" <exceptionHandler faultName=\"" + entry.getKey() + "\" type=\"action\" ");
String faultVariable = actionExceptionHandler.getFaultVariable();
if (faultVariable != null && faultVariable.length() > 0) {
xmlDump.append("faultVariable=\"" + faultVariable + "\" ");
}
xmlDump.append(">" + EOL);
DroolsAction action = actionExceptionHandler.getAction();
if (action != null) {
AbstractNodeHandler.writeAction(action, xmlDump);
}
xmlDump.append(" </exceptionHandler>" + EOL);
} else {
throw new IllegalArgumentException("Unknown exception handler type: " + exceptionHandler);
}
}
xmlDump.append(" </exceptionHandlers>" + EOL);
}
}
public static void visitDataType(DataType dataType, StringBuilder xmlDump) {
xmlDump.append(" <type name=\"" + dataType.getClass().getName() + "\" ");
// TODO make this pluggable so datatypes can write out other properties as well
if (dataType instanceof ObjectDataType) {
String className = ((ObjectDataType) dataType).getClassName();
if (className != null
&& className.trim().length() > 0
&& !"java.lang.Object".equals(className)) {
xmlDump.append("className=\"" + className + "\" ");
}
}
xmlDump.append("/>" + EOL);
}
public static void visitValue(Object value, DataType dataType, StringBuilder xmlDump) {
xmlDump.append(" <value>" + XmlDumper.replaceIllegalChars(dataType.writeValue(value)) + "</value>" + EOL);
}
private void visitNodes(WorkflowProcess process, StringBuilder xmlDump, boolean includeMeta) {
xmlDump.append(" <nodes>" + EOL);
for (Node node: process.getNodes()) {
visitNode(node, xmlDump, includeMeta);
}
xmlDump.append(" </nodes>" + EOL + EOL);
}
public void visitNode(Node node, StringBuilder xmlDump, boolean includeMeta) {
Handler handler = semanticModule.getHandlerByClass(node.getClass());
if (handler != null) {
((AbstractNodeHandler) handler).writeNode((org.jbpm.workflow.core.Node) node, xmlDump, includeMeta);
} else {
throw new IllegalArgumentException(
"Unknown node type: " + node);
}
}
private void visitConnections(Node[] nodes, StringBuilder xmlDump, boolean includeMeta) {
List<Connection> connections = new ArrayList<Connection>();
for (Node node: nodes) {
for (List<Connection> connectionList: node.getIncomingConnections().values()) {
connections.addAll(connectionList);
}
}
xmlDump.append(" <connections>" + EOL);
for (Connection connection: connections) {
visitConnection(connection, xmlDump, includeMeta);
}
xmlDump.append(" </connections>" + EOL + EOL);
}
public void visitConnection(Connection connection, StringBuilder xmlDump, boolean includeMeta) {
xmlDump.append(" <connection from=\"" + connection.getFrom().getId() + "\" ");
if (!NodeImpl.CONNECTION_DEFAULT_TYPE.equals(connection.getFromType())) {
xmlDump.append("fromType=\"" + connection.getFromType() + "\" ");
}
xmlDump.append("to=\"" + connection.getTo().getId() + "\" ");
if (!NodeImpl.CONNECTION_DEFAULT_TYPE.equals(connection.getToType())) {
xmlDump.append("toType=\"" + connection.getToType() + "\" ");
}
if (includeMeta) {
String bendpoints = (String) connection.getMetaData().get("bendpoints");
if (bendpoints != null) {
xmlDump.append("bendpoints=\"" + bendpoints + "\" ");
}
}
xmlDump.append("/>" + EOL);
}
}
| apache-2.0 |
jomarko/kie-wb-common | kie-wb-common-screens/kie-wb-common-project-explorer/kie-wb-common-project-explorer-backend/src/main/java/org/kie/workbench/common/screens/explorer/backend/server/restrictor/ProjectRequiredPathsRestrictor.java | 4097 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.screens.explorer.backend.server.restrictor;
import java.io.File;
import javax.enterprise.context.ApplicationScoped;
import org.uberfire.backend.vfs.Path;
import org.uberfire.ext.editor.commons.service.restriction.PathOperationRestriction;
import org.uberfire.ext.editor.commons.service.restrictor.DeleteRestrictor;
import org.uberfire.ext.editor.commons.service.restrictor.RenameRestrictor;
@ApplicationScoped
public class ProjectRequiredPathsRestrictor implements DeleteRestrictor,
RenameRestrictor {
private enum Rule {
POM_XML( RuleType.ENDS_WITH, "/pom.xml" ),
SRC( RuleType.ENDS_WITH, "/src" ),
SRC_MAIN( RuleType.ENDS_WITH, "/src/main" ),
SRC_MAIN_JAVA( RuleType.ENDS_WITH, "/src/main/java" ),
SRC_MAIN_RESOURCES( RuleType.ENDS_WITH, "/src/main/resources" ),
SRC_MAIN_META_INF( RuleType.ENDS_WITH, "/src/main/resources/META-INF" ),
SRC_MAIN_META_INF_KMODULE_XML( RuleType.ENDS_WITH, "/src/main/resources/META-INF/kmodule.xml" ),
SRC_TEST( RuleType.ENDS_WITH, "/src/test" ),
SRC_TEST_JAVA( RuleType.ENDS_WITH, "/src/test/java" ),
SRC_TEST_RESOURCES( RuleType.ENDS_WITH, "/src/test/resources" );
private RuleType type;
private String expression;
Rule( RuleType type, String expression ) {
this.type = type;
this.expression = expression;
}
public RuleType getType() {
return this.type;
}
public String getExpression() {
return this.expression;
}
public boolean check( String text ) {
return type.check( text, expression );
}
}
@Override
public PathOperationRestriction hasRestriction( final Path path ) {
if ( isRequiredPath( path ) ) {
return new PathOperationRestriction() {
@Override
public String getMessage( final Path path ) {
return path.toURI() + " cannot be deleted, renamed or moved, because it is a required project file or directory.";
}
};
}
return null;
}
private boolean isRequiredPath( final Path path ) {
final String text = removeLastSeparatorIfExists( path.toURI() );
for ( Rule rule : Rule.values()) {
if ( rule.check( text ) ) {
return true;
}
}
return false;
}
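    /*
     * Editor's note (illustrative): with the ENDS_WITH rules above, URIs such as
     * ".../myproject/pom.xml" or ".../myproject/src/main/resources" are treated as
     * required and therefore blocked from delete/rename/move, whereas a file like
     * ".../myproject/src/main/resources/rules.drl" matches no rule and stays
     * unrestricted. The example URIs are hypothetical.
     */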
private String removeLastSeparatorIfExists( String text ) {
if ( text.length() > 1 && text.endsWith( File.separator ) ) {
text = text.substring( 0, text.length() - 1 );
}
return text;
}
private enum RuleType {
STARTS_WITH {
@Override
public boolean check( String text, String expression ) {
return text != null && text.startsWith( expression );
}
}, CONTAINS {
@Override
public boolean check( String text, String expression ) {
return text != null && text.contains( expression );
}
}, ENDS_WITH {
@Override
public boolean check( String text, String expression ) {
return text != null && text.endsWith( expression );
}
};
public abstract boolean check( String text, String expression );
}
}
| apache-2.0 |
AltitudeDigital/metrics | metrics-core/src/main/java/io/dropwizard/metrics/Sampling.java | 277 | package io.dropwizard.metrics;
import io.dropwizard.metrics.Snapshot;
/**
* An object which samples values.
*/
public interface Sampling {
/**
* Returns a snapshot of the values.
*
* @return a snapshot of the values
*/
Snapshot getSnapshot();
}
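/*
 * Illustrative sketch (editor's addition): Histogram and Timer are the usual
 * implementors of this interface in the metrics library, so a caller typically
 * reads quantiles from the snapshot. The "registry" and metric name below are
 * hypothetical placeholders.
 *
 *   Histogram histogram = registry.histogram("response-sizes");
 *   Snapshot snapshot = histogram.getSnapshot();
 *   double p99 = snapshot.get99thPercentile();
 */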
| apache-2.0 |
syl20bnr/jenkins | core/src/main/java/hudson/slaves/RetentionStrategy.java | 10414 | /*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Stephen Connolly
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.slaves;
import hudson.ExtensionPoint;
import hudson.Util;
import hudson.DescriptorExtensionList;
import hudson.Extension;
import hudson.model.*;
import hudson.model.Queue.*;
import hudson.util.DescriptorList;
import java.util.Collections;
import java.util.HashMap;
import jenkins.model.Jenkins;
import org.kohsuke.stapler.DataBoundConstructor;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Controls when to take {@link Computer} offline, bring it back online, or even to destroy it.
*
* @author Stephen Connolly
* @author Kohsuke Kawaguchi
*/
public abstract class RetentionStrategy<T extends Computer> extends AbstractDescribableImpl<RetentionStrategy<?>> implements ExtensionPoint {
/**
     * This method will be called periodically to allow this strategy to decide what to do with its owning slave.
*
* @param c {@link Computer} for which this strategy is assigned. This computer may be online or offline.
* This object also exposes a bunch of properties that the callee can use to decide what action to take.
* @return The number of minutes after which the strategy would like to be checked again. The strategy may be
     *         rechecked earlier or later than this!
*/
public abstract long check(T c);
/**
* This method is called to determine whether manual launching of the slave is allowed at this point in time.
* @param c {@link Computer} for which this strategy is assigned. This computer may be online or offline.
* This object also exposes a bunch of properties that the callee can use to decide if manual launching is
* allowed at this time.
* @return {@code true} if manual launching of the slave is allowed at this point in time.
*/
public boolean isManualLaunchAllowed(T c) {
return true;
}
/**
     * Called when a new {@link Computer} object is introduced (such as when Hudson starts, or when
     * a new slave is added).
*
* <p>
* The default implementation of this method delegates to {@link #check(Computer)},
* but this allows {@link RetentionStrategy} to distinguish the first time invocation from the rest.
*
* @since 1.275
*/
public void start(T c) {
check(c);
}
/**
* Returns all the registered {@link RetentionStrategy} descriptors.
*/
public static DescriptorExtensionList<RetentionStrategy<?>,Descriptor<RetentionStrategy<?>>> all() {
return (DescriptorExtensionList) Jenkins.getInstance().getDescriptorList(RetentionStrategy.class);
}
/**
* All registered {@link RetentionStrategy} implementations.
* @deprecated as of 1.286
* Use {@link #all()} for read access, and {@link Extension} for registration.
*/
public static final DescriptorList<RetentionStrategy<?>> LIST = new DescriptorList<RetentionStrategy<?>>((Class)RetentionStrategy.class);
/**
     * Dummy instance that doesn't make any attempt at retention.
*/
public static final RetentionStrategy<Computer> NOOP = new RetentionStrategy<Computer>() {
public long check(Computer c) {
return 60;
}
@Override
public void start(Computer c) {
c.connect(false);
}
@Override
public Descriptor<RetentionStrategy<?>> getDescriptor() {
return DESCRIPTOR;
}
private final DescriptorImpl DESCRIPTOR = new DescriptorImpl();
class DescriptorImpl extends Descriptor<RetentionStrategy<?>> {
public String getDisplayName() {
return "";
}
}
};
/**
* Convenient singleton instance, since this {@link RetentionStrategy} is stateless.
*/
public static final Always INSTANCE = new Always();
/**
* {@link RetentionStrategy} that tries to keep the node online all the time.
*/
public static class Always extends RetentionStrategy<SlaveComputer> {
/**
* Constructs a new Always.
*/
@DataBoundConstructor
public Always() {
}
public long check(SlaveComputer c) {
if (c.isOffline() && !c.isConnecting() && c.isLaunchSupported())
c.tryReconnect();
return 1;
}
@Extension(ordinal=100)
public static class DescriptorImpl extends Descriptor<RetentionStrategy<?>> {
public String getDisplayName() {
return Messages.RetentionStrategy_Always_displayName();
}
}
}
/**
* {@link hudson.slaves.RetentionStrategy} that tries to keep the node offline when not in use.
*/
public static class Demand extends RetentionStrategy<SlaveComputer> {
private static final Logger logger = Logger.getLogger(Demand.class.getName());
/**
         * The delay (in minutes) for which the slave must be in demand before trying to launch it.
*/
private final long inDemandDelay;
/**
* The delay (in minutes) for which the slave must be idle before taking it offline.
*/
private final long idleDelay;
@DataBoundConstructor
public Demand(long inDemandDelay, long idleDelay) {
this.inDemandDelay = Math.max(0, inDemandDelay);
this.idleDelay = Math.max(1, idleDelay);
}
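        /*
         * Editor's note (illustrative): a strategy created as
         * new RetentionStrategy.Demand(5, 30) launches the node once matching work
         * has been queued for at least 5 minutes and disconnects it again after 30
         * idle minutes; values below the minimums are clamped by the constructor
         * above.
         */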
/**
* Getter for property 'inDemandDelay'.
*
* @return Value for property 'inDemandDelay'.
*/
public long getInDemandDelay() {
return inDemandDelay;
}
/**
* Getter for property 'idleDelay'.
*
* @return Value for property 'idleDelay'.
*/
public long getIdleDelay() {
return idleDelay;
}
public synchronized long check(SlaveComputer c) {
if (c.isOffline() && c.isLaunchSupported()) {
final HashMap<Computer, Integer> availableComputers = new HashMap<Computer, Integer>();
for (Computer o : Jenkins.getInstance().getComputers()) {
if ((o.isOnline() || o.isConnecting()) && o.isPartiallyIdle()) {
final int idleExecutors = o.countIdle();
if (idleExecutors>0)
availableComputers.put(o, idleExecutors);
}
}
boolean needComputer = false;
long demandMilliseconds = 0;
for (Queue.BuildableItem item : Queue.getInstance().getBuildableItems()) {
// can any of the currently idle executors take this task?
// assume the answer is no until we can find such an executor
boolean needExecutor = true;
for (Computer o : Collections.unmodifiableSet(availableComputers.keySet())) {
if (o.getNode().canTake(item) == null) {
needExecutor = false;
final int availableExecutors = availableComputers.remove(o);
if (availableExecutors > 1) {
availableComputers.put(o, availableExecutors - 1);
} else {
availableComputers.remove(o);
}
break;
}
}
// this 'item' cannot be built by any of the existing idle nodes, but it can be built by 'c'
if (needExecutor && c.getNode().canTake(item) == null) {
demandMilliseconds = System.currentTimeMillis() - item.buildableStartMilliseconds;
needComputer = demandMilliseconds > inDemandDelay * 1000 * 60 /*MINS->MILLIS*/;
break;
}
}
if (needComputer) {
// we've been in demand for long enough
logger.log(Level.INFO, "Launching computer {0} as it has been in demand for {1}",
new Object[]{c.getName(), Util.getTimeSpanString(demandMilliseconds)});
c.connect(false);
}
} else if (c.isIdle()) {
final long idleMilliseconds = System.currentTimeMillis() - c.getIdleStartMilliseconds();
if (idleMilliseconds > idleDelay * 1000 * 60 /*MINS->MILLIS*/) {
// we've been idle for long enough
logger.log(Level.INFO, "Disconnecting computer {0} as it has been idle for {1}",
new Object[]{c.getName(), Util.getTimeSpanString(idleMilliseconds)});
c.disconnect(OfflineCause.create(Messages._RetentionStrategy_Demand_OfflineIdle()));
}
}
return 1;
}
@Extension
public static class DescriptorImpl extends Descriptor<RetentionStrategy<?>> {
public String getDisplayName() {
return Messages.RetentionStrategy_Demand_displayName();
}
}
}
}
| mit |
andre-nunes/fenixedu-academic | src/main/java/org/fenixedu/academic/ui/struts/action/student/administrativeOfficeServices/ViewDocumentRequestsDA.java | 5798 | /**
* Copyright © 2002 Instituto Superior Técnico
*
* This file is part of FenixEdu Academic.
*
* FenixEdu Academic is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* FenixEdu Academic is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with FenixEdu Academic. If not, see <http://www.gnu.org/licenses/>.
*/
package org.fenixedu.academic.ui.struts.action.student.administrativeOfficeServices;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.DynaActionForm;
import org.fenixedu.academic.domain.exceptions.DomainException;
import org.fenixedu.academic.domain.exceptions.DomainExceptionWithLabelFormatter;
import org.fenixedu.academic.domain.serviceRequests.AcademicServiceRequest;
import org.fenixedu.academic.domain.student.Registration;
import org.fenixedu.academic.predicate.AccessControl;
import org.fenixedu.academic.service.services.exceptions.FenixServiceException;
import org.fenixedu.academic.ui.struts.action.base.FenixDispatchAction;
import org.fenixedu.bennu.struts.annotations.Forward;
import org.fenixedu.bennu.struts.annotations.Forwards;
import org.fenixedu.bennu.struts.annotations.Mapping;
import org.fenixedu.bennu.struts.portal.EntryPoint;
import org.fenixedu.bennu.struts.portal.StrutsFunctionality;
import pt.ist.fenixframework.FenixFramework;
@StrutsFunctionality(app = StudentAcademicOfficeServices.class, path = "view-document-requests",
titleKey = "documents.requirement.consult")
@Mapping(path = "/viewDocumentRequests", module = "student", formBean = "documentRequestCreateForm")
@Forwards(value = {
@Forward(name = "viewDocumentRequests",
path = "/student/administrativeOfficeServices/documentRequest/viewDocumentRequests.jsp"),
@Forward(name = "prepareCancelAcademicServiceRequest",
path = "/student/administrativeOfficeServices/documentRequest/prepareCancelAcademicServiceRequest.jsp"),
@Forward(name = "cancelSuccess", path = "/student/administrativeOfficeServices/documentRequest/cancelSuccess.jsp"),
@Forward(name = "viewDocumentRequest",
path = "/student/administrativeOfficeServices/documentRequest/viewDocumentRequest.jsp") })
public class ViewDocumentRequestsDA extends FenixDispatchAction {
@EntryPoint
public ActionForward viewDocumentRequests(ActionMapping mapping, ActionForm actionForm, HttpServletRequest request,
HttpServletResponse response) {
request.setAttribute("student", getLoggedPerson(request).getStudent());
request.setAttribute("documentRequests", getDocumentRequest());
return mapping.findForward("viewDocumentRequests");
}
private List<AcademicServiceRequest> getDocumentRequest() {
final List<AcademicServiceRequest> result = new ArrayList<AcademicServiceRequest>();
for (final Registration registration : AccessControl.getPerson().getStudent().getRegistrationsSet()) {
result.addAll(registration.getAcademicServiceRequestsSet());
}
return result;
}
public ActionForward viewDocumentRequest(ActionMapping mapping, ActionForm actionForm, HttpServletRequest request,
HttpServletResponse response) {
request.setAttribute("documentRequest", FenixFramework.getDomainObject(request.getParameter("documentRequestId")));
return mapping.findForward("viewDocumentRequest");
}
public ActionForward prepareCancelAcademicServiceRequest(ActionMapping mapping, ActionForm actionForm,
HttpServletRequest request, HttpServletResponse response) throws FenixServiceException {
getAndSetAcademicServiceRequest(request);
return mapping.findForward("prepareCancelAcademicServiceRequest");
}
public ActionForward cancelAcademicServiceRequest(ActionMapping mapping, ActionForm actionForm, HttpServletRequest request,
HttpServletResponse response) throws FenixServiceException {
final AcademicServiceRequest academicServiceRequest = getAndSetAcademicServiceRequest(request);
final String justification = ((DynaActionForm) actionForm).getString("justification");
try {
academicServiceRequest.cancel(justification);
} catch (DomainExceptionWithLabelFormatter ex) {
addActionMessage(request, ex.getKey(), solveLabelFormatterArgs(request, ex.getLabelFormatterArgs()));
return mapping.findForward("prepareCancelAcademicServiceRequest");
} catch (DomainException ex) {
addActionMessage(request, ex.getKey());
return mapping.findForward("prepareCancelAcademicServiceRequest");
}
return mapping.findForward("cancelSuccess");
}
private AcademicServiceRequest getAndSetAcademicServiceRequest(final HttpServletRequest request) {
final AcademicServiceRequest academicServiceRequest =
FenixFramework.getDomainObject(request.getParameter("academicServiceRequestId"));
request.setAttribute("academicServiceRequest", academicServiceRequest);
return academicServiceRequest;
}
}
| lgpl-3.0 |
vgmartinez/incubator-zeppelin | lens/src/main/java/org/apache/zeppelin/lens/LensInterpreter.java | 15989 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.lens;
import java.util.List;
import java.util.Properties;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import java.util.Map;
import java.util.LinkedHashMap;
import java.util.concurrent.ConcurrentHashMap;
import java.io.ByteArrayOutputStream;
import org.apache.lens.client.LensClient;
import org.apache.lens.client.LensClientConfig;
import org.apache.lens.client.LensClientSingletonWrapper;
import org.apache.lens.cli.commands.BaseLensCommand;
import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResult.Code;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.scheduler.Scheduler;
import org.apache.zeppelin.scheduler.SchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.shell.Bootstrap;
import org.springframework.shell.core.CommandResult;
import org.springframework.shell.core.JLineShell;
import org.springframework.shell.core.JLineShellComponent;
import org.springframework.shell.support.logging.HandlerUtils;
/**
* Lens interpreter for Zeppelin.
*/
public class LensInterpreter extends Interpreter {
static final Logger s_logger = LoggerFactory.getLogger(LensInterpreter.class);
static final String LENS_CLIENT_DBNAME = "lens.client.dbname";
static final String LENS_SERVER_URL = "lens.server.base.url";
static final String LENS_SESSION_CLUSTER_USER = "lens.session.cluster.user";
static final String LENS_PERSIST_RESULTSET = "lens.query.enable.persistent.resultset";
static final String ZEPPELIN_LENS_RUN_CONCURRENT_SESSION = "zeppelin.lens.run.concurrent";
static final String ZEPPELIN_LENS_CONCURRENT_SESSIONS = "zeppelin.lens.maxThreads";
static final String ZEPPELIN_MAX_ROWS = "zeppelin.lens.maxResults";
static final Map<String, Pattern> LENS_TABLE_FORMAT_REGEX = new LinkedHashMap<String, Pattern>() {
{
put("cubes", Pattern.compile(".*show\\s+cube.*"));
put("nativetables", Pattern.compile(".*show\\s+nativetable.*"));
put("storages", Pattern.compile(".*show\\s+storage.*"));
put("facts", Pattern.compile(".*show\\s+fact.*"));
put("dimensions", Pattern.compile(".*show\\s+dimension.*"));
put("params", Pattern.compile(".*show\\s+param.*"));
put("databases", Pattern.compile(".*show\\s+database.*"));
put("query results", Pattern.compile(".*query\\s+results.*"));
}
};
private static Pattern s_queryExecutePattern = Pattern.compile(".*query\\s+execute\\s+(.*)");
private static Map<String, ExecutionDetail> s_paraToQH =
new ConcurrentHashMap<> (); //tracks paragraphId -> Lens QueryHandle
private static Map<LensClient, Boolean> s_clientMap =
new ConcurrentHashMap<>();
private int m_maxResults;
private int m_maxThreads;
private JLineShell m_shell;
private LensClientConfig m_lensConf;
private Bootstrap m_bs;
private LensClient m_lensClient;
public LensInterpreter(Properties property) {
super(property);
try {
m_lensConf = new LensClientConfig();
m_lensConf.set(LENS_SERVER_URL, property.get(LENS_SERVER_URL).toString());
m_lensConf.set(LENS_CLIENT_DBNAME, property.get(LENS_CLIENT_DBNAME).toString());
m_lensConf.set(LENS_SESSION_CLUSTER_USER, property.get(LENS_SESSION_CLUSTER_USER).toString());
m_lensConf.set(LENS_PERSIST_RESULTSET, property.get(LENS_PERSIST_RESULTSET).toString());
try {
m_maxResults = Integer.parseInt(property.get(ZEPPELIN_MAX_ROWS).toString());
} catch (NumberFormatException|NullPointerException e) {
m_maxResults = 1000;
s_logger.error("unable to parse " + ZEPPELIN_MAX_ROWS + " :"
+ property.get(ZEPPELIN_MAX_ROWS), e);
}
try {
m_maxThreads = Integer.parseInt(property.get(ZEPPELIN_LENS_CONCURRENT_SESSIONS).toString());
} catch (NumberFormatException|NullPointerException e) {
m_maxThreads = 10;
s_logger.error("unable to parse " + ZEPPELIN_LENS_CONCURRENT_SESSIONS + " :"
+ property.get(ZEPPELIN_LENS_CONCURRENT_SESSIONS), e);
}
s_logger.info("LensInterpreter created");
}
catch (Exception e) {
s_logger.error(e.toString(), e);
s_logger.error("unable to create lens interpreter", e);
}
}
private Bootstrap createBootstrap() {
return new LensBootstrap();
}
private JLineShell getJLineShell(Bootstrap bs) {
if (bs instanceof LensBootstrap) {
return ((LensBootstrap) bs).getLensJLineShellComponent();
} else {
return bs.getJLineShellComponent();
}
}
protected void init() {
try {
m_bs = createBootstrap();
m_shell = getJLineShell(m_bs);
} catch (Exception ex) {
s_logger.error("could not initialize commandLine", ex);
}
}
@Override
public void open() {
s_logger.info("LensInterpreter opening");
m_lensClient = new LensClient(m_lensConf);
LensClientSingletonWrapper.instance().setClient(m_lensClient);
init();
s_logger.info("LensInterpreter opened");
}
@Override
public void close() {
closeConnections();
s_logger.info("LensInterpreter closed");
}
private static void closeConnections() {
for (LensClient cl : s_clientMap.keySet()) {
if (cl.isConnectionOpen()) {
closeLensClient(cl);
}
}
}
private static void closeLensClient(LensClient lensClient) {
try {
lensClient.closeConnection();
} catch (Exception e) {
s_logger.error("unable to close lensClient", e);
}
}
private LensClient createAndSetLensClient(Bootstrap bs) {
LensClient lensClient = null;
try {
lensClient = new LensClient(m_lensConf);
for (String beanName : bs.getApplicationContext().getBeanDefinitionNames()) {
if (bs.getApplicationContext().getBean(beanName) instanceof BaseLensCommand) {
((BaseLensCommand) bs.getApplicationContext().getBean(beanName))
.setClient(lensClient);
}
}
} catch (Exception e) {
s_logger.error("unable to create lens client", e);
throw e;
}
return lensClient;
}
private InterpreterResult HandleHelp(JLineShell shell, String st) {
java.util.logging.StreamHandler sh = null;
java.util.logging.Logger springLogger = null;
java.util.logging.Formatter formatter = new java.util.logging.Formatter() {
public String format(java.util.logging.LogRecord record) {
return record.getMessage();
}
};
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
sh = new java.util.logging.StreamHandler(baos, formatter);
springLogger = HandlerUtils.getLogger(org.springframework.shell.core.SimpleParser.class);
springLogger.addHandler(sh);
shell.executeCommand(st);
} catch (Exception e) {
s_logger.error(e.getMessage(), e);
return new InterpreterResult(Code.ERROR, e.getMessage());
}
finally {
sh.flush();
springLogger.removeHandler(sh);
sh.close();
}
return new InterpreterResult(Code.SUCCESS, baos.toString());
}
private String modifyQueryStatement(String st) {
Matcher matcher = s_queryExecutePattern.matcher(st.toLowerCase());
if (!matcher.find()) {
return st;
}
StringBuilder sb = new StringBuilder("query execute ");
if (!st.toLowerCase().matches(".*--async\\s+true")) {
sb.append("--async true ");
}
sb.append(matcher.group(1));
if (!st.toLowerCase().matches(".*limit\\s+\\d+.*")) {
sb.append(" limit ");
sb.append(m_maxResults);
}
return sb.toString();
}
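  // Illustrative sketch (hypothetical statement, not in the original source):
  //   modifyQueryStatement("query execute cube select msr2 from sample_cube")
  //   returns "query execute --async true cube select msr2 from sample_cube limit 1000"
  // because the input contains neither an "--async true" flag nor a "limit" clause
  // (m_maxResults defaults to 1000).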
@Override
public InterpreterResult interpret(String input, InterpreterContext context) {
if (input == null || input.length() == 0) {
return new InterpreterResult(Code.ERROR, "no command submitted");
}
String st = input.replaceAll("\\n", " ");
s_logger.info("LensInterpreter command: " + st);
Bootstrap bs = createBootstrap();
JLineShell shell = getJLineShell(bs);
CommandResult res = null;
LensClient lensClient = null;
String qh = null;
if (st.trim().startsWith("help")) {
return HandleHelp(shell, st);
}
try {
lensClient = createAndSetLensClient(bs);
s_clientMap.put(lensClient, true);
String lensCommand = modifyQueryStatement(st);
s_logger.info("executing command : " + lensCommand);
res = shell.executeCommand(lensCommand);
if (!lensCommand.equals(st) && res != null
&& res.getResult() != null
&& res.getResult().toString().trim().matches("[a-z0-9-]+")) {
// setup query progress tracking
qh = res.getResult().toString();
s_paraToQH.put(context.getParagraphId(),
new ExecutionDetail(qh, lensClient, shell));
String getResultsCmd = "query results --async false " + qh;
s_logger.info("executing query results command : " + context.getParagraphId()
+ " : " + getResultsCmd);
res = shell.executeCommand(getResultsCmd);
s_paraToQH.remove(context.getParagraphId());
}
} catch (Exception ex) {
s_logger.error("error in interpret", ex);
return new InterpreterResult(Code.ERROR, ex.getMessage());
}
finally {
if (shell != null) {
closeShell(shell);
}
if (lensClient != null) {
closeLensClient(lensClient);
s_clientMap.remove(lensClient);
}
if (qh != null) {
s_paraToQH.remove(context.getParagraphId());
}
}
return new InterpreterResult(Code.SUCCESS, formatResult(st, res));
}
private void closeShell(JLineShell shell) {
if (shell instanceof LensJLineShellComponent) {
((LensJLineShellComponent) shell).stop();
} else {
((JLineShellComponent) shell).stop();
}
}
private String formatResult(String st, CommandResult result) {
if (result == null) {
return "error in interpret, no result object returned";
}
if (!result.isSuccess() || result.getResult() == null) {
if (result.getException() != null) {
return result.getException().getMessage();
//try describe cube (without cube name)- error is written as a warning,
//but not returned to result object
} else {
return "error in interpret, unable to execute command";
}
}
StringBuilder sb = new StringBuilder();
for (Map.Entry<String, Pattern> entry : LENS_TABLE_FORMAT_REGEX.entrySet()) {
if (entry.getValue().matcher(st.toLowerCase()).find()) {
sb.append("%table " + entry.getKey() + " \n");
break;
}
}
if (s_queryExecutePattern.matcher(st.toLowerCase()).find() &&
result.getResult().toString().contains(" rows process in (")) {
sb.append("%table ");
}
if (sb.length() > 0) {
return sb.append(result.getResult().toString()).toString();
}
return result.getResult().toString();
//Lens sends error messages without setting result.isSuccess() = false.
}
@Override
public void cancel(InterpreterContext context) {
if (!s_paraToQH.containsKey(context.getParagraphId())) {
s_logger.error("ignoring cancel from " + context.getParagraphId());
return;
}
String qh = s_paraToQH.get(context.getParagraphId()).getQueryHandle();
s_logger.info("preparing to cancel : (" + context.getParagraphId() + ") :" + qh);
Bootstrap bs = createBootstrap();
JLineShell shell = getJLineShell(bs);
LensClient lensClient = null;
try {
lensClient = createAndSetLensClient(bs);
s_clientMap.put(lensClient, true);
s_logger.info("invoke query kill (" + context.getParagraphId() + ") " + qh);
CommandResult res = shell.executeCommand("query kill " + qh);
s_logger.info("query kill returned (" + context.getParagraphId() + ") " + qh
+ " with: " + res.getResult());
} catch (Exception e) {
s_logger.error("unable to kill query ("
+ context.getParagraphId() + ") " + qh, e);
} finally {
try {
if (lensClient != null) {
closeLensClient(lensClient);
s_clientMap.remove(lensClient);
}
closeLensClient(s_paraToQH.get(context.getParagraphId()).getLensClient());
closeShell(s_paraToQH.get(context.getParagraphId()).getShell());
} catch (Exception e) {
// ignore
s_logger.info("Exception in LensInterpreter while cancel finally, ignore", e);
}
s_paraToQH.remove(context.getParagraphId());
closeShell(shell);
}
}
@Override
public FormType getFormType() {
return FormType.SIMPLE;
}
@Override
public int getProgress(InterpreterContext context) {
if (s_paraToQH.containsKey(context.getParagraphId())) {
s_logger.info("number of items for which progress can be reported :" + s_paraToQH.size());
s_logger.info("number of open lensclient :" + s_clientMap.size());
Bootstrap bs = createBootstrap();
JLineShell shell = getJLineShell(bs);
LensClient lensClient = null;
String qh = s_paraToQH.get(context.getParagraphId()).getQueryHandle();
try {
s_logger.info("fetch query status for : (" + context.getParagraphId() + ") :" + qh);
lensClient = createAndSetLensClient(bs);
s_clientMap.put(lensClient, true);
CommandResult res = shell.executeCommand("query status " + qh);
s_logger.info(context.getParagraphId() + " --> " + res.getResult().toString());
//change to debug
Pattern pattern = Pattern.compile(".*(Progress : (\\d\\.\\d)).*");
Matcher matcher = pattern.matcher(res.getResult().toString().replaceAll("\\n", " "));
if (matcher.find(2)) {
Double d = Double.parseDouble(matcher.group(2)) * 100;
if (d.intValue() == 100) {
s_paraToQH.remove(context.getParagraphId());
}
return d.intValue();
} else {
return 1;
}
}
catch (Exception e) {
s_logger.error("unable to get progress for (" + context.getParagraphId() + ") :" + qh, e);
s_paraToQH.remove(context.getParagraphId());
return 0;
} finally {
if (lensClient != null) {
closeLensClient(lensClient);
s_clientMap.remove(lensClient);
}
if (shell != null) {
closeShell(shell);
}
}
}
return 0;
}
@Override
public List<InterpreterCompletion> completion(String buf, int cursor) {
return null;
}
public boolean concurrentRequests() {
return Boolean.parseBoolean(getProperty(ZEPPELIN_LENS_RUN_CONCURRENT_SESSION));
}
@Override
public Scheduler getScheduler() {
if (concurrentRequests()) {
return SchedulerFactory.singleton().createOrGetParallelScheduler(
LensInterpreter.class.getName() + this.hashCode(), m_maxThreads);
} else {
return super.getScheduler();
}
}
}
| apache-2.0 |
zhujainxipan/colorweibo | weibo/pulltorefresh-library/gen/com/handmark/pulltorefresh/library/Manifest.java | 204 | /*___Generated_by_IDEA___*/
package com.handmark.pulltorefresh.library;
/* This stub is only used by the IDE. It is NOT the Manifest class actually packed into the APK */
public final class Manifest {
} | apache-2.0 |
matrix-stone/cobarclient | src/main/java/com/alibaba/cobar/client/datasources/ha/PassiveEventHotSwappableAdvice.java | 6411 | /**
* Copyright 1999-2011 Alibaba Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.cobar.client.datasources.ha;
import java.sql.SQLException;
import javax.sql.DataSource;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.aop.target.HotSwappableTargetSource;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataAccessResourceFailureException;
import org.springframework.jdbc.support.SQLStateSQLExceptionTranslator;
public class PassiveEventHotSwappableAdvice implements MethodInterceptor, InitializingBean {
private final transient Logger logger = LoggerFactory
.getLogger(PassiveEventHotSwappableAdvice.class);
private static final Integer DEFAULT_RETRY_TIMES = 3;
private SQLStateSQLExceptionTranslator sqlExTranslator = new SQLStateSQLExceptionTranslator();
/**
     * Threshold indicating after how many swaps we will stop hot swapping between
     * HA data sources.<br>
     * The default behavior is to always swap (with the threshold value being
     * Integer.MAX_VALUE).
*/
private Integer swapTimesThreshold = Integer.MAX_VALUE;
/**
     * In fact, this is not necessary since DataSource implementations like C3P0
     * or DBCP have properties to enable connection retry or recovery. As long as
     * you configure the underlying data source implementation instances, they
     * will do this job for you.
*/
private Integer retryTimes = DEFAULT_RETRY_TIMES;
/**
     * retry interval, in milliseconds
*/
private long retryInterval = 1000;
private String detectingSql = "SELECT 1";
private HotSwappableTargetSource targetSource;
private DataSource mainDataSource;
private DataSource standbyDataSource;
public Object invoke(MethodInvocation invocation) throws Throwable {
if (!StringUtils.equalsIgnoreCase(invocation.getMethod().getName(), "getConnection")) {
return invocation.proceed();
}
try {
return invocation.proceed();
// need to check with detecting sql?
} catch (Throwable t) {
if (t instanceof SQLException) {
                // we use SQLStateSQLExceptionTranslator to translate SQLExceptions, but that doesn't guarantee it will work as expected,
                // so maybe a wider scope should be covered. We will check later with runtime data statistics.
DataAccessException dae = sqlExTranslator.translate(
"translate to check whether it's a resource failure exception", null,
(SQLException) t);
if (dae instanceof DataAccessResourceFailureException) {
logger.warn("failed to get Connection from data source with exception:\n{}", t);
doSwap();
return invocation.getMethod().invoke(targetSource.getTarget(),
invocation.getArguments());
}
}
            // other exception conditions should be handled by the application,
            // because we don't have enough context information to decide what to do here.
throw t;
}
}
private void doSwap() {
synchronized (targetSource) {
DataSource target = (DataSource) getTargetSource().getTarget();
if (target == mainDataSource) {
logger.warn("hot swap from '" + target + "' to '" + standbyDataSource + "'.");
getTargetSource().swap(standbyDataSource);
} else {
logger.warn("hot swap from '" + target + "' to '" + mainDataSource + "'.");
getTargetSource().swap(mainDataSource);
}
}
}
public Integer getSwapTimesThreshold() {
return swapTimesThreshold;
}
public void setSwapTimesThreshold(Integer swapTimesThreshold) {
this.swapTimesThreshold = swapTimesThreshold;
}
public HotSwappableTargetSource getTargetSource() {
return targetSource;
}
public void setTargetSource(HotSwappableTargetSource targetSource) {
this.targetSource = targetSource;
}
public void afterPropertiesSet() throws Exception {
if (targetSource == null || mainDataSource == null || standbyDataSource == null) {
throw new IllegalArgumentException(
"the target source, main data source and standby data source must be set.");
}
}
public void setRetryTimes(Integer retryTimes) {
this.retryTimes = retryTimes;
}
public Integer getRetryTimes() {
return retryTimes;
}
public DataSource getMainDataSource() {
return mainDataSource;
}
public void setMainDataSource(DataSource mainDataSource) {
this.mainDataSource = mainDataSource;
}
public DataSource getStandbyDataSource() {
return standbyDataSource;
}
public void setStandbyDataSource(DataSource standbyDataSource) {
this.standbyDataSource = standbyDataSource;
}
public void setRetryInterval(long retryInterval) {
this.retryInterval = retryInterval;
}
public long getRetryInterval() {
return retryInterval;
}
public void setDetectingSql(String detectingSql) {
this.detectingSql = detectingSql;
}
public String getDetectingSql() {
return detectingSql;
}
}
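/*
 * Hedged usage sketch (not part of the original source): one possible way to wire this advice
 * around a Spring HotSwappableTargetSource so that failed getConnection() calls trigger a swap
 * between the main and standby data sources. The data source variables are assumptions for
 * illustration only; ProxyFactory is org.springframework.aop.framework.ProxyFactory.
 *
 *   HotSwappableTargetSource targetSource = new HotSwappableTargetSource(mainDataSource);
 *
 *   PassiveEventHotSwappableAdvice advice = new PassiveEventHotSwappableAdvice();
 *   advice.setTargetSource(targetSource);
 *   advice.setMainDataSource(mainDataSource);
 *   advice.setStandbyDataSource(standbyDataSource);
 *   advice.afterPropertiesSet();
 *
 *   ProxyFactory proxyFactory = new ProxyFactory();
 *   proxyFactory.setInterfaces(DataSource.class);
 *   proxyFactory.setTargetSource(targetSource);
 *   proxyFactory.addAdvice(advice);
 *   DataSource haDataSource = (DataSource) proxyFactory.getProxy();
 */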
| apache-2.0 |
mbaluch/keycloak | saml-core/src/main/java/org/keycloak/saml/processing/core/parsers/saml/SAMLStatusResponseTypeParser.java | 8336 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.saml.processing.core.parsers.saml;
import org.keycloak.dom.saml.v2.protocol.StatusCodeType;
import org.keycloak.dom.saml.v2.protocol.StatusDetailType;
import org.keycloak.dom.saml.v2.protocol.StatusResponseType;
import org.keycloak.dom.saml.v2.protocol.StatusType;
import org.keycloak.saml.common.PicketLinkLogger;
import org.keycloak.saml.common.PicketLinkLoggerFactory;
import org.keycloak.saml.common.constants.JBossSAMLConstants;
import org.keycloak.saml.common.exceptions.ParsingException;
import org.keycloak.saml.common.util.StaxParserUtil;
import org.keycloak.saml.common.util.StringUtil;
import org.keycloak.saml.processing.core.saml.v2.util.XMLTimeUtil;
import org.w3c.dom.Element;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.events.Attribute;
import javax.xml.stream.events.EndElement;
import javax.xml.stream.events.StartElement;
import javax.xml.stream.events.XMLEvent;
import java.net.URI;
/**
* Base Class for all Response Type parsing for SAML2
*
* @author Anil.Saldhana@redhat.com
* @since Nov 2, 2010
*/
public abstract class SAMLStatusResponseTypeParser {
protected static final PicketLinkLogger logger = PicketLinkLoggerFactory.getLogger();
/**
* Parse the attributes that are common to all SAML Response Types
*
* @param startElement
     * @return the parsed {@link StatusResponseType}
*
* @throws org.keycloak.saml.common.exceptions.ParsingException
*/
protected StatusResponseType parseBaseAttributes(StartElement startElement) throws ParsingException {
Attribute idAttr = startElement.getAttributeByName(new QName(JBossSAMLConstants.ID.get()));
if (idAttr == null)
throw logger.parserRequiredAttribute("ID");
String id = StaxParserUtil.getAttributeValue(idAttr);
Attribute version = startElement.getAttributeByName(new QName(JBossSAMLConstants.VERSION.get()));
if (version == null)
throw logger.parserRequiredAttribute("Version");
StringUtil.match(JBossSAMLConstants.VERSION_2_0.get(), StaxParserUtil.getAttributeValue(version));
Attribute issueInstant = startElement.getAttributeByName(new QName(JBossSAMLConstants.ISSUE_INSTANT.get()));
if (issueInstant == null)
throw logger.parserRequiredAttribute("IssueInstant");
XMLGregorianCalendar issueInstantVal = XMLTimeUtil.parse(StaxParserUtil.getAttributeValue(issueInstant));
StatusResponseType response = new StatusResponseType(id, issueInstantVal);
Attribute destination = startElement.getAttributeByName(new QName(JBossSAMLConstants.DESTINATION.get()));
if (destination != null)
response.setDestination(StaxParserUtil.getAttributeValue(destination));
Attribute consent = startElement.getAttributeByName(new QName(JBossSAMLConstants.CONSENT.get()));
if (consent != null)
response.setConsent(StaxParserUtil.getAttributeValue(consent));
Attribute inResponseTo = startElement.getAttributeByName(new QName(JBossSAMLConstants.IN_RESPONSE_TO.get()));
if (inResponseTo != null)
response.setInResponseTo(StaxParserUtil.getAttributeValue(inResponseTo));
return response;
}
/**
* Parse the status element
*
* @param xmlEventReader
*
     * @return the parsed {@link StatusType}
*
* @throws ParsingException
*/
protected StatusType parseStatus(XMLEventReader xmlEventReader) throws ParsingException {
// Get the Start Element
StartElement startElement = StaxParserUtil.getNextStartElement(xmlEventReader);
String STATUS = JBossSAMLConstants.STATUS.get();
StaxParserUtil.validate(startElement, STATUS);
StatusType status = new StatusType();
while (xmlEventReader.hasNext()) {
startElement = StaxParserUtil.peekNextStartElement(xmlEventReader);
if (startElement == null)
break;
QName startElementName = startElement.getName();
String elementTag = startElementName.getLocalPart();
StatusCodeType statusCode = new StatusCodeType();
if (JBossSAMLConstants.STATUS_CODE.get().equals(elementTag)) {
startElement = StaxParserUtil.getNextStartElement(xmlEventReader);
if (startElement == null)
break;
Attribute valueAttr = startElement.getAttributeByName(new QName("Value"));
if (valueAttr != null) {
statusCode.setValue(URI.create(StaxParserUtil.getAttributeValue(valueAttr)));
}
status.setStatusCode(statusCode);
// Peek at the next start element to see if it is status code
startElement = StaxParserUtil.peekNextStartElement(xmlEventReader);
if (startElement == null) {
// Go to Status code end element.
EndElement endElement = StaxParserUtil.getNextEndElement(xmlEventReader);
if (endElement != null) {
StaxParserUtil.validate(endElement, JBossSAMLConstants.STATUS_CODE.get());
}
continue;
}
elementTag = startElement.getName().getLocalPart();
if (JBossSAMLConstants.STATUS_CODE.get().equals(elementTag)) {
StatusCodeType subStatusCodeType = new StatusCodeType();
startElement = StaxParserUtil.getNextStartElement(xmlEventReader);
Attribute subValueAttr = startElement.getAttributeByName(new QName("Value"));
if (subValueAttr != null) {
subStatusCodeType.setValue(URI.create(StaxParserUtil.getAttributeValue(subValueAttr)));
}
statusCode.setStatusCode(subStatusCodeType);
// Go to Status code end element.
EndElement endElement = StaxParserUtil.getNextEndElement(xmlEventReader);
StaxParserUtil.validate(endElement, JBossSAMLConstants.STATUS_CODE.get());
continue;
}
}
if (JBossSAMLConstants.STATUS_MESSAGE.get().equals(elementTag)) {
startElement = StaxParserUtil.getNextStartElement(xmlEventReader);
if (startElement == null)
break;
status.setStatusMessage(StaxParserUtil.getElementText(xmlEventReader));
}
if (JBossSAMLConstants.STATUS_DETAIL.get().equals(elementTag)) {
startElement = StaxParserUtil.getNextStartElement(xmlEventReader);
if (startElement == null)
break;
Element domElement = StaxParserUtil.getDOMElement(xmlEventReader);
StatusDetailType statusDetailType = new StatusDetailType();
statusDetailType.addStatusDetail(domElement);
status.setStatusDetail(statusDetailType);
}
// Get the next end element
XMLEvent xmlEvent = StaxParserUtil.peek(xmlEventReader);
if (xmlEvent instanceof EndElement) {
EndElement endElement = StaxParserUtil.getNextEndElement(xmlEventReader);
if (StaxParserUtil.matches(endElement, STATUS))
break;
else
throw logger.parserUnknownEndElement(StaxParserUtil.getEndElementName(endElement));
} else
break;
}
return status;
}
} | apache-2.0 |
bluerover/6lbr | tools/cooja/java/org/contikios/cooja/SimEventCentral.java | 11517 | /*
* Copyright (c) 2009, Swedish Institute of Computer Science. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer. 2. Redistributions in
* binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other
* materials provided with the distribution. 3. Neither the name of the
* Institute nor the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
package org.contikios.cooja;
import java.io.File;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Observable;
import java.util.Observer;
import java.util.Vector;
import org.apache.log4j.Logger;
import org.jdom.Element;
import org.contikios.cooja.MoteType.MoteTypeCreationException;
import org.contikios.cooja.interfaces.Log;
import org.contikios.cooja.util.ArrayUtils;
/**
* Simulation event central. Simplifies implementations of plugins that observe
* motes and mote interfaces by keeping track of added and removed motes. For a
* selected set of interfaces, the event central also maintains an event
* history.
*
* @see LogOutputEvent
* @author Fredrik Osterlind
*/
public class SimEventCentral {
private static Logger logger = Logger.getLogger(SimEventCentral.class);
private Simulation simulation;
public SimEventCentral(Simulation simulation) {
this.simulation = simulation;
/* Default buffer sizes */
logOutputBufferSize = Integer.parseInt(Cooja.getExternalToolsSetting("BUFFERSIZE_LOGOUTPUT", "" + 40000));
moteObservations = new ArrayList<MoteObservation>();
/* Mote count: notifications */
moteCountListeners = new MoteCountListener[0];
/* Log output: notifications and history */
logOutputListeners = new LogOutputListener[0];
logOutputEvents = new ArrayDeque<LogOutputEvent>();
}
/* GENERIC */
private static class MoteEvent {
public static int _ID_COUNTER = 0; /* Debugging */
public final int ID; /* Debugging */
private final Mote mote;
private final long time;
public MoteEvent(Mote mote, long time) {
ID = _ID_COUNTER++;
this.mote = mote;
this.time = time;
}
public Mote getMote() {
return mote;
}
public long getTime() {
return time;
}
public String toString() {
return "" + ID;
}
}
/** Help class for maintaining mote-specific observations */
private static class MoteObservation {
private final Mote mote;
private final Observable observable;
private final Observer observer;
public MoteObservation(Mote mote, Observable observable, Observer observer) {
this.mote = mote;
this.observable = observable;
this.observer = observer;
observable.addObserver(observer);
}
public Mote getMote() {
return mote;
}
public Observer getObserver() {
return observer;
}
public void disconnect() {
observable.deleteObserver(observer);
}
}
private ArrayList<MoteObservation> moteObservations;
/* ADDED/REMOVED MOTES */
public interface MoteCountListener {
public void moteWasAdded(Mote mote);
public void moteWasRemoved(Mote mote);
}
private MoteCountListener[] moteCountListeners;
private Observer moteCountObserver = new Observer() {
public void update(Observable obs, Object obj) {
if (obj == null || !(obj instanceof Mote)) {
return;
}
Mote evMote = (Mote) obj;
/* Check whether mote was added or removed */
Mote[] allMotes = simulation.getMotes();
boolean exists = false;
for (Mote m: allMotes) {
if (m == evMote) {
exists = true;
break;
}
}
if (exists) {
/* Mote was added */
moteWasAdded(evMote);
/* Notify external listeners */
for (MoteCountListener l: moteCountListeners) {
l.moteWasAdded(evMote);
}
} else {
/* Mote was removed */
moteWasRemoved(evMote);
/* Notify external listeners */
for (MoteCountListener l: moteCountListeners) {
l.moteWasRemoved(evMote);
}
}
}
};
public void addMoteCountListener(MoteCountListener listener) {
if (moteCountListeners.length == 0) {
/* Observe simulation for added/removed motes */
simulation.addObserver(moteCountObserver);
}
moteCountListeners = ArrayUtils.add(moteCountListeners, listener);
}
public void removeMoteCountListener(MoteCountListener listener) {
moteCountListeners = ArrayUtils.remove(moteCountListeners, listener);
if (moteCountListeners.length == 0) {
/* Stop observing simulation for added/removed motes */
simulation.deleteObserver(moteCountObserver);
}
}
/* LOG OUTPUT */
public static class LogOutputEvent extends MoteEvent {
public final String msg;
public LogOutputEvent(Mote mote, long time, String msg) {
super(mote, time);
this.msg = msg;
}
public String getMessage() {
return msg;
}
}
private int logOutputBufferSize;
private ArrayDeque<LogOutputEvent> logOutputEvents;
public interface LogOutputListener extends MoteCountListener {
public void removedLogOutput(LogOutputEvent ev);
public void newLogOutput(LogOutputEvent ev);
}
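  /*
   * Hedged usage sketch (not part of the original source): a plugin typically registers a
   * LogOutputListener through addLogOutputListener(...) below; the simulation variable and the
   * getEventCentral() accessor are assumed to be available to the caller.
   *
   *   simulation.getEventCentral().addLogOutputListener(new LogOutputListener() {
   *     public void moteWasAdded(Mote mote) { }
   *     public void moteWasRemoved(Mote mote) { }
   *     public void removedLogOutput(LogOutputEvent ev) { }
   *     public void newLogOutput(LogOutputEvent ev) {
   *       System.out.println(ev.getTime() + ": " + ev.getMessage());
   *     }
   *   });
   */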
private LogOutputListener[] logOutputListeners;
private Observer logOutputObserver = new Observer() {
public void update(Observable obs, Object obj) {
Mote mote = (Mote) obj;
String msg = ((Log) obs).getLastLogMessage();
if (msg == null) {
return;
}
if (msg.length() > 0 && msg.charAt(msg.length() - 1) == '\n') {
msg = msg.substring(0, msg.length() - 1);
}
/* We may have to remove some events now */
while (logOutputEvents.size() > logOutputBufferSize-1) {
LogOutputEvent removed;
synchronized (logOutputEvents) {
removed = logOutputEvents.pollFirst();
}
if (removed == null) {
break;
}
for (LogOutputListener l: logOutputListeners) {
l.removedLogOutput(removed);
}
}
/* Store log output, and notify listeners */
LogOutputEvent ev = new LogOutputEvent(mote, simulation.getSimulationTime(), msg);
synchronized (logOutputEvents) {
logOutputEvents.add(ev);
}
for (LogOutputListener l: logOutputListeners) {
l.newLogOutput(ev);
}
}
};
public void addLogOutputListener(LogOutputListener listener) {
if (logOutputListeners.length == 0) {
/* Start observing all log interfaces */
Mote[] motes = simulation.getMotes();
for (Mote m: motes) {
for (MoteInterface mi: m.getInterfaces().getInterfaces()) {
if (mi instanceof Log) {
moteObservations.add(new MoteObservation(m, mi, logOutputObserver));
}
}
}
}
logOutputListeners = ArrayUtils.add(logOutputListeners, listener);
addMoteCountListener(listener);
}
public void removeLogOutputListener(LogOutputListener listener) {
logOutputListeners = ArrayUtils.remove(logOutputListeners, listener);
removeMoteCountListener(listener);
if (logOutputListeners.length == 0) {
/* Stop observing all log interfaces */
MoteObservation[] observations = moteObservations.toArray(new MoteObservation[0]);
for (MoteObservation o: observations) {
if (o.getObserver() == logOutputObserver) {
o.disconnect();
moteObservations.remove(o);
}
}
/* Clear logs (TODO config) */
logOutputEvents.clear();
}
}
public LogOutputEvent[] getLogOutputHistory() {
synchronized (logOutputEvents) {
return logOutputEvents.toArray(new LogOutputEvent[0]);
}
}
public int getLogOutputBufferSize() {
return logOutputBufferSize;
}
public void setLogOutputBufferSize(int size) {
logOutputBufferSize = size;
/* We may have to remove some events now */
while (logOutputEvents.size() > logOutputBufferSize) {
LogOutputEvent removed = logOutputEvents.pollFirst();
if (removed == null) {
break;
}
for (LogOutputListener l: logOutputListeners) {
l.removedLogOutput(removed);
}
}
}
public int getLogOutputObservationsCount() {
int count=0;
MoteObservation[] observations = moteObservations.toArray(new MoteObservation[0]);
for (MoteObservation o: observations) {
if (o.getObserver() == logOutputObserver) {
count++;
}
}
return count;
}
/* HELP METHODS: MAINTAIN OBSERVERS */
private void moteWasAdded(Mote mote) {
if (logOutputListeners.length > 0) {
/* Add another log output observation.
* (Supports multiple log interfaces per mote) */
for (MoteInterface mi: mote.getInterfaces().getInterfaces()) {
if (mi instanceof Log) {
moteObservations.add(new MoteObservation(mote, mi, logOutputObserver));
}
}
}
/* ... */
}
private void moteWasRemoved(Mote mote) {
/* Disconnect and remove mote observations */
MoteObservation[] observations = moteObservations.toArray(new MoteObservation[0]);
for (MoteObservation o: observations) {
if (o.getMote() == mote) {
o.disconnect();
moteObservations.remove(o);
}
}
}
public String toString() {
return
"\nActive mote observations: " + moteObservations.size() +
"\n" +
"\nMote count listeners: " + moteCountListeners.length +
"\n" +
"\nLog output listeners: " + logOutputListeners.length +
"\nLog output history: " + logOutputEvents.size()
;
}
public Collection<Element> getConfigXML() {
ArrayList<Element> config = new ArrayList<Element>();
Element element;
/* Log output buffer size */
element = new Element("logoutput");
element.setText("" + logOutputBufferSize);
config.add(element);
return config;
}
public boolean setConfigXML(Simulation simulation,
Collection<Element> configXML, boolean visAvailable)
throws MoteTypeCreationException {
for (Element element : configXML) {
String name = element.getName();
if (name.equals("logoutput")) {
logOutputBufferSize = Integer.parseInt(element.getText());
}
}
return true;
}
}
| bsd-3-clause |
strahanjen/strahanjen.github.io | elasticsearch-master/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/date/DateRangeParser.java | 2190 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.range.date;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
import org.elasticsearch.search.aggregations.bucket.range.RangeParser;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import java.util.List;
import java.util.Map;
/**
 * Parser for the {@code date_range} aggregation.
*/
public class DateRangeParser extends RangeParser {
public DateRangeParser() {
super(true, true, true);
}
@Override
protected DateRangeAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
DateRangeAggregationBuilder factory = new DateRangeAggregationBuilder(aggregationName);
@SuppressWarnings("unchecked")
List<Range> ranges = (List<Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
for (Range range : ranges) {
factory.addRange(range);
}
Boolean keyed = (Boolean) otherOptions.get(RangeAggregator.KEYED_FIELD);
if (keyed != null) {
factory.keyed(keyed);
}
return factory;
}
}
| bsd-3-clause |
tsdl2013/DBFlow | DBFlow/src/main/java/com/raizlabs/android/dbflow/sql/queriable/AsyncQuery.java | 2798 | package com.raizlabs.android.dbflow.sql.queriable;
import android.database.Cursor;
import com.raizlabs.android.dbflow.runtime.DBTransactionInfo;
import com.raizlabs.android.dbflow.runtime.DBTransactionQueue;
import com.raizlabs.android.dbflow.runtime.TransactionManager;
import com.raizlabs.android.dbflow.runtime.transaction.QueryTransaction;
import com.raizlabs.android.dbflow.runtime.transaction.SelectListTransaction;
import com.raizlabs.android.dbflow.runtime.transaction.SelectSingleModelTransaction;
import com.raizlabs.android.dbflow.runtime.transaction.TransactionListener;
import com.raizlabs.android.dbflow.structure.Model;
import java.util.List;
/**
* Description: Adds async methods to a {@link ModelQueriable}
*/
public class AsyncQuery<ModelClass extends Model> {
private final ModelQueriable<ModelClass> modelQueriable;
private final TransactionManager transactionManager;
/**
* Constructs an instance of this async query.
*
* @param queriable The queriable object to use to query data.
* @param transactionManager The manager to run this query on
*/
public AsyncQuery(ModelQueriable<ModelClass> queriable, TransactionManager transactionManager) {
this.modelQueriable = queriable;
this.transactionManager = transactionManager;
}
/**
* Runs the specified query in the background.
*/
public void execute() {
transactionManager.addTransaction(new QueryTransaction<>(DBTransactionInfo.create(), modelQueriable));
}
/**
* Queries the list on the {@link DBTransactionQueue}
*
* @param transactionListener Listens for transaction events.
*/
public void queryList(TransactionListener<List<ModelClass>> transactionListener) {
transactionManager.addTransaction(new SelectListTransaction<>(modelQueriable, transactionListener));
}
/**
* Queries a single item on the {@link DBTransactionQueue}
*
* @param transactionListener Listens for transaction events.
*/
public void querySingle(TransactionListener<ModelClass> transactionListener) {
transactionManager.addTransaction(new SelectSingleModelTransaction<>(modelQueriable, transactionListener));
}
/**
* @return The table this Query is associated with.
*/
public Class<ModelClass> getTable() {
return modelQueriable.getTable();
}
/**
* Queries the raw {@link Cursor} object from the contained query.
*
* @param transactionListener Listens for transaction events.
*/
public void query(TransactionListener<Cursor> transactionListener) {
transactionManager.addTransaction(
new QueryTransaction<>(DBTransactionInfo.create(), modelQueriable, transactionListener));
}
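    /*
     * Hedged usage sketch (not part of the original source): the queriable, manager, and listeners
     * below are assumed to be obtained elsewhere; this only illustrates how the async methods are
     * invoked.
     *
     *   AsyncQuery<MyModel> asyncQuery = new AsyncQuery<>(modelQueriable, transactionManager);
     *   asyncQuery.execute();                   // fire-and-forget on the DBTransactionQueue
     *   asyncQuery.queryList(listListener);     // List<MyModel> delivered to a TransactionListener
     *   asyncQuery.querySingle(singleListener); // single MyModel delivered to a TransactionListener
     */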
}
| mit |
geothomasp/kualico-rice-kc | rice-middleware/core-service/framework/src/main/java/org/kuali/rice/coreservice/framework/parameter/ParameterService.java | 39842 | /**
* Copyright 2005-2015 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.coreservice.framework.parameter;
import org.kuali.rice.coreservice.api.parameter.Parameter;
import java.util.Collection;
/**
* This service is used by krad to interact with {@link Parameter Parameters}.
*
* <p>
* Generally krad client applications will want to use this service since it contains many convenient methods.
* </p>
*
* <p>
 * This service can be viewed as a convenient wrapper around the {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService}.
* Please see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService} for details on the behavior of this service.
* </p>
*/
public interface ParameterService {
/**
* This will create a {@link Parameter} exactly like the parameter passed in.
*
* @param parameter the {@link Parameter} to create.
* @return the created {@link Parameter}.
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#createParameter(org.kuali.rice.coreservice.api.parameter.Parameter)} for details
*/
Parameter createParameter(Parameter parameter);
/**
* This will update a {@link Parameter}.
*
* @param parameter the {@link Parameter} to update.
* @return the updated {@link Parameter}.
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#updateParameter(org.kuali.rice.coreservice.api.parameter.Parameter)} for details
*/
Parameter updateParameter(Parameter parameter);
/**
* Retrieves a parameter.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from the passed in namespace code</li>
* <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
     * @return the {@link Parameter} if it exists, otherwise null
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameter(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
Parameter getParameter(String namespaceCode, String componentCode, String parameterName);
/**
* Retrieves a parameter.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param componentClass the class with the namespace & component annotations
* @param parameterName the parameter name
* @return true or false
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameter(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
Parameter getParameter(Class<?> componentClass, String parameterName);
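    /*
     * Hedged usage sketch (not part of the original interface): both lookups below resolve the same
     * parameter key, one from explicit namespace/component codes and one from a component class that
     * carries the ParameterConstants annotations. The codes, class, and parameter name are
     * hypothetical and for illustration only.
     *
     *   Parameter p1 = parameterService.getParameter("KR-NS", "Document", "MAX_FILE_SIZE");
     *   Parameter p2 = parameterService.getParameter(DocumentComponent.class, "MAX_FILE_SIZE");
     */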
/**
* This method checks if a parameter exists, never returning null.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from the passed in namespace code</li>
* <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* <p>
* If the parameter does not exist under the application
* code, then this method will check if the parameter
* exists under the default rice application id and
* will return that parameter.
* </p>
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
* @return true or false
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*/
Boolean parameterExists(String namespaceCode, String componentCode, String parameterName);
/**
* This method checks if a parameter exists, never returning null.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* <p>
* If the parameter does not exist under the application
* code, then this method will check if the parameter
* exists under the default rice application id and
* will return that parameter.
* </p>
*
* @param componentClass the class with the namespace & component annotations
* @param parameterName the parameter name
* @return true or false
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*/
Boolean parameterExists(Class<?> componentClass, String parameterName);
/**
* Retrieves a parameter's boolean value, returning null if the parameter cannot be found or is not a boolean.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from the passed in namespace code</li>
* <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
* @return true, false, null
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValueAsBoolean(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
Boolean getParameterValueAsBoolean(String namespaceCode, String componentCode, String parameterName);
/**
* Retrieves a parameter's boolean value, returning {@code defaultValue} if the parameter cannot be found or is not
* a boolean.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from the passed in namespace code</li>
* <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
     * @param defaultValue the value to return if the parameter does not exist. Can be any Boolean value including null
* @return true, false, or the defaultValue
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValueAsBoolean(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
Boolean getParameterValueAsBoolean(String namespaceCode, String componentCode, String parameterName, Boolean defaultValue);
/**
* Retrieves a parameter's boolean value, returning null if the parameter cannot be found or is not a boolean.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param componentClass the class with the namespace & component annotations
* @param parameterName the parameter name
* @return true, false, null
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValueAsBoolean(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
Boolean getParameterValueAsBoolean(Class<?> componentClass, String parameterName);
/**
* Retrieves a parameter's boolean value, returning {@code defaultValue} if the parameter cannot be found or is not
* a boolean.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param componentClass the class with the namespace & component annotations
* @param parameterName the parameter name
     * @param defaultValue the value to return if the parameter does not exist. Can be any Boolean value including null
* @return true, false, or the defaultValue
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValueAsBoolean(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
Boolean getParameterValueAsBoolean(Class<?> componentClass, String parameterName, Boolean defaultValue);
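    /*
     * Hedged usage sketch (not part of the original interface): the defaultValue overloads avoid
     * null checks when a sensible fallback exists; the component class and parameter name are
     * hypothetical.
     *
     *   boolean indicatorOn = parameterService.getParameterValueAsBoolean(
     *           DocumentComponent.class, "SOME_INDICATOR", Boolean.FALSE);
     */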
/**
* Retrieves a parameter's string value, returning null if the parameter cannot be found.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from the passed in namespace code</li>
* <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
* @return string value or null
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValueAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
String getParameterValueAsString(String namespaceCode, String componentCode, String parameterName);
/**
* Retrieves a parameter's string value, returning {@code defaultValue} if the parameter cannot be found.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from the passed in namespace code</li>
* <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
     * @param defaultValue the value to return if the parameter does not exist. Can be any string value including null
     * @return string value or the defaultValue
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValueAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
String getParameterValueAsString(String namespaceCode, String componentCode, String parameterName, String defaultValue);
/**
* Retrieves a parameter's string value, returning null if the parameter cannot be found.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param componentClass the class with the namespace & component annotations
* @param parameterName the parameter name
* @return string value or null
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValueAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
String getParameterValueAsString(Class<?> componentClass, String parameterName);
/**
* Retrieves a parameter's string value, returning {@code defaultValue} if the parameter cannot be found.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param componentClass the class with the namespace & component annotations
* @param parameterName the parameter name
     * @param defaultValue the value to return if the parameter does not exist. Can be any string value including null
     * @return string value or the defaultValue
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValueAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
String getParameterValueAsString(Class<?> componentClass, String parameterName, String defaultValue);
/**
* Retrieves a parameter's string value filtered through the deploy time configuration variables, returning null if
* the parameter cannot be found.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from the passed in namespace code</li>
* <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
* @return filtered string value or null
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValueAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
String getParameterValueAsFilteredString(String namespaceCode, String componentCode, String parameterName);
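    /*
     * Hedged illustration (not part of the original interface): if the stored parameter value were
     * "${application.url}/portal.do" and the deploy time configuration defined application.url as
     * "http://localhost:8080/kr-dev", the filtered value returned here would be
     * "http://localhost:8080/kr-dev/portal.do". The property name and values are hypothetical.
     */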
/**
* Retrieves a parameter's string value filtered through the deploy time configuration variables, returning an
* unfiltered {@code defaultValue} if the parameter cannot be found.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from the passed in namespace code</li>
* <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
     * @param defaultValue the value to return if the parameter does not exist. Can be any string value including null
     * @return filtered string value or the unfiltered defaultValue
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValueAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
String getParameterValueAsFilteredString(String namespaceCode, String componentCode, String parameterName, String defaultValue);
/**
* Retrieves a parameter's string value filtered through the deploy time configuration variables, returning null if
* the parameter cannot be found.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param componentClass the class with the namespace & component annotations
* @param parameterName the parameter name
* @return filtered string value or null
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValueAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
String getParameterValueAsFilteredString(Class<?> componentClass, String parameterName);
/**
* Retrieves a parameter's string value filtered through the deploy time configuration variables, returning an
* unfiltered {@code defaultValue} if the parameter cannot be found.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param componentClass the class with the namespace & component annotations
* @param parameterName the parameter name
     * @param defaultValue the value to return if the parameter does not exist. Can be any string value including null
     * @return filtered string value or the unfiltered defaultValue
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValueAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
String getParameterValueAsFilteredString(Class<?> componentClass, String parameterName, String defaultValue);
/**
* Retrieves a parameter's string values where a parameter contains 0 or more values.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
     * <li>namespace code: from the passed in namespace code</li>
     * <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
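     *
     * <p>
     * A minimal usage sketch, assuming a {@code parameterService} reference to an implementation
     * of this interface; the codes and parameter name are hypothetical, and each value of a
     * multi-valued parameter is returned as a separate element of the collection:
     * </p>
     * <pre>{@code
     * Collection<String> allowedTypes = parameterService.getParameterValuesAsString(
     *         "KR-NS", "Document", "ALLOWED_ATTACHMENT_TYPES");
     * }</pre>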
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
* @return string values or empty Collection
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValuesAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
Collection<String> getParameterValuesAsString(String namespaceCode, String componentCode, String parameterName);
/**
* Retrieves a parameter's string values where a parameter contains 0 or more values.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param componentClass the class with the namespace & component annotations
* @param parameterName the parameter name
* @return string values or empty Collection
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValuesAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
Collection<String> getParameterValuesAsString(Class<?> componentClass, String parameterName);
/**
* Retrieves a parameter's string values where a parameter contains 0 or more values, filtered through the deploy
* time configuration variables.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
     * <li>namespace code: from the passed in namespace code</li>
     * <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
* @return filtered string values or empty Collection
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValuesAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
Collection<String> getParameterValuesAsFilteredString(String namespaceCode, String componentCode, String parameterName);
/**
* Retrieves a parameter's string values where a parameter contains 0 or more values, filtered through the deploy
* time configuration variables.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param componentClass the class with the namespace & component annotations
* @param parameterName the parameter name
* @return filtered string values or empty Collection
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getParameterValuesAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey)} for details
*/
Collection<String> getParameterValuesAsFilteredString(Class<?> componentClass, String parameterName);
/**
* Retrieves a subParameter's string value, returning null if the subParameter cannot be found.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from the passed in namespace code</li>
* <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
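     *
     * <p>
     * A minimal usage sketch, assuming a {@code parameterService} reference to an implementation
     * of this interface; all codes and names shown are hypothetical:
     * </p>
     * <pre>{@code
     * String retryLimit = parameterService.getSubParameterValueAsString(
     *         "KR-NS", "Batch", "JOB_SETTINGS", "RETRY_LIMIT");
     * }</pre>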
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
* @param subParameterName the subParameter name
* @return string value or null
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getSubParameterValueAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey, String)} for details
*/
String getSubParameterValueAsString(String namespaceCode, String componentCode, String parameterName, String subParameterName);
/**
* Retrieves a subParameter's string value, returning null if the subParameter cannot be found.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param componentClass the class with the namespace & component annotations
     * @param parameterName the parameter name
     * @param subParameterName the subParameter name
     * @return string value or null
     *
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getSubParameterValueAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey, String)} for details
*/
String getSubParameterValueAsString(Class<?> componentClass, String parameterName, String subParameterName);
/**
* Retrieves a subParameter's string value filtered through the deploy time configuration variables, returning null
* if the subParameter cannot be found.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from the passed in namespace code</li>
* <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
* @param subParameterName the subParameter name
* @return filtered string value or null
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getSubParameterValueAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey, String)} for details
*/
String getSubParameterValueAsFilteredString(String namespaceCode, String componentCode, String parameterName, String subParameterName);
/**
* Retrieves a subParameter's string value filtered through the deploy time configuration variables, returning null
* if the subParameter cannot be found.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param componentClass the class with the namespace & component annotations
     * @param parameterName the parameter name
     * @param subParameterName the subParameter name
     * @return filtered string value or null
     *
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getSubParameterValueAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey, String)} for details
*/
String getSubParameterValueAsFilteredString(Class<?> componentClass, String parameterName, String subParameterName);
/**
* Retrieves a subParameter's string values where a subParameter contains 0 or more values.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
     * <li>namespace code: from the passed in namespace code</li>
     * <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
* @param subParameterName the subParameter name
* @return string values or empty Collection
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getSubParameterValuesAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey, String)} for details
*/
Collection<String> getSubParameterValuesAsString(String namespaceCode, String componentCode, String parameterName, String subParameterName);
/**
* Retrieves a subParameter's string values where a subParameter contains 0 or more values.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param componentClass the class with the namespace & component annotations
* @param parameterName the parameter name
* @param subParameterName the subParameter name
* @return string values or empty Collection
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getSubParameterValuesAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey, String)} for details
*/
Collection<String> getSubParameterValuesAsString(Class<?> componentClass, String parameterName, String subParameterName);
/**
* Retrieves a subParameter's string values where a subParameter contains 0 or more values, filtered through the
* deploy time configuration variables.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
     * <li>namespace code: from the passed in namespace code</li>
     * <li>component code: from the passed in component code</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param namespaceCode the namespace code
* @param componentCode the component code
* @param parameterName the parameter name
* @param subParameterName the subParameter name
* @return filtered string values or empty Collection
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getSubParameterValuesAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey, String)} for details
*/
Collection<String> getSubParameterValuesAsFilteredString(String namespaceCode, String componentCode, String parameterName, String subParameterName);
/**
* Retrieves a subParameter's string values where a subParameter contains 0 or more values, filtered through the
* deploy time configuration variables.
*
* <p>
* The parameter key is constructed from the following:
* <ul>
* <li>namespace code: from a {@link ParameterConstants.NAMESPACE}
* annotation on the componentClass</li>
* <li>component code: from a {@link ParameterConstants.COMPONENT}
* annotation on the componentClass</li>
* <li>parameter name: from the passed in parameter name</li>
* <li>application id: from the client configuration of the service implementation</li>
* </ul>
* </p>
*
* @param componentClass the class with the namespace & component annotations
* @param parameterName the parameter name
* @param subParameterName the subParameter name
* @return filtered string values or empty Collection
*
* @throws IllegalArgumentException if any arguments are null
* @throws IllegalStateException if the application id is not configured correctly
*
* @see {@link org.kuali.rice.coreservice.api.parameter.ParameterRepositoryService#getSubParameterValuesAsString(org.kuali.rice.coreservice.api.parameter.ParameterKey, String)} for details
*/
Collection<String> getSubParameterValuesAsFilteredString(Class<?> componentClass, String parameterName, String subParameterName);
} | apache-2.0 |
siosio/intellij-community | java/java-tests/testData/codeInsight/daemonCodeAnalyzer/quickFix/streamToLoop/beforeFlatMap.java | 5028 | // "Fix all 'Stream API call chain can be replaced with loop' problems in file" "true"
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import static java.util.Arrays.asList;
public class Main {
private static long testChain(List<? extends String> list) {
return Stream.of(0, null, "1", list).flatMap(Stream::of).flatMap(Stream::of).flatMap(Stream::of).flatMap(Stream::of).flatMap(Stream::of).cou<caret>nt();
}
public static void testComplexFilter(List<String> list) {
System.out.println(list.stream()
.filter(x -> x != null)
.flatMap(s -> (IntStream.range(0, 10).boxed().filter(Predicate.isEqual(s.length()))))
.collect(Collectors.toList()));
}
public void testConditional(List<List<String>> list) {
list.stream().flatMap(lst -> lst == null ? (Stream.empty()) : (lst.stream())).forEach(System.out::println);
}
private static long testDistinctUnpluralize(List<List<String>> nested) {
return nested.stream().flatMap(names -> names.stream().distinct()).count();
}
public static IntSummaryStatistics testLimit() {
return IntStream.range(0, 100).flatMap(x -> IntStream.range(0, x).limit(x/2)).limit(50).summaryStatistics();
}
public static IntSummaryStatistics testLimit3() {
return IntStream.range(0, 100).flatMap(x -> IntStream.range(0, x).flatMap(y -> IntStream.range(y, y + 100).limit(10)).limit(x / 2)).limit(500).summaryStatistics();
}
public static IntSummaryStatistics testLimitCrazy() {
return IntStream.range(0, 100).flatMap(
x -> IntStream.range(0, x).flatMap(
y -> IntStream.range(y, y + 100).flatMap(
z -> IntStream.range(z, z+2)).limit(10))
.limit(x / 2))
.limit(500)
.summaryStatistics();
}
private static List<String> testMethodRef(List<List<String>> list) {
return list.stream().flatMap(Collection::stream).collect(Collectors.toList());
}
private static List<String> testMethodRef2(List<String[]> list) {
return list.stream().flatMap(Stream::of).collect(Collectors.toList());
}
private static List<List<String>> testMethodRef3(List<List<List<String>>> list) {
return list.stream().flatMap(List::stream).collect(Collectors.toList());
}
private static long testBoundRename(Map<String, List<String>> strings) {
return strings.entrySet().stream().filter(e -> !e.getKey().isEmpty())
.flatMap(entry -> entry.getValue().stream().filter(entry.getKey()::equals))
.count();
}
public static IntSummaryStatistics testNestedFlatMap(List<List<List<String>>> list) {
return list.stream().filter(l -> l != null).flatMap(l -> l.stream().filter(lst -> lst != null).flatMap(lst -> lst.stream())).mapToInt(str -> str.length()).summaryStatistics();
}
public static LongSummaryStatistics testNestedMap(List<List<String>> list) {
return list.stream().filter(a -> a != null).flatMapToLong(lst -> lst.stream().mapToLong(a -> a.length())).summaryStatistics();
}
public static IntSummaryStatistics testNestedSkip(int... values) {
return Arrays.stream(values).skip(2).filter(x -> x > 0).flatMap(v -> IntStream.range(0, 100).skip(v)).summaryStatistics();
}
public static IntSummaryStatistics testNestedSkip2(int... values) {
return Arrays.stream(values).filter(x -> x > 0).flatMap(v -> IntStream.range(0, 100).skip(v)).skip(2).summaryStatistics();
}
public String testSorted(List<List<String>> list) {
return list.stream().flatMap(lst -> lst.stream().filter(Objects::nonNull).sorted()).filter(x -> x.length() < 5).findFirst().orElse("");
}
public static void main(String[] args) {
testChain(asList("aa", "bbb", "c", null, "dd"));
testComplexFilter(asList("a", "bbbb", "cccccccccc", "dd", ""));
System.out.println(testDistinctUnpluralize(asList(asList("a"), asList(null, "bb", "ccc"))));
System.out.println(testLimit());
System.out.println(testLimit3());
System.out.println(testLimitCrazy());
System.out.println(testMethodRef(asList(asList("", "a", "abcd", "xyz"), asList("x", "y"))));
System.out.println(testMethodRef2(asList(new String[] {"", "a", "abcd", "xyz"}, new String[] {"x", "y"})));
System.out.println(testMethodRef3(asList(asList(asList("a", "d")), asList(asList("c"), asList("b")))));
System.out.println(testNestedFlatMap(asList(asList(asList("a", "bbb", "ccc")), asList(), null, asList(asList("z")))));
System.out.println(testNestedMap(asList(null, asList("aaa", "b", "cc", "dddd"), asList("gggg"))));
System.out.println(testNestedSkip(1, 95, -2, 0, 97, 90));
System.out.println(testNestedSkip2(1, 95, -2, 0, 97, 90));
Map<String, List<String>> map = new HashMap<>();
map.put("", asList("", "a", "b"));
map.put("a", asList("", "a", "b", "a"));
map.put("b", asList("", "a", "b"));
map.put("c", asList("", "a", "b"));
System.out.println(testBoundRename(map));
}
} | apache-2.0 |
GlenRSmith/elasticsearch | server/src/test/java/org/elasticsearch/transport/RemoteClusterAwareClientTests.java | 8136 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.transport;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsRequest;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
public class RemoteClusterAwareClientTests extends ESTestCase {
private final ThreadPool threadPool = new TestThreadPool(getClass().getName());
@Override
public void tearDown() throws Exception {
super.tearDown();
ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
}
private MockTransportService startTransport(String id, List<DiscoveryNode> knownNodes) {
return RemoteClusterConnectionTests.startTransport(id, knownNodes, Version.CURRENT, threadPool);
}
public void testSearchShards() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (
MockTransportService seedTransport = startTransport("seed_node", knownNodes);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes)
) {
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
Settings.Builder builder = Settings.builder();
builder.putList("cluster.remote.cluster1.seeds", seedTransport.getLocalDiscoNode().getAddress().toString());
try (MockTransportService service = MockTransportService.createNewService(builder.build(), Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (
RemoteClusterAwareClient client = new RemoteClusterAwareClient(
Settings.EMPTY,
threadPool,
service,
"cluster1",
randomBoolean()
)
) {
SearchRequest request = new SearchRequest("test-index");
CountDownLatch responseLatch = new CountDownLatch(1);
AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index").indicesOptions(
request.indicesOptions()
).local(true).preference(request.preference()).routing(request.routing());
client.admin()
.cluster()
.searchShards(
searchShardsRequest,
new LatchedActionListener<>(
ActionListener.wrap(reference::set, e -> fail("no failures expected")),
responseLatch
)
);
responseLatch.await();
assertNotNull(reference.get());
ClusterSearchShardsResponse clusterSearchShardsResponse = reference.get();
assertEquals(knownNodes, Arrays.asList(clusterSearchShardsResponse.getNodes()));
}
}
}
}
public void testSearchShardsThreadContextHeader() {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (
MockTransportService seedTransport = startTransport("seed_node", knownNodes);
MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes)
) {
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
Settings.Builder builder = Settings.builder();
builder.putList("cluster.remote.cluster1.seeds", seedTransport.getLocalDiscoNode().getAddress().toString());
try (MockTransportService service = MockTransportService.createNewService(builder.build(), Version.CURRENT, threadPool, null)) {
service.start();
service.acceptIncomingRequests();
try (
RemoteClusterAwareClient client = new RemoteClusterAwareClient(
Settings.EMPTY,
threadPool,
service,
"cluster1",
randomBoolean()
)
) {
SearchRequest request = new SearchRequest("test-index");
int numThreads = 10;
ExecutorService executorService = Executors.newFixedThreadPool(numThreads);
for (int i = 0; i < numThreads; i++) {
final String threadId = Integer.toString(i);
executorService.submit(() -> {
ThreadContext threadContext = seedTransport.threadPool.getThreadContext();
threadContext.putHeader("threadId", threadId);
AtomicReference<ClusterSearchShardsResponse> reference = new AtomicReference<>();
final ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index")
.indicesOptions(request.indicesOptions())
.local(true)
.preference(request.preference())
.routing(request.routing());
CountDownLatch responseLatch = new CountDownLatch(1);
client.admin()
.cluster()
.searchShards(searchShardsRequest, new LatchedActionListener<>(ActionListener.wrap(resp -> {
reference.set(resp);
assertEquals(threadId, seedTransport.threadPool.getThreadContext().getHeader("threadId"));
}, e -> fail("no failures expected")), responseLatch));
try {
responseLatch.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
assertNotNull(reference.get());
ClusterSearchShardsResponse clusterSearchShardsResponse = reference.get();
assertEquals(knownNodes, Arrays.asList(clusterSearchShardsResponse.getNodes()));
});
}
ThreadPool.terminate(executorService, 5, TimeUnit.SECONDS);
}
}
}
}
}
| apache-2.0 |
hequn8128/flink | flink-end-to-end-tests/flink-end-to-end-tests-common/src/main/java/org/apache/flink/tests/util/AutoClosablePath.java | 1492 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.tests.util;
import org.apache.flink.util.FileUtils;
import org.apache.flink.util.Preconditions;
import java.io.IOException;
import java.nio.file.Path;
/**
* Utility class to delete a given {@link Path} when exiting a try-with-resources statement.
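 *
 * <p>A minimal usage sketch (the directory name is hypothetical); the constructor requires an
 * absolute path, and the path is deleted when the try block exits:
 * <pre>{@code
 * Path dir = Files.createTempDirectory("e2e-test").toAbsolutePath();
 * try (AutoClosablePath ignored = new AutoClosablePath(dir)) {
 *     // write test data below dir
 * }
 * }</pre>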
*/
public final class AutoClosablePath implements AutoCloseable {
private final Path path;
public AutoClosablePath(final Path path) {
Preconditions.checkNotNull(path, "Path must not be null.");
Preconditions.checkArgument(path.isAbsolute(), "Path must be absolute.");
this.path = path;
}
@Override
public void close() throws IOException {
FileUtils.deleteFileOrDirectory(path.toFile());
}
}
| apache-2.0 |
ascherbakoff/ignite | modules/ml/src/main/java/org/apache/ignite/ml/dataset/feature/package-info.java | 1069 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* <!-- Package description. -->
* Package for helper classes over features such as {@link org.apache.ignite.ml.dataset.feature.ObjectHistogram} or
* {@link org.apache.ignite.ml.dataset.feature.FeatureMeta}.
*/
package org.apache.ignite.ml.dataset.feature;
| apache-2.0 |
mrdomino/bazel | src/main/java/com/google/devtools/build/lib/rules/repository/WorkspaceBaseRule.java | 1897 | // Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.repository;
import static com.google.devtools.build.lib.packages.Attribute.attr;
import static com.google.devtools.build.lib.syntax.Type.STRING;
import com.google.devtools.build.lib.analysis.RuleDefinition;
import com.google.devtools.build.lib.analysis.RuleDefinitionEnvironment;
import com.google.devtools.build.lib.packages.RuleClass;
import com.google.devtools.build.lib.packages.RuleClass.Builder;
import com.google.devtools.build.lib.packages.RuleClass.Builder.RuleClassType;
/**
* Base rule for rules in the WORKSPACE file.
*/
public class WorkspaceBaseRule implements RuleDefinition {
@Override
public RuleClass build(Builder builder, RuleDefinitionEnvironment environment) {
return builder
.exemptFromConstraintChecking("workspace rules aren't built for target environments")
.add(attr("generator_name", STRING).undocumented("internal"))
.add(attr("generator_function", STRING).undocumented("internal"))
.add(attr("generator_location", STRING).undocumented("internal"))
.build();
}
@Override
public Metadata getMetadata() {
return RuleDefinition.Metadata.builder()
.name("$workspace_base_rule")
.type(RuleClassType.ABSTRACT)
.build();
}
}
| apache-2.0 |
OnePaaS/elasticsearch | src/main/java/org/elasticsearch/search/sort/GeoDistanceSortBuilder.java | 6029 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.sort;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.FilterBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
/**
* A geo distance based sorting on a geo point like field.
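 *
 * <p>
 * A minimal builder sketch (the field name and coordinates are hypothetical):
 * <pre>
 * SortBuilder sort = new GeoDistanceSortBuilder("pin.location")
 *         .point(40.7143528, -74.0059731)
 *         .unit(DistanceUnit.KILOMETERS)
 *         .order(SortOrder.ASC);
 * </pre>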
*/
public class GeoDistanceSortBuilder extends SortBuilder {
final String fieldName;
private final List<GeoPoint> points = new ArrayList<>();
private final List<String> geohashes = new ArrayList<>();
private GeoDistance geoDistance;
private DistanceUnit unit;
private SortOrder order;
private String sortMode;
private FilterBuilder nestedFilter;
private String nestedPath;
/**
* Constructs a new distance based sort on a geo point like field.
*
* @param fieldName The geo point like field name.
*/
public GeoDistanceSortBuilder(String fieldName) {
this.fieldName = fieldName;
}
/**
     * The point from which the distance is computed for sorting.
*
* @param lat latitude.
* @param lon longitude.
*/
public GeoDistanceSortBuilder point(double lat, double lon) {
points.add(new GeoPoint(lat, lon));
return this;
}
/**
     * The points from which the distance is computed for sorting.
*
* @param points reference points.
*/
public GeoDistanceSortBuilder points(GeoPoint... points) {
this.points.addAll(Arrays.asList(points));
return this;
}
/**
     * The geohashes of the geo points from which the distance is computed for sorting.
*/
public GeoDistanceSortBuilder geohashes(String... geohashes) {
this.geohashes.addAll(Arrays.asList(geohashes));
return this;
}
/**
* The geo distance type used to compute the distance.
*/
public GeoDistanceSortBuilder geoDistance(GeoDistance geoDistance) {
this.geoDistance = geoDistance;
return this;
}
/**
* The distance unit to use. Defaults to {@link org.elasticsearch.common.unit.DistanceUnit#KILOMETERS}
*/
public GeoDistanceSortBuilder unit(DistanceUnit unit) {
this.unit = unit;
return this;
}
/**
* The order of sorting. Defaults to {@link SortOrder#ASC}.
*/
@Override
public GeoDistanceSortBuilder order(SortOrder order) {
this.order = order;
return this;
}
/**
* Not relevant.
*/
@Override
public SortBuilder missing(Object missing) {
return this;
}
/**
* Defines which distance to use for sorting in the case a document contains multiple geo points.
* Possible values: min and max
*/
public GeoDistanceSortBuilder sortMode(String sortMode) {
this.sortMode = sortMode;
return this;
}
/**
* Sets the nested filter that the nested objects should match with in order to be taken into account
* for sorting.
*/
public GeoDistanceSortBuilder setNestedFilter(FilterBuilder nestedFilter) {
this.nestedFilter = nestedFilter;
return this;
}
/**
* Sets the nested path if sorting occurs on a field that is inside a nested object. By default when sorting on a
* field inside a nested object, the nearest upper nested object is selected as nested path.
*/
public GeoDistanceSortBuilder setNestedPath(String nestedPath) {
this.nestedPath = nestedPath;
return this;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("_geo_distance");
if (geohashes.size() == 0 && points.size() == 0) {
throw new ElasticsearchParseException("No points provided for _geo_distance sort.");
}
if (geohashes.size() == 1 && points.size() == 0) {
builder.field(fieldName, geohashes.get(0));
        } else if (points.size() == 1 && geohashes.size() == 0) {
builder.field(fieldName, points.get(0));
} else {
builder.startArray(fieldName);
for (GeoPoint point : points) {
builder.value(point);
}
for (String geohash : geohashes) {
builder.value(geohash);
}
builder.endArray();
}
if (unit != null) {
builder.field("unit", unit);
}
if (geoDistance != null) {
builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
}
if (order == SortOrder.DESC) {
builder.field("reverse", true);
}
if (sortMode != null) {
builder.field("mode", sortMode);
}
if (nestedPath != null) {
builder.field("nested_path", nestedPath);
}
if (nestedFilter != null) {
builder.field("nested_filter", nestedFilter, params);
}
builder.endObject();
return builder;
}
}
| apache-2.0 |
medicayun/medicayundicom | dcm4jboss-all/trunk/dcm4jboss-web/src/java/org/dcm4chex/archive/web/maverick/mwl/MWLScuDelegate.java | 6768 | /* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), available at http://sourceforge.net/projects/dcm4che.
*
* The Initial Developer of the Original Code is
* TIANI Medgraph AG.
* Portions created by the Initial Developer are Copyright (C) 2003-2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Gunter Zeilinger <gunter.zeilinger@tiani.com>
* Franz Willer <franz.willer@gwi-ag.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4chex.archive.web.maverick.mwl;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanException;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import javax.servlet.ServletConfig;
import org.apache.log4j.Logger;
import org.dcm4che.data.Command;
import org.dcm4che.data.Dataset;
import org.dcm4che.dict.Tags;
import org.jboss.mx.util.MBeanServerLocator;
/**
* @author franz.willer
 * @version $Revision: 14729 $ $Date: 2011-01-19 15:45:38 +0800 (Wed, 19 Jan 2011) $
*/
public class MWLScuDelegate {
private static ObjectName mwlScuServiceName = null;
private static ObjectName contentEditServiceName = null;
private static MBeanServer server;
private static Logger log = Logger
.getLogger(MWLScuDelegate.class.getName());
/**
     * Initialize the MWLScu service delegator.
* <p>
* Set the name of the MwlScuService MBean with the servlet config param
* 'mwlScuServiceName'.
*
* @param config
* The ServletConfig object.
*/
public void init(ServletConfig config) {
if (server != null)
return;
server = MBeanServerLocator.locate();
String s = config.getInitParameter("mwlScuServiceName");
try {
mwlScuServiceName = new ObjectName(s);
s = config.getInitParameter("contentEditName");
contentEditServiceName = new ObjectName(s);
} catch (Exception e) {
log.error("Exception in init! ", e);
}
}
public Logger getLogger() {
return log;
}
/**
* Makes the MBean call to get the list of worklist entries for given filter
* (ds).
*
* @param ds
* @return The list of worklist entries ( Each item in the list is a Dataset
* of one scheduled procedure step).
*/
public List findMWLEntries(Dataset ds) {
List resp = new ArrayList();
try {
server.invoke(mwlScuServiceName, "findMWLEntries",
new Object[] { ds, Boolean.FALSE, resp },
new String[] { Dataset.class.getName(), boolean.class.getName(), List.class.getName() });
} catch (Exception x) {
log.error("Exception occured in findMWLEntries: " + x.getMessage(),
x);
}
return resp;
}
/**
* Checks if the MwlScpAET is local.
* <p>
* This means, that the MWLSCP is in the same container.
* <p>
* If it runs in the same container, the query can be done directly without
* a CFIND. Also we can allow deletion of MWLEntries.
*
* @return true if the MWLSCP runs in the same container.
*/
public boolean isLocal() {
try {
Boolean b = (Boolean) server.getAttribute(mwlScuServiceName,
"Local");
return b.booleanValue();
} catch (Exception x) {
log.error("Exception occured in isLocal: " + x.getMessage(), x);
}
return false;
}
/**
* Deletes an MWL entry with given id.
* <p>
* This method should only be called if isLocal() returns true!
*
* @param spsID
* The ID of the MWLEntry (Scheduled Procedure Step ID)
     * @return <code>true</code> if the MWL entry was deleted successfully, <code>false</code> otherwise.
*/
public boolean deleteMWLEntry(String spsID) {
try {
Object o = server.invoke(mwlScuServiceName, "deleteMWLEntry",
new Object[] { spsID }, new String[] { String.class
.getName() });
return ((Boolean) o).booleanValue();
} catch (Exception x) {
log.error("Exception occured in deleteMWLEntry: " + x.getMessage(),
x);
}
return false;
}
public Map linkMppsToMwl( String[] spsIDs, String[] mppsIUIDs ) {
try {
Map map = (Map) server.invoke(contentEditServiceName, "linkMppsToMwl",
new Object[] { spsIDs, mppsIUIDs },
new String[] { String[].class.getName(), String[].class.getName() });
return map;
} catch (Exception x) {
log.error("Exception occured in linkMppsToMwl: " + x.getMessage(), x);
return null;
}
}
public Map linkMppsToMwl( Dataset[] mwlDs, String[] mppsIUIDs ) {
try {
Map map = (Map) server.invoke(contentEditServiceName, "linkMppsToMwl",
new Object[] { mwlDs, mppsIUIDs },
new String[] { Dataset[].class.getName(), String[].class.getName() });
return map;
} catch (Exception x) {
log.error("Exception occured in linkMppsToMwl: " + x.getMessage(), x);
return null;
}
}
}
| apache-2.0 |
cleliameneghin/sling | contrib/extensions/tenant/src/test/java/org/apache/sling/tenant/internal/TenantProviderImplTest.java | 2241 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.tenant.internal;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Iterator;
import junit.framework.TestCase;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ResourceResolverFactory;
import org.apache.sling.tenant.Tenant;
import org.junit.Test;
import org.mockito.Mockito;
import org.osgi.framework.BundleContext;
public class TenantProviderImplTest {
@Test
public void testListTenantsWithoutTenantRoot() throws Exception {
TenantProviderImpl provider = new TenantProviderImpl();
final ResourceResolverFactory rrf = Mockito.mock(ResourceResolverFactory.class);
final BundleContext context = Mockito.mock(BundleContext.class);
final ResourceResolver rr = Mockito.mock(ResourceResolver.class);
Mockito.when(rrf.getServiceResourceResolver(
Mockito.anyMapOf(String.class, Object.class))).thenReturn(rr);
set(provider, "factory", rrf);
provider.activate(context, new HashMap<String, Object>());
Iterator<Tenant> tenants = provider.getTenants();
TestCase.assertNotNull(tenants);
TestCase.assertFalse(tenants.hasNext());
}
private static void set(Object o, String name, Object value) throws Exception {
final Field f = o.getClass().getDeclaredField(name);
f.setAccessible(true);
f.set(o, value);
}
}
| apache-2.0 |
bhutchinson/rice | rice-middleware/core-service/impl/src/main/java/org/kuali/rice/coreservice/impl/style/StyleDataExporter.java | 2645 | /**
* Copyright 2005-2015 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.coreservice.impl.style;
import org.kuali.rice.core.api.CoreApiServiceLocator;
import org.kuali.rice.core.api.impex.ExportDataSet;
import org.kuali.rice.coreservice.impl.style.StyleBo;
import org.kuali.rice.krad.bo.Exporter;
import org.kuali.rice.krad.exception.ExportNotSupportedException;
import org.kuali.rice.krad.util.KRADConstants;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
/**
* An implementation of the {@link Exporter} class which facilitates exporting
* of {@link StyleBo} data from the GUI.
*
* @see ExportDataSet
* @see StyleBo
*
* @author Kuali Rice Team (rice.collab@kuali.org)
*/
public class StyleDataExporter implements Exporter {
private List<String> supportedFormats = new ArrayList<String>();
public StyleDataExporter() {
supportedFormats.add(KRADConstants.XML_FORMAT);
}
@Override
public void export(Class<?> dataObjectClass,
List<? extends Object> dataObjects, String exportFormat,
OutputStream outputStream) throws IOException {
if (!KRADConstants.XML_FORMAT.equals(exportFormat)) {
throw new ExportNotSupportedException("The given export format of "
+ exportFormat
+ " is not supported by the KEW XML Exporter!");
}
ExportDataSet dataSet = buildExportDataSet(dataObjectClass, dataObjects);
outputStream.write(CoreApiServiceLocator.getXmlExporterService()
.export(dataSet));
outputStream.flush();
}
@Override
public List<String> getSupportedFormats(Class<?> dataObjectClass) {
return supportedFormats;
}
/**
* Builds the ExportDataSet based on the BusinessObjects passed in.
*/
protected ExportDataSet buildExportDataSet(Class<?> dataObjectClass,
List<? extends Object> dataObjects) {
StyleExportDataSet dataSet = new StyleExportDataSet();
for (Object dataObject : dataObjects) {
if (dataObjectClass.equals(StyleBo.class)) {
dataSet.getStyles().add((StyleBo) dataObject);
}
}
return dataSet.createExportDataSet();
}
}
| apache-2.0 |
Sandyarathi/Lab2gRPC | lib/netty/codec-memcache/src/main/java/io/netty/handler/codec/memcache/binary/BinaryMemcacheMessage.java | 4903 | /*
* Copyright 2013 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.memcache.binary;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.memcache.MemcacheMessage;
/**
* An interface that defines a binary Memcache message, providing common properties for
* {@link BinaryMemcacheRequest} and {@link BinaryMemcacheResponse}.
* <p/>
 * A {@link BinaryMemcacheMessage} always consists of a header and optional extras and/or
* a key.
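 * <p/>
 * A minimal sketch of the accessors defined here, assuming {@code msg} is an instance obtained
 * elsewhere (for example from a decoder):
 * <pre>{@code
 * msg.setKey("counter");
 * msg.setOpaque(42);
 * int remaining = msg.totalBodyLength() - msg.keyLength() - msg.extrasLength();
 * }</pre>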
*
* @see BinaryMemcacheRequest
* @see BinaryMemcacheResponse
*/
public interface BinaryMemcacheMessage extends MemcacheMessage {
/**
* Returns the magic byte for the message.
*
* @return the magic byte.
*/
byte magic();
/**
* Sets the magic byte.
*
* @param magic the magic byte to use.
* @see BinaryMemcacheOpcodes for typesafe opcodes.
*/
BinaryMemcacheMessage setMagic(byte magic);
/**
* Returns the opcode for the message.
*
* @return the opcode.
*/
byte opcode();
/**
* Sets the opcode for the message.
*
* @param code the opcode to use.
*/
BinaryMemcacheMessage setOpcode(byte code);
/**
* Returns the key length of the message.
* <p/>
* This may return 0, since the key is optional.
*
* @return the key length.
*/
short keyLength();
/**
* Set the key length of the message.
* <p/>
* This may be 0, since the key is optional.
*
* @param keyLength the key length to use.
*/
BinaryMemcacheMessage setKeyLength(short keyLength);
/**
* Return the extras length of the message.
* <p/>
* This may be 0, since the extras content is optional.
*
* @return the extras length.
*/
byte extrasLength();
/**
* Set the extras length of the message.
* <p/>
* This may be 0, since the extras content is optional.
*
* @param extrasLength the extras length.
*/
BinaryMemcacheMessage setExtrasLength(byte extrasLength);
/**
* Returns the data type of the message.
*
* @return the data type of the message.
*/
byte dataType();
/**
* Sets the data type of the message.
*
* @param dataType the data type of the message.
*/
BinaryMemcacheMessage setDataType(byte dataType);
/**
* Returns the total body length.
* <p/>
* Note that this may be 0, since the body is optional.
*
* @return the total body length.
*/
int totalBodyLength();
/**
* Sets the total body length.
* <p/>
* Note that this may be 0, since the body length is optional.
*
* @param totalBodyLength the total body length.
*/
BinaryMemcacheMessage setTotalBodyLength(int totalBodyLength);
/**
* Returns the opaque value.
*
* @return the opaque value.
*/
int opaque();
/**
* Sets the opaque value.
*
     * @param opaque the opaque value to use.
*/
BinaryMemcacheMessage setOpaque(int opaque);
/**
* Returns the CAS identifier.
*
* @return the CAS identifier.
*/
long cas();
/**
* Sets the CAS identifier.
*
* @param cas the CAS identifier to use.
*/
BinaryMemcacheMessage setCas(long cas);
/**
* Returns the optional key of the document.
*
* @return the key of the document.
*/
String key();
/**
* Sets the key of the document.
*
* @param key the key of the message.
*/
BinaryMemcacheMessage setKey(String key);
/**
* Returns a {@link ByteBuf} representation of the optional extras.
*
* @return the optional extras.
*/
ByteBuf extras();
/**
* Sets the extras buffer on the message.
*
* @param extras the extras buffer of the document.
*/
BinaryMemcacheMessage setExtras(ByteBuf extras);
/**
* Increases the reference count by {@code 1}.
*/
@Override
BinaryMemcacheMessage retain();
/**
* Increases the reference count by the specified {@code increment}.
*/
@Override
BinaryMemcacheMessage retain(int increment);
@Override
BinaryMemcacheMessage touch();
@Override
BinaryMemcacheMessage touch(Object hint);
}
| bsd-3-clause |
rokn/Count_Words_2015 | testing/drools-master/drools-compiler/src/test/java/org/drools/compiler/integrationtests/waltz/Line.java | 1228 | /*
* Copyright 2005 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.integrationtests.waltz;
public class Line {
private int p1;
private int p2;
public Line() {
}
public Line(final int p1,
final int p2) {
this.p1 = p1;
this.p2 = p2;
}
public int getP1() {
return this.p1;
}
public void setP1(final int p1) {
this.p1 = p1;
}
public int getP2() {
return this.p2;
}
public void setP2(final int p2) {
this.p2 = p2;
}
public String toString() {
return "{Line p1=" + this.p1 + ", p2=" + this.p2 + "}";
}
}
| mit |
alexVengrovsk/che | plugins/plugin-java/che-plugin-java-ext-jdt/org-eclipse-jface-text/src/main/java/org/eclipse/che/jface/text/contentassist/ICompletionProposalExtension2.java | 2663 | /*******************************************************************************
* Copyright (c) 2000, 2005 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.che.jface.text.contentassist;
import org.eclipse.jface.text.DocumentEvent;
import org.eclipse.jface.text.IDocument;
import org.eclipse.che.jface.text.ITextViewer;
/**
* Extends {@link ICompletionProposal}
* with the following functions:
* <ul>
* <li>handling of trigger characters with modifiers</li>
* <li>visual indication for selection of a proposal</li>
* </ul>
*
* @since 2.1
*/
public interface ICompletionProposalExtension2 {
/**
* Applies the proposed completion to the given document. The insertion
* has been triggered by entering the given character with a modifier at the given offset.
* This method assumes that {@link #validate(org.eclipse.jface.text.IDocument, int, org.eclipse.jface.text.DocumentEvent)}
* returns <code>true</code> if called for <code>offset</code>.
*
* @param viewer the text viewer into which to insert the proposed completion
* @param trigger the trigger to apply the completion
* @param stateMask the state mask of the modifiers
* @param offset the offset at which the trigger has been activated
*/
void apply(ITextViewer viewer, char trigger, int stateMask, int offset);
/**
* Called when the proposal is selected.
*
* @param viewer the text viewer.
* @param smartToggle the smart toggle key was pressed
*/
void selected(ITextViewer viewer, boolean smartToggle);
/**
* Called when the proposal is unselected.
*
* @param viewer the text viewer.
*/
void unselected(ITextViewer viewer);
/**
* Requests the proposal to be validated with respect to the document event.
 * If the proposal cannot be validated, the method returns <code>false</code>.
* If the document event was <code>null</code>, only the caret offset was changed, but not the document.
*
* This method replaces {@link ICompletionProposalExtension#isValidFor(org.eclipse.jface.text.IDocument, int)}
*
* @param document the document
* @param offset the caret offset
* @param event the document event, may be <code>null</code>
 * @return <code>true</code> if the proposal can be validated at the given offset, <code>false</code> otherwise
*/
boolean validate(IDocument document, int offset, DocumentEvent event);
}
| epl-1.0 |
Taichi-SHINDO/jdk9-jdk | test/javax/swing/JTabbedPane/8007563/Test8007563.java | 4303 | /*
* Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
import java.awt.Color;
import java.awt.Point;
import java.awt.Robot;
import java.util.ArrayList;
import java.util.concurrent.CountDownLatch;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JTabbedPane;
import static javax.swing.UIManager.*;
import static javax.swing.SwingUtilities.*;
/*
* @test
* @bug 8007563
* @summary Tests JTabbedPane background
* @author Sergey Malenkov
*/
public class Test8007563 implements Runnable {
private static final ArrayList<String> LIST = new ArrayList<>();
private static final LookAndFeelInfo[] INFO = getInstalledLookAndFeels();
private static final CountDownLatch LATCH = new CountDownLatch(INFO.length);
private static Robot ROBOT;
public static void main(String[] args) throws Exception {
ROBOT = new Robot();
invokeLater(new Test8007563());
LATCH.await();
if (!LIST.isEmpty()) {
throw new Error(LIST.toString());
}
}
private static void addOpaqueError(boolean opaque) {
LIST.add(getLookAndFeel().getName() + " opaque=" + opaque);
}
private static boolean updateLookAndFeel() {
int index = (int) LATCH.getCount() - 1;
if (index >= 0) {
try {
LookAndFeelInfo info = INFO[index];
System.err.println("L&F: " + info.getName());
setLookAndFeel(info.getClassName());
return true;
} catch (Exception exception) {
exception.printStackTrace();
}
}
return false;
}
private JFrame frame;
private JTabbedPane pane;
public void run() {
if (this.frame == null) {
if (!updateLookAndFeel()) {
return;
}
this.pane = new JTabbedPane();
this.pane.setOpaque(false);
this.pane.setBackground(Color.RED);
for (int i = 0; i < 3; i++) {
this.pane.addTab("Tab " + i, new JLabel("Content area " + i));
}
this.frame = new JFrame(getClass().getSimpleName());
this.frame.getContentPane().setBackground(Color.BLUE);
this.frame.add(this.pane);
this.frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
this.frame.setSize(400, 200);
this.frame.setLocationRelativeTo(null);
this.frame.setVisible(true);
} else {
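            // Second pass(es): the frame is up, so sample the pixel at the top-right corner of
            // the tab pane and compare it with the background expected for the current opacity;
            // the check runs once with a non-opaque pane and once more after making it opaque.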
Point point = new Point(this.pane.getWidth() - 2, 2);
convertPointToScreen(point, this.pane);
Color actual = ROBOT.getPixelColor(point.x, point.y);
boolean opaque = this.pane.isOpaque();
Color expected = opaque
? this.pane.getBackground()
: this.frame.getContentPane().getBackground();
            if (!expected.equals(actual)) {
addOpaqueError(opaque);
}
if (!opaque) {
this.pane.setOpaque(true);
this.pane.repaint();
} else {
this.frame.dispose();
this.frame = null;
this.pane = null;
LATCH.countDown();
}
}
invokeLater(this);
}
}
| gpl-2.0 |
xkollar/spacewalk | java/code/src/com/redhat/rhn/frontend/action/renderers/InactiveSystemsRenderer.java | 3612 | /**
* Copyright (c) 2009--2014 Red Hat, Inc.
*
* This software is licensed to you under the GNU General Public License,
* version 2 (GPLv2). There is NO WARRANTY for this software, express or
* implied, including the implied warranties of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
* along with this software; if not, see
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
*
* Red Hat trademarks are not licensed under GPLv2. No permission is
* granted to use or replicate Red Hat trademarks that are incorporated
* in this software or its documentation.
*/
package com.redhat.rhn.frontend.action.renderers;
import com.redhat.rhn.common.db.datasource.DataResult;
import com.redhat.rhn.common.localization.LocalizationService;
import com.redhat.rhn.domain.user.User;
import com.redhat.rhn.frontend.dto.SystemOverview;
import com.redhat.rhn.frontend.listview.PageControl;
import com.redhat.rhn.manager.system.SystemManager;
import java.util.Iterator;
import javax.servlet.http.HttpServletRequest;
/**
* Renders YourRhn fragment for inactive systems
*
* @version $Rev$
*/
public class InactiveSystemsRenderer extends BaseFragmentRenderer {
private static final String INACTIVE_SYSTEM_LIST = "inactiveSystemList";
private static final String INACTIVE_SYSTEMS_EMPTY = "inactiveSystemsEmpty";
private static final String INACTIVE_SYSTEMS_CLASS = "inactiveSystemsClass";
/**
* {@inheritDoc}
*/
protected void render(User user, PageControl pc, HttpServletRequest request) {
LocalizationService ls = LocalizationService.getInstance();
DataResult<SystemOverview> isdr =
SystemManager.inactiveListSortbyCheckinTime(user, pc);
String inactiveSystemCSSTable = null;
if (!isdr.isEmpty()) {
for (Iterator<SystemOverview> i = isdr.iterator(); i.hasNext();) {
SystemOverview so = i.next();
StringBuilder buffer = new StringBuilder();
Long lastCheckin = so.getLastCheckinDaysAgo();
if (lastCheckin.compareTo(new Long(1)) < 0) {
buffer.append(lastCheckin * 24);
buffer.append(' ');
buffer.append(ls.getMessage("filter-form.jspf.hours"));
}
else if (lastCheckin.compareTo(new Long(7)) < 0) {
buffer.append(so.getLastCheckinDaysAgo().longValue());
buffer.append(' ');
buffer.append(ls.getMessage("filter-form.jspf.days"));
}
else if (lastCheckin.compareTo(new Long(7)) >= 0) {
buffer.append(lastCheckin.longValue() / 7);
buffer.append(' ');
buffer.append(ls.getMessage("filter-form.jspf.weeks"));
}
so.setLastCheckinString(buffer.toString());
}
request.setAttribute(INACTIVE_SYSTEM_LIST, isdr);
}
else {
inactiveSystemCSSTable = RendererHelper.makeEmptyTable(true,
"inactivelist.jsp.header",
"yourrhn.jsp.noinactivesystems");
request.setAttribute(INACTIVE_SYSTEMS_EMPTY, inactiveSystemCSSTable);
}
RendererHelper.setTableStyle(request, INACTIVE_SYSTEMS_CLASS);
}
/**
* {@inheritDoc}
*/
protected String getPageUrl() {
return "/WEB-INF/pages/common/fragments/yourrhn/inactiveSystems.jsp";
}
}
| gpl-2.0 |
filius/asterisk-java | src/main/java/org/asteriskjava/manager/event/DisconnectEvent.java | 1214 | /*
* Copyright 2004-2006 Stefan Reuter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.asteriskjava.manager.event;
/**
* A DisconnectEvent is triggered when the connection to the asterisk server is lost.<p>
* It is a pseudo event not directly related to an Asterisk generated event.
*
* @author srt
* @version $Id$
* @see org.asteriskjava.manager.event.ConnectEvent
*/
public class DisconnectEvent extends ManagerEvent
{
/**
* Serializable version identifier.
*/
private static final long serialVersionUID = 3689355411862206518L;
/**
* @param source
*/
public DisconnectEvent(Object source)
{
super(source);
}
}
| apache-2.0 |
weebl2000/modeshape | modeshape-jcr/src/main/java/org/modeshape/jcr/query/xpath/XPathQueryParser.java | 2338 | /*
* ModeShape (http://www.modeshape.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.modeshape.jcr.query.xpath;
import javax.jcr.query.Query;
import org.modeshape.common.text.ParsingException;
import org.modeshape.jcr.query.model.QueryCommand;
import org.modeshape.jcr.query.model.TypeSystem;
import org.modeshape.jcr.query.parse.InvalidQueryException;
import org.modeshape.jcr.query.parse.QueryParser;
import org.modeshape.jcr.query.xpath.XPath.Component;
/**
* A {@link QueryParser} implementation that accepts XPath expressions and converts them to a {@link QueryCommand ModeShape
* Abstract Query Model} representation.
*/
public class XPathQueryParser implements QueryParser {
static final boolean COLLAPSE_INNER_COMPONENTS = true;
@SuppressWarnings( "deprecation" )
private static final String LANGUAGE = Query.XPATH;
@Override
public String getLanguage() {
return LANGUAGE;
}
@Override
public String toString() {
return LANGUAGE;
}
@Override
public int hashCode() {
return LANGUAGE.hashCode();
}
@Override
public boolean equals( Object obj ) {
if (obj == this) return true;
if (obj instanceof QueryParser) {
QueryParser that = (QueryParser)obj;
return this.getLanguage().equals(that.getLanguage());
}
return false;
}
@Override
public QueryCommand parseQuery( String query,
TypeSystem typeSystem ) throws InvalidQueryException, ParsingException {
Component xpath = new XPathParser(typeSystem).parseXPath(query);
// Convert the result into a QueryCommand ...
QueryCommand command = new XPathToQueryTranslator(typeSystem, query).createQuery(xpath);
return command;
}
}
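// Illustrative use of the parser, assuming a TypeSystem instance is available from the
// surrounding query context (the XPath string is only an example):
//
//     QueryCommand command = new XPathQueryParser().parseQuery("//element(*, nt:file)", typeSystem);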
| apache-2.0 |
rscrimojr/incubator-trafficcontrol | traffic_router/core/src/main/java/com/comcast/cdn/traffic_control/traffic_router/core/dns/DNSAccessRecord.java | 3641 | /*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.cdn.traffic_control.traffic_router.core.dns;
import com.comcast.cdn.traffic_control.traffic_router.geolocation.Geolocation;
import com.comcast.cdn.traffic_control.traffic_router.core.router.StatTracker.Track.ResultDetails;
import com.comcast.cdn.traffic_control.traffic_router.core.router.StatTracker.Track.ResultType;
import org.xbill.DNS.Message;
import java.net.InetAddress;
// Using Josh Bloch Builder pattern so suppress these warnings.
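/**
 * Immutable record of a single DNS request, built via the nested {@link Builder}. A typical
 * construction looks roughly like the sketch below (the variable names are illustrative only):
 * <pre>{@code
 * DNSAccessRecord record = new DNSAccessRecord.Builder(queryInstant, clientAddress)
 *         .dnsMessage(query)
 *         .resultType(resultType)
 *         .resultDetails(resultDetails)
 *         .resultLocation(resultLocation)
 *         .build();
 * }</pre>
 */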
@SuppressWarnings({"PMD.MissingStaticMethodInNonInstantiatableClass",
"PMD.AccessorClassGeneration",
"PMD.CyclomaticComplexity"})
public final class DNSAccessRecord {
private final long queryInstant;
private final InetAddress client;
private final Message dnsMessage;
private final ResultType resultType;
private final ResultDetails resultDetails;
private final Geolocation resultLocation;
private final long requestNanoTime;
public long getQueryInstant() {
return queryInstant;
}
public InetAddress getClient() {
return client;
}
public Message getDnsMessage() {
return dnsMessage;
}
public ResultType getResultType() {
return resultType;
}
public ResultDetails getResultDetails() {
return resultDetails;
}
public Geolocation getResultLocation() {
return resultLocation;
}
public long getRequestNanoTime() {
return requestNanoTime;
}
public static class Builder {
private final long queryInstant;
private final InetAddress client;
private Message dnsMessage;
private ResultType resultType;
private ResultDetails resultDetails;
private Geolocation resultLocation;
private final long requestNanoTime;
public Builder(final long queryInstant, final InetAddress client) {
this.queryInstant = queryInstant;
this.client = client;
this.requestNanoTime = System.nanoTime();
}
public Builder dnsMessage(final Message query) {
this.dnsMessage = query;
return this;
}
public Builder resultType(final ResultType resultType) {
this.resultType = resultType;
return this;
}
public Builder resultDetails(final ResultDetails resultDetails) {
this.resultDetails = resultDetails;
return this;
}
public Builder resultLocation(final Geolocation resultLocation) {
this.resultLocation = resultLocation;
return this;
}
public DNSAccessRecord build() {
return new DNSAccessRecord(this);
}
}
private DNSAccessRecord(final Builder builder) {
queryInstant = builder.queryInstant;
client = builder.client;
dnsMessage = builder.dnsMessage;
resultType = builder.resultType;
resultDetails = builder.resultDetails;
resultLocation = builder.resultLocation;
requestNanoTime = builder.requestNanoTime;
}
}
| apache-2.0 |
GJL/flink | flink-filesystems/flink-swift-fs-hadoop/src/main/java/org/apache/hadoop/util/NativeCodeLoader.java | 3158 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
/**
* A helper to load the native hadoop code i.e. libhadoop.so.
* This handles the fallback to either the bundled libhadoop-Linux-i386-32.so
* or the default java implementations where appropriate.
*
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class NativeCodeLoader {
private static final Log LOG =
LogFactory.getLog(NativeCodeLoader.class);
private static boolean nativeCodeLoaded = false;
static {
LOG.info("Skipping native-hadoop library for flink-openstack-fs-hadoop's relocated Hadoop... " +
"using builtin-java classes where applicable");
}
/**
* Check if native-hadoop code is loaded for this platform.
*
* @return <code>true</code> if native-hadoop is loaded,
* else <code>false</code>
*/
public static boolean isNativeCodeLoaded() {
return nativeCodeLoaded;
}
/**
* Returns true only if this build was compiled with support for snappy.
*/
public static native boolean buildSupportsSnappy();
/**
* Returns true only if this build was compiled with support for openssl.
*/
public static native boolean buildSupportsOpenssl();
public static native String getLibraryName();
/**
* Return if native hadoop libraries, if present, can be used for this job.
* @param conf configuration
*
* @return <code>true</code> if native hadoop libraries, if present, can be
* used for this job; <code>false</code> otherwise.
*/
public boolean getLoadNativeLibraries(Configuration conf) {
return conf.getBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT);
}
/**
* Set if native hadoop libraries, if present, can be used for this job.
*
* @param conf configuration
* @param loadNativeLibraries can native hadoop libraries be loaded
*/
public void setLoadNativeLibraries(Configuration conf,
boolean loadNativeLibraries) {
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
loadNativeLibraries);
}
}
| apache-2.0 |
nmelnick/aws-sdk-java | aws-java-sdk-machinelearning/src/main/java/com/amazonaws/services/machinelearning/model/DeleteMLModelRequest.java | 3534 | /*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.machinelearning.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
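// Illustrative call sketch; the AmazonMachineLearning client and the model id shown here are
// placeholders assumed to be set up elsewhere:
//
//     client.deleteMLModel(new DeleteMLModelRequest().withMLModelId("ml-example-model-id"));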
/**
*
*/
public class DeleteMLModelRequest extends AmazonWebServiceRequest implements
Serializable, Cloneable {
/**
* <p>
* A user-supplied ID that uniquely identifies the <code>MLModel</code>.
* </p>
*/
private String mLModelId;
/**
* <p>
* A user-supplied ID that uniquely identifies the <code>MLModel</code>.
* </p>
*
* @param mLModelId
* A user-supplied ID that uniquely identifies the
* <code>MLModel</code>.
*/
public void setMLModelId(String mLModelId) {
this.mLModelId = mLModelId;
}
/**
* <p>
* A user-supplied ID that uniquely identifies the <code>MLModel</code>.
* </p>
*
* @return A user-supplied ID that uniquely identifies the
* <code>MLModel</code>.
*/
public String getMLModelId() {
return this.mLModelId;
}
/**
* <p>
* A user-supplied ID that uniquely identifies the <code>MLModel</code>.
* </p>
*
* @param mLModelId
* A user-supplied ID that uniquely identifies the
* <code>MLModel</code>.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public DeleteMLModelRequest withMLModelId(String mLModelId) {
setMLModelId(mLModelId);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getMLModelId() != null)
sb.append("MLModelId: " + getMLModelId());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DeleteMLModelRequest == false)
return false;
DeleteMLModelRequest other = (DeleteMLModelRequest) obj;
if (other.getMLModelId() == null ^ this.getMLModelId() == null)
return false;
if (other.getMLModelId() != null
&& other.getMLModelId().equals(this.getMLModelId()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode
+ ((getMLModelId() == null) ? 0 : getMLModelId().hashCode());
return hashCode;
}
@Override
public DeleteMLModelRequest clone() {
return (DeleteMLModelRequest) super.clone();
}
} | apache-2.0 |
punkhorn/camel-upstream | core/camel-base/src/main/java/org/apache/camel/impl/health/AbstractHealthCheck.java | 8699 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl.health;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.camel.health.HealthCheck;
import org.apache.camel.health.HealthCheckConfiguration;
import org.apache.camel.health.HealthCheckResultBuilder;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
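/**
 * Base implementation for {@link HealthCheck}s. Subclasses only implement
 * {@link #doCall(HealthCheckResultBuilder, Map)}; this class handles the enabled flag, the
 * minimum interval between two invocations and the consecutive-failure threshold. A minimal
 * subclass might look like the following sketch (the check name and the probed condition are
 * illustrative only):
 * <pre>{@code
 * public class MyHealthCheck extends AbstractHealthCheck {
 *     public MyHealthCheck() {
 *         super("my-group", "my-check");
 *     }
 *
 *     protected void doCall(HealthCheckResultBuilder builder, Map<String, Object> options) {
 *         if (someResourceIsReachable()) {
 *             builder.up();
 *         } else {
 *             builder.down();
 *             builder.message("resource not reachable");
 *         }
 *     }
 * }
 * }</pre>
 */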
public abstract class AbstractHealthCheck implements HealthCheck {
public static final String CHECK_ID = "check.id";
public static final String CHECK_GROUP = "check.group";
public static final String CHECK_ENABLED = "check.enabled";
public static final String INVOCATION_COUNT = "invocation.count";
public static final String INVOCATION_TIME = "invocation.time";
public static final String INVOCATION_ATTEMPT_TIME = "invocation.attempt.time";
public static final String FAILURE_COUNT = "failure.count";
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractHealthCheck.class);
private final Object lock;
private final String group;
private final String id;
private final ConcurrentMap<String, Object> meta;
private HealthCheckConfiguration configuration;
private HealthCheck.Result lastResult;
private ZonedDateTime lastInvocation;
protected AbstractHealthCheck(String id) {
this(null, id, null);
}
protected AbstractHealthCheck(String group, String id) {
this(group, id, null);
}
protected AbstractHealthCheck(String group, String id, Map<String, Object> meta) {
this.lock = new Object();
this.group = group;
this.id = ObjectHelper.notNull(id, "HealthCheck ID");
this.configuration = new HealthCheckConfiguration();
this.meta = new ConcurrentHashMap<>();
if (meta != null) {
this.meta.putAll(meta);
}
this.meta.put(CHECK_ID, id);
if (group != null) {
this.meta.putIfAbsent(CHECK_GROUP, group);
}
}
@Override
public String getId() {
return id;
}
@Override
public String getGroup() {
return group;
}
@Override
public Map<String, Object> getMetaData() {
return Collections.unmodifiableMap(this.meta);
}
@Override
public HealthCheckConfiguration getConfiguration() {
return this.configuration;
}
public void setConfiguration(HealthCheckConfiguration configuration) {
this.configuration = configuration;
}
@Override
public Result call() {
return call(Collections.emptyMap());
}
@Override
public Result call(Map<String, Object> options) {
synchronized (lock) {
final HealthCheckConfiguration conf = getConfiguration();
final HealthCheckResultBuilder builder = HealthCheckResultBuilder.on(this);
final ZonedDateTime now = ZonedDateTime.now();
final boolean enabled = ObjectHelper.supplyIfEmpty(conf.isEnabled(), HealthCheckConfiguration::defaultValueEnabled);
final Duration interval = ObjectHelper.supplyIfEmpty(conf.getInterval(), HealthCheckConfiguration::defaultValueInterval);
final Integer threshold = ObjectHelper.supplyIfEmpty(conf.getFailureThreshold(), HealthCheckConfiguration::defaultValueFailureThreshold);
// Extract relevant information from meta data.
int invocationCount = (Integer)meta.getOrDefault(INVOCATION_COUNT, 0);
int failureCount = (Integer)meta.getOrDefault(FAILURE_COUNT, 0);
String invocationTime = now.format(DateTimeFormatter.ISO_ZONED_DATE_TIME);
boolean call = true;
// Set common meta-data
meta.put(INVOCATION_ATTEMPT_TIME, invocationTime);
if (!enabled) {
LOGGER.debug("health-check {}/{} won't be invoked as not enabled", getGroup(), getId());
builder.message("Disabled");
builder.detail(CHECK_ENABLED, false);
return builder.unknown().build();
}
// check if the last invocation is far enough to have this check invoked
// again without violating the interval configuration.
if (lastResult != null && lastInvocation != null && !interval.isZero()) {
Duration elapsed = Duration.between(lastInvocation, now);
if (elapsed.compareTo(interval) < 0) {
LOGGER.debug("health-check {}/{} won't be invoked as interval ({}) is not yet expired (last-invocation={})",
getGroup(),
getId(),
elapsed,
lastInvocation);
call = false;
}
}
// Invoke the check.
if (call) {
LOGGER.debug("Invoke health-check {}/{}", getGroup(), getId());
doCall(builder, options);
// State should be set here
ObjectHelper.notNull(builder.state(), "Response State");
if (builder.state() == State.DOWN) {
                    // If the service is un-healthy but the number of times it
                    // has been consecutively reported in this state is less
                    // than the configured threshold, mark it as UP. This is
                    // used to avoid false positives in case of glitches.
if (failureCount++ < threshold) {
LOGGER.debug("Health-check {}/{} has status DOWN but failure count ({}) is less than configured threshold ({})",
getGroup(),
getId(),
failureCount,
threshold);
builder.up();
}
} else {
failureCount = 0;
}
meta.put(INVOCATION_TIME, invocationTime);
meta.put(FAILURE_COUNT, failureCount);
meta.put(INVOCATION_COUNT, ++invocationCount);
// Copy some of the meta-data bits to the response attributes so the
// response caches the health-check state at the time of the invocation.
builder.detail(INVOCATION_TIME, meta.get(INVOCATION_TIME));
builder.detail(INVOCATION_COUNT, meta.get(INVOCATION_COUNT));
builder.detail(FAILURE_COUNT, meta.get(FAILURE_COUNT));
// update last invocation time.
lastInvocation = now;
} else if (lastResult != null) {
lastResult.getMessage().ifPresent(builder::message);
lastResult.getError().ifPresent(builder::error);
builder.state(lastResult.getState());
builder.details(lastResult.getDetails());
}
lastResult = builder.build();
return lastResult;
}
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
AbstractHealthCheck check = (AbstractHealthCheck) o;
return id != null ? id.equals(check.id) : check.id == null;
}
@Override
public int hashCode() {
return id != null ? id.hashCode() : 0;
}
protected final void addMetaData(String key, Object value) {
meta.put(key, value);
}
/**
* Invoke the health check.
*
     * @see HealthCheck#call(Map)
*/
protected abstract void doCall(HealthCheckResultBuilder builder, Map<String, Object> options);
}
| apache-2.0 |
mosoft521/spring-boot | spring-boot-test-autoconfigure/src/test/java/org/springframework/boot/test/autoconfigure/jooq/JooqTestWithAutoConfigureTestDatabaseIntegrationTests.java | 1791 | /*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.test.autoconfigure.jooq;
import javax.sql.DataSource;
import org.jooq.DSLContext;
import org.jooq.SQLDialect;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.jdbc.EmbeddedDatabaseConnection;
import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase;
import org.springframework.test.context.junit4.SpringRunner;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Integration tests for {@link JooqTest}.
*
* @author Stephane Nicoll
*/
@RunWith(SpringRunner.class)
@JooqTest
@AutoConfigureTestDatabase(connection = EmbeddedDatabaseConnection.H2)
public class JooqTestWithAutoConfigureTestDatabaseIntegrationTests {
@Autowired
private DSLContext dsl;
@Autowired
private DataSource dataSource;
@Test
public void replacesAutoConfiguredDataSource() throws Exception {
String product = this.dataSource.getConnection().getMetaData()
.getDatabaseProductName();
assertThat(product).startsWith("H2");
assertThat(this.dsl.configuration().dialect()).isEqualTo(SQLDialect.H2);
}
}
| apache-2.0 |
sriksun/falcon | falcon-regression/merlin/src/test/java/org/apache/falcon/regression/ExternalFSTest.java | 9637 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.falcon.regression;
import org.apache.commons.lang.StringUtils;
import org.apache.falcon.entity.v0.EntityType;
import org.apache.falcon.entity.v0.cluster.Interfacetype;
import org.apache.falcon.entity.v0.feed.ActionType;
import org.apache.falcon.entity.v0.feed.ClusterType;
import org.apache.falcon.regression.Entities.FeedMerlin;
import org.apache.falcon.regression.core.bundle.Bundle;
import org.apache.falcon.regression.core.enumsAndConstants.MerlinConstants;
import org.apache.falcon.regression.core.helpers.ColoHelper;
import org.apache.falcon.regression.core.util.AssertUtil;
import org.apache.falcon.regression.core.util.BundleUtil;
import org.apache.falcon.regression.core.util.HadoopUtil;
import org.apache.falcon.regression.core.util.InstanceUtil;
import org.apache.falcon.regression.core.util.MatrixUtil;
import org.apache.falcon.regression.core.util.OSUtil;
import org.apache.falcon.regression.core.util.OozieUtil;
import org.apache.falcon.regression.core.util.TimeUtil;
import org.apache.falcon.regression.core.util.Util;
import org.apache.falcon.regression.testHelper.BaseTestClass;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;
import org.apache.oozie.client.CoordinatorAction;
import org.apache.oozie.client.OozieClient;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import javax.xml.bind.JAXBException;
import java.io.IOException;
import java.util.List;
import java.util.UUID;
/**
* Tests for operations with external file systems.
*/
@Test(groups = "embedded")
public class ExternalFSTest extends BaseTestClass{
public static final String WASB_END_POINT =
"wasb://" + MerlinConstants.WASB_CONTAINER + "@" + MerlinConstants.WASB_ACCOUNT;
private ColoHelper cluster = servers.get(0);
private FileSystem clusterFS = serverFS.get(0);
private OozieClient clusterOC = serverOC.get(0);
private FileSystem wasbFS;
private Bundle externalBundle;
private String baseTestDir = cleanAndGetTestDir();
private String sourcePath = baseTestDir + "/source";
private String baseWasbDir = "/falcon-regression/" + UUID.randomUUID().toString().split("-")[0];
private String testWasbTargetDir = baseWasbDir + '/'
+ UUID.randomUUID().toString().split("-")[0] + '/';
private static final Logger LOGGER = Logger.getLogger(ExternalFSTest.class);
@BeforeClass
public void setUpClass() throws IOException {
HadoopUtil.recreateDir(clusterFS, baseTestDir);
Configuration conf = new Configuration();
conf.set("fs.defaultFS", WASB_END_POINT);
conf.set("fs.azure.account.key." + MerlinConstants.WASB_ACCOUNT,
MerlinConstants.WASB_SECRET);
conf.setBoolean("fs.hdfs.impl.disable.cache", false);
wasbFS = FileSystem.get(conf);
LOGGER.info("creating base wasb dir" + baseWasbDir);
}
@BeforeMethod(alwaysRun = true)
public void setUp() throws JAXBException, IOException {
Bundle bundle = BundleUtil.readFeedReplicationBundle();
bundles[0] = new Bundle(bundle, cluster);
externalBundle = new Bundle(bundle, cluster);
bundles[0].generateUniqueBundle(this);
externalBundle.generateUniqueBundle(this);
LOGGER.info("checking wasb credentials with location: " + testWasbTargetDir);
wasbFS.create(new Path(testWasbTargetDir));
wasbFS.delete(new Path(testWasbTargetDir), true);
}
@AfterMethod
public void tearDown() throws IOException {
removeTestClassEntities();
wasbFS.delete(new Path(testWasbTargetDir), true);
}
@AfterClass(alwaysRun = true)
public void tearDownClass() throws IOException {
wasbFS.delete(new Path(baseWasbDir), true);
}
@Test(dataProvider = "getInvalidTargets")
public void invalidCredentialsExtFS(String endpoint) throws Exception {
bundles[0].setClusterInterface(Interfacetype.READONLY, endpoint);
bundles[0].setClusterInterface(Interfacetype.WRITE, endpoint);
AssertUtil.assertFailed(prism.getClusterHelper()
.submitEntity(bundles[0].getClusterElement().toString()));
}
@Test(dataProvider = "getData")
public void replicateToExternalFS(final FileSystem externalFS,
final String separator, final boolean withData) throws Exception {
final String endpoint = externalFS.getUri().toString();
Bundle.submitCluster(bundles[0], externalBundle);
String startTime = TimeUtil.getTimeWrtSystemTime(0);
String endTime = TimeUtil.addMinsToTime(startTime, 5);
LOGGER.info("Time range between : " + startTime + " and " + endTime);
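        // The resulting pattern is e.g. "${YEAR}/${MONTH}/${DAY}/${HOUR}/${MINUTE}" when the
        // separator is "/" (nested directories), or "${YEAR}-${MONTH}-${DAY}-${HOUR}-${MINUTE}"
        // when it is "-" (one flat directory per instance).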
        String datePattern = StringUtils.join(
new String[]{"${YEAR}", "${MONTH}", "${DAY}", "${HOUR}", "${MINUTE}"}, separator);
//configure feed
FeedMerlin feed = new FeedMerlin(bundles[0].getDataSets().get(0));
String targetDataLocation = endpoint + testWasbTargetDir + datePattern;
feed.setFilePath(sourcePath + '/' + datePattern);
//erase all clusters from feed definition
feed.clearFeedClusters();
//set local cluster as source
feed.addFeedCluster(
new FeedMerlin.FeedClusterBuilder(Util.readEntityName(bundles[0].getClusters().get(0)))
.withRetention("days(1000000)", ActionType.DELETE)
.withValidity(startTime, endTime)
.withClusterType(ClusterType.SOURCE)
.build());
//set externalFS cluster as target
feed.addFeedCluster(
new FeedMerlin.FeedClusterBuilder(Util.readEntityName(externalBundle.getClusters().get(0)))
.withRetention("days(1000000)", ActionType.DELETE)
.withValidity(startTime, endTime)
.withClusterType(ClusterType.TARGET)
.withDataLocation(targetDataLocation)
.build());
//submit and schedule feed
LOGGER.info("Feed : " + Util.prettyPrintXml(feed.toString()));
AssertUtil.assertSucceeded(prism.getFeedHelper().submitAndSchedule(feed.toString()));
datePattern = StringUtils.join(new String[]{"yyyy", "MM", "dd", "HH", "mm"}, separator);
//upload necessary data
DateTime date = new DateTime(startTime, DateTimeZone.UTC);
DateTimeFormatter fmt = DateTimeFormat.forPattern(datePattern);
String timePattern = fmt.print(date);
HadoopUtil.recreateDir(clusterFS, sourcePath + '/' + timePattern);
if (withData) {
HadoopUtil.copyDataToFolder(clusterFS, sourcePath + '/' + timePattern, OSUtil.SINGLE_FILE);
}
Path srcPath = new Path(sourcePath + '/' + timePattern);
Path dstPath = new Path(endpoint + testWasbTargetDir + '/' + timePattern);
//check if coordinator exists
TimeUtil.sleepSeconds(10);
InstanceUtil.waitTillInstancesAreCreated(clusterOC, feed.toString(), 0);
Assert.assertEquals(OozieUtil.checkIfFeedCoordExist(clusterOC, feed.getName(), "REPLICATION"), 1);
//replication should start, wait while it ends
InstanceUtil.waitTillInstanceReachState(clusterOC, Util.readEntityName(feed.toString()), 1,
CoordinatorAction.Status.SUCCEEDED, EntityType.FEED);
//check if data has been replicated correctly
List<Path> cluster1ReplicatedData =
HadoopUtil.getAllFilesRecursivelyHDFS(clusterFS, srcPath);
List<Path> cluster2ReplicatedData =
HadoopUtil.getAllFilesRecursivelyHDFS(externalFS, dstPath);
AssertUtil.checkForListSizes(cluster1ReplicatedData, cluster2ReplicatedData);
final ContentSummary srcSummary = clusterFS.getContentSummary(srcPath);
final ContentSummary dstSummary = externalFS.getContentSummary(dstPath);
Assert.assertEquals(dstSummary.getLength(), srcSummary.getLength());
}
@DataProvider
public Object[][] getData() {
        // "-" for a single directory, "/" for a directory tree with subdirs
return MatrixUtil.crossProduct(new FileSystem[]{wasbFS},
new String[]{"/", "-"},
new Boolean[]{true, false});
}
@DataProvider
public Object[][] getInvalidTargets() {
return new Object[][]{{"wasb://invalid@invalid.blob.core.windows.net/"}};
}
}
| apache-2.0 |
jianglili007/jena | jena-arq/src/main/java/org/apache/jena/riot/WriterGraphRIOTFactory.java | 984 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.riot;
/** Factory for creating graph writers */
public interface WriterGraphRIOTFactory
{
public WriterGraphRIOT create(RDFFormat syntaxForm) ;
}
| apache-2.0 |
JetBrains/jdk8u_jdk | test/java/security/ProtectionDomain/PreserveCombinerTest.java | 2657 | /*
* Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.DomainCombiner;
import java.security.PrivilegedAction;
import java.security.ProtectionDomain;
import sun.misc.SharedSecrets;
/*
* @test
* @bug 8064331
* @summary Make sure that JavaSecurityAccess.doIntersectionPrivilege()
* is not dropping the information about the domain combiner of
* the stack ACC
*/
public class PreserveCombinerTest {
public static void main(String[]args) throws Exception {
final DomainCombiner dc = new DomainCombiner() {
@Override
public ProtectionDomain[] combine(ProtectionDomain[] currentDomains, ProtectionDomain[] assignedDomains) {
return currentDomains; // basically a no-op
}
};
// Get an instance of the saved ACC
AccessControlContext saved = AccessController.getContext();
// Simulate the stack ACC with a DomainCombiner attached
AccessControlContext stack = new AccessControlContext(AccessController.getContext(), dc);
// Now try to run JavaSecurityAccess.doIntersectionPrivilege() and assert
// whether the DomainCombiner from the stack ACC is preserved
boolean ret = SharedSecrets.getJavaSecurityAccess().doIntersectionPrivilege(new PrivilegedAction<Boolean>() {
@Override
public Boolean run() {
return dc == AccessController.getContext().getDomainCombiner();
}
}, stack, saved);
if (!ret) {
System.exit(1);
}
}
}
| gpl-2.0 |
bbcarchdev/qpid-proton | proton-j/contrib/proton-hawtdispatch/src/main/java/org/apache/qpid/proton/hawtdispatch/api/AmqpConnection.java | 5887 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.qpid.proton.hawtdispatch.api;
import org.apache.qpid.proton.amqp.transport.ErrorCondition;
import org.apache.qpid.proton.hawtdispatch.impl.AmqpListener;
import org.apache.qpid.proton.hawtdispatch.impl.AmqpTransport;
import org.apache.qpid.proton.engine.Delivery;
import org.apache.qpid.proton.engine.Endpoint;
import org.apache.qpid.proton.engine.ProtonJConnection;
import org.apache.qpid.proton.engine.ProtonJSession;
import org.apache.qpid.proton.engine.impl.ProtocolTracer;
import org.fusesource.hawtdispatch.DispatchQueue;
import org.fusesource.hawtdispatch.Task;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
/**
* @author <a href="http://hiramchirino.com">Hiram Chirino</a>
*/
public class AmqpConnection extends AmqpEndpointBase {
AmqpTransport transport;
ProtonJConnection connection;
HashSet<AmqpSender> senders = new HashSet<AmqpSender>();
boolean closing = false;
public static AmqpConnection connect(AmqpConnectOptions options) {
return new AmqpConnection(options);
}
private AmqpConnection(AmqpConnectOptions options) {
transport = AmqpTransport.connect(options);
transport.setListener(new AmqpListener() {
@Override
public void processDelivery(Delivery delivery) {
Attachment attachment = (Attachment) getTransport().context(delivery.getLink()).getAttachment();
AmqpLink link = (AmqpLink) attachment.endpoint();
link.processDelivery(delivery);
}
@Override
public void processRefill() {
for(AmqpSender sender: new ArrayList<AmqpSender>(senders)) {
sender.pumpDeliveries();
}
pumpOut();
}
public void processTransportFailure(final IOException e) {
}
});
connection = transport.connection();
connection.open();
attach();
}
public void waitForConnected() throws Exception {
assertNotOnDispatchQueue();
getConnectedFuture().await();
}
public Future<Void> getConnectedFuture() {
final Promise<Void> rc = new Promise<Void>();
queue().execute(new Task() {
@Override
public void run() {
onConnected(rc);
}
});
return rc;
}
public void onConnected(Callback<Void> cb) {
transport.onTransportConnected(cb);
}
@Override
protected Endpoint getEndpoint() {
return connection;
}
@Override
protected AmqpConnection getConnection() {
return this;
}
@Override
protected AmqpEndpointBase getParent() {
return null;
}
public AmqpSession createSession() {
assertExecuting();
ProtonJSession session = connection.session();
session.open();
pumpOut();
return new AmqpSession(this, session);
}
public int getMaxSessions() {
return connection.getMaxChannels();
}
public void disconnect() {
closing = true;
transport.disconnect();
}
public void waitForDisconnected() throws Exception {
assertNotOnDispatchQueue();
getDisconnectedFuture().await();
}
public Future<Void> getDisconnectedFuture() {
final Promise<Void> rc = new Promise<Void>();
queue().execute(new Task() {
@Override
public void run() {
onDisconnected(rc);
}
});
return rc;
}
public void onDisconnected(Callback<Void> cb) {
transport.onTransportDisconnected(cb);
}
public TransportState getTransportState() {
return transport.getState();
}
public Throwable getTransportFailure() {
return transport.getFailure();
}
public Future<Throwable> getTransportFailureFuture() {
final Promise<Throwable> rc = new Promise<Throwable>();
queue().execute(new Task() {
@Override
public void run() {
onTransportFailure(rc);
}
});
return rc;
}
public void onTransportFailure(Callback<Throwable> cb) {
transport.onTransportFailure(cb);
}
@Override
public DispatchQueue queue() {
return super.queue();
}
public void setProtocolTracer(ProtocolTracer protocolTracer) {
transport.setProtocolTracer(protocolTracer);
}
public ProtocolTracer getProtocolTracer() {
return transport.getProtocolTracer();
}
/**
     * Once the remote end closes, the transport is disconnected.
*/
@Override
public void close() {
super.close();
onRemoteClose(new Callback<ErrorCondition>() {
@Override
public void onSuccess(ErrorCondition value) {
disconnect();
}
@Override
public void onFailure(Throwable value) {
disconnect();
}
});
}
}
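// A rough usage sketch, assuming "options" has been populated elsewhere with the broker address
// and credentials; createSession() asserts that it runs on the connection's dispatch queue:
//
//     final AmqpConnection connection = AmqpConnection.connect(options);
//     connection.waitForConnected();
//     connection.queue().execute(new Task() {
//         public void run() {
//             AmqpSession session = connection.createSession();
//             // create senders/receivers on the session here
//         }
//     });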
| apache-2.0 |
alexsnaps/ehcache3 | impl/src/main/java/org/ehcache/impl/config/loaderwriter/package-info.java | 843 | /*
* Copyright Terracotta, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Package for configuration classes for default {@link org.ehcache.spi.loaderwriter.CacheLoaderWriter loader writer}
* {@link org.ehcache.spi.loaderwriter.CacheLoaderWriterProvider provider}.
*/
package org.ehcache.impl.config.loaderwriter; | apache-2.0 |
gufengwyx8/Mycat-Server | src/main/java/io/mycat/server/config/ConfigUtil.java | 7844 | /*
* Copyright (c) 2013, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
 * This code is free software; designed and developed mainly by many Chinese
 * opensource volunteers. You can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https://code.google.com/p/opencloudb/.
*
*/
package io.mycat.server.config;
import io.mycat.server.config.cluster.BeanConfig;
import io.mycat.util.StringUtil;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.EntityResolver;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
/**
* @author mycat
*/
public class ConfigUtil {
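    /**
     * Replaces <code>${name}</code> placeholders in the given text with values from the system
     * properties (or from the supplied {@link Properties} in the two-argument variant);
     * placeholders with no matching property are left untouched. A sketch of the intended use,
     * assuming the JVM was started with <code>-Dmycat.home=/opt/mycat</code>:
     * <pre>{@code
     * ConfigUtil.filter("${mycat.home}/conf/schema.xml");  // -> "/opt/mycat/conf/schema.xml"
     * }</pre>
     */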
public static String filter(String text) {
return filter(text, System.getProperties());
}
public static String filter(String text, Properties properties) {
StringBuilder s = new StringBuilder();
int cur = 0;
int textLen = text.length();
int propStart = -1;
int propStop = -1;
String propName = null;
String propValue = null;
for (; cur < textLen; cur = propStop + 1) {
propStart = text.indexOf("${", cur);
if (propStart < 0) {
break;
}
s.append(text.substring(cur, propStart));
propStop = text.indexOf("}", propStart);
if (propStop < 0) {
throw new ConfigException("Unterminated property: " + text.substring(propStart));
}
propName = text.substring(propStart + 2, propStop);
propValue = properties.getProperty(propName);
if (propValue == null) {
s.append("${").append(propName).append('}');
} else {
s.append(propValue);
}
}
return s.append(text.substring(cur)).toString();
}
public static Document getDocument(final InputStream dtd, InputStream xml) throws ParserConfigurationException,
SAXException, IOException {
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
//factory.setValidating(false);
factory.setNamespaceAware(false);
DocumentBuilder builder = factory.newDocumentBuilder();
builder.setEntityResolver(new EntityResolver() {
@Override
public InputSource resolveEntity(String publicId, String systemId) {
return new InputSource(dtd);
}
});
builder.setErrorHandler(new ErrorHandler() {
@Override
public void warning(SAXParseException e) {
}
@Override
public void error(SAXParseException e) throws SAXException {
throw e;
}
@Override
public void fatalError(SAXParseException e) throws SAXException {
throw e;
}
});
return builder.parse(xml);
}
public static Map<String, Object> loadAttributes(Element e) {
Map<String, Object> map = new HashMap<String, Object>();
NamedNodeMap nm = e.getAttributes();
for (int j = 0; j < nm.getLength(); j++) {
Node n = nm.item(j);
if (n instanceof Attr) {
Attr attr = (Attr) n;
map.put(attr.getName(), attr.getNodeValue());
}
}
return map;
}
public static Element loadElement(Element parent, String tagName) {
NodeList nodeList = parent.getElementsByTagName(tagName);
if (nodeList.getLength() > 1) {
throw new ConfigException(tagName + " elements length over one!");
}
if (nodeList.getLength() == 1) {
return (Element) nodeList.item(0);
} else {
return null;
}
}
public static Map<String, Object> loadElements(Element parent) {
Map<String, Object> map = new HashMap<String, Object>();
NodeList children = parent.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node node = children.item(i);
if (node instanceof Element) {
Element e = (Element) node;
String name = e.getNodeName();
if ("property".equals(name)) {
String key = e.getAttribute("name");
NodeList nl = e.getElementsByTagName("bean");
if (nl.getLength() == 0) {
String value = e.getTextContent();
map.put(key, StringUtil.isEmpty(value) ? null : value.trim());
} else {
map.put(key, loadBean((Element) nl.item(0)));
}
}
}
}
return map;
}
public static LinkedHashMap<String, Object> loadLinkElements(Element parent) {
LinkedHashMap<String, Object> map = new LinkedHashMap<String, Object>();
NodeList children = parent.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node node = children.item(i);
if (node instanceof Element) {
Element e = (Element) node;
String name = e.getNodeName();
if ("property".equals(name)) {
String key = e.getAttribute("name");
NodeList nl = e.getElementsByTagName("bean");
if (nl.getLength() == 0) {
String value = e.getTextContent();
map.put(key, StringUtil.isEmpty(value) ? null : value.trim());
} else {
map.put(key, loadBean((Element) nl.item(0)));
}
}
}
}
return map;
}
public static BeanConfig loadBean(Element parent, String tagName) {
NodeList nodeList = parent.getElementsByTagName(tagName);
if (nodeList.getLength() > 1) {
throw new ConfigException(tagName + " elements length over one!");
}
return loadBean((Element) nodeList.item(0));
}
public static BeanConfig loadBean(Element e) {
if (e == null) {
return null;
}
BeanConfig bean = new BeanConfig();
bean.setName(e.getAttribute("name"));
Element element = loadElement(e, "className");
if (element != null) {
bean.setClassName(element.getTextContent());
} else {
bean.setClassName(e.getAttribute("class"));
}
bean.setParams(loadElements(e));
return bean;
}
} | gpl-2.0 |
marsorp/blog | presto166/presto-raptor/src/main/java/com/facebook/presto/raptor/metadata/ShardPredicate.java | 8478 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.raptor.metadata;
import com.facebook.presto.raptor.RaptorColumnHandle;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.predicate.Domain;
import com.facebook.presto.spi.predicate.Range;
import com.facebook.presto.spi.predicate.Ranges;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.spi.type.Type;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slice;
import java.sql.JDBCType;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;
import java.util.Map.Entry;
import java.util.StringJoiner;
import static com.facebook.presto.raptor.metadata.DatabaseShardManager.maxColumn;
import static com.facebook.presto.raptor.metadata.DatabaseShardManager.minColumn;
import static com.facebook.presto.raptor.storage.ColumnIndexStatsUtils.jdbcType;
import static com.facebook.presto.raptor.storage.ShardStats.truncateIndexValue;
import static com.facebook.presto.raptor.util.Types.checkType;
import static com.facebook.presto.raptor.util.UuidUtil.uuidStringToBytes;
import static com.facebook.presto.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.getOnlyElement;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
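// Rough usage sketch: the generated predicate is appended to a shard-index query and the
// captured values are then bound to the prepared statement. The table name and surrounding
// DAO code are assumptions:
//
//     ShardPredicate predicate = ShardPredicate.create(constraint, bucketed);
//     String sql = "SELECT shard_uuid FROM " + shardIndexTable + " WHERE " + predicate.getPredicate();
//     try (PreparedStatement statement = connection.prepareStatement(sql)) {
//         predicate.bind(statement);
//         // execute the query and read the matching shards
//     }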
class ShardPredicate
{
private final String predicate;
private final List<JDBCType> types;
private final List<Object> values;
private ShardPredicate(String predicate, List<JDBCType> types, List<Object> values)
{
this.predicate = requireNonNull(predicate, "predicate is null");
this.types = ImmutableList.copyOf(requireNonNull(types, "types is null"));
this.values = ImmutableList.copyOf(requireNonNull(values, "values is null"));
checkArgument(types.size() == values.size(), "types and values sizes do not match");
}
public String getPredicate()
{
return predicate;
}
public void bind(PreparedStatement statement)
throws SQLException
{
for (int i = 0; i < types.size(); i++) {
JDBCType type = types.get(i);
Object value = values.get(i);
bindValue(statement, type, value, i + 1);
}
}
@Override
public String toString()
{
return toStringHelper(this)
.addValue(predicate)
.toString();
}
public static ShardPredicate create(TupleDomain<RaptorColumnHandle> tupleDomain, boolean bucketed)
{
StringJoiner predicate = new StringJoiner(" AND ").setEmptyValue("true");
ImmutableList.Builder<JDBCType> types = ImmutableList.builder();
ImmutableList.Builder<Object> values = ImmutableList.builder();
for (Entry<RaptorColumnHandle, Domain> entry : tupleDomain.getDomains().get().entrySet()) {
Domain domain = entry.getValue();
if (domain.isNullAllowed() || domain.isAll()) {
continue;
}
RaptorColumnHandle handle = entry.getKey();
Type type = handle.getColumnType();
JDBCType jdbcType = jdbcType(type);
if (jdbcType == null) {
continue;
}
if (handle.isShardUuid()) {
predicate.add(createShardPredicate(types, values, domain, jdbcType));
continue;
}
if (!domain.getType().isOrderable()) {
continue;
}
Ranges ranges = domain.getValues().getRanges();
// TODO: support multiple ranges
if (ranges.getRangeCount() != 1) {
continue;
}
Range range = getOnlyElement(ranges.getOrderedRanges());
Object minValue = null;
Object maxValue = null;
if (range.isSingleValue()) {
minValue = range.getSingleValue();
maxValue = range.getSingleValue();
}
else {
if (!range.getLow().isLowerUnbounded()) {
minValue = range.getLow().getValue();
}
if (!range.getHigh().isUpperUnbounded()) {
maxValue = range.getHigh().getValue();
}
}
String min;
String max;
if (handle.isBucketNumber()) {
if (!bucketed) {
predicate.add("false");
continue;
}
min = "bucket_number";
max = "bucket_number";
}
else {
min = minColumn(handle.getColumnId());
max = maxColumn(handle.getColumnId());
}
if (minValue != null) {
predicate.add(format("(%s >= ? OR %s IS NULL)", max, max));
types.add(jdbcType);
values.add(minValue);
}
if (maxValue != null) {
predicate.add(format("(%s <= ? OR %s IS NULL)", min, min));
types.add(jdbcType);
values.add(maxValue);
}
}
return new ShardPredicate(predicate.toString(), types.build(), values.build());
}
private static String createShardPredicate(ImmutableList.Builder<JDBCType> types, ImmutableList.Builder<Object> values, Domain domain, JDBCType jdbcType)
{
List<Range> ranges = domain.getValues().getRanges().getOrderedRanges();
// only apply predicates if all ranges are single values
if (ranges.isEmpty() || !ranges.stream().allMatch(Range::isSingleValue)) {
return "true";
}
ImmutableList.Builder<Object> valuesBuilder = ImmutableList.builder();
ImmutableList.Builder<JDBCType> typesBuilder = ImmutableList.builder();
StringJoiner rangePredicate = new StringJoiner(" OR ");
for (Range range : ranges) {
Slice uuidText = checkType(range.getSingleValue(), Slice.class, "uuid");
try {
Slice uuidBytes = uuidStringToBytes(uuidText);
typesBuilder.add(jdbcType);
valuesBuilder.add(uuidBytes);
}
catch (IllegalArgumentException e) {
return "true";
}
rangePredicate.add("shard_uuid = ?");
}
types.addAll(typesBuilder.build());
values.addAll(valuesBuilder.build());
return rangePredicate.toString();
}
@VisibleForTesting
protected List<JDBCType> getTypes()
{
return types;
}
@VisibleForTesting
protected List<Object> getValues()
{
return values;
}
public static void bindValue(PreparedStatement statement, JDBCType type, Object value, int index)
throws SQLException
{
if (value == null) {
statement.setNull(index, type.getVendorTypeNumber());
return;
}
switch (type) {
case BOOLEAN:
statement.setBoolean(index, (boolean) value);
return;
case INTEGER:
statement.setInt(index, ((Number) value).intValue());
return;
case BIGINT:
statement.setLong(index, ((Number) value).longValue());
return;
case DOUBLE:
statement.setDouble(index, ((Number) value).doubleValue());
return;
case VARBINARY:
statement.setBytes(index, truncateIndexValue((Slice) value).getBytes());
return;
}
throw new PrestoException(GENERIC_INTERNAL_ERROR, "Unhandled type: " + type);
}
}
| apache-2.0 |
charles-cooper/idylfin | src/org/apache/commons/math3/genetics/RandomKeyMutation.java | 2260 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.genetics;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.math3.exception.MathIllegalArgumentException;
import org.apache.commons.math3.exception.util.LocalizedFormats;
/**
* Mutation operator for {@link RandomKey}s. Changes a randomly chosen element
 * of the array representation to a random value uniformly distributed in [0, 1).
*
* @since 2.0
* @version $Id: RandomKeyMutation.java 1416643 2012-12-03 19:37:14Z tn $
*/
public class RandomKeyMutation implements MutationPolicy {
/**
* {@inheritDoc}
*
* @throws MathIllegalArgumentException if <code>original</code> is not a {@link RandomKey} instance
*/
public Chromosome mutate(final Chromosome original) throws MathIllegalArgumentException {
if (!(original instanceof RandomKey<?>)) {
throw new MathIllegalArgumentException(LocalizedFormats.RANDOMKEY_MUTATION_WRONG_CLASS,
original.getClass().getSimpleName());
}
RandomKey<?> originalRk = (RandomKey<?>) original;
List<Double> repr = originalRk.getRepresentation();
int rInd = GeneticAlgorithm.getRandomGenerator().nextInt(repr.size());
List<Double> newRepr = new ArrayList<Double> (repr);
newRepr.set(rInd, GeneticAlgorithm.getRandomGenerator().nextDouble());
return originalRk.newFixedLengthChromosome(newRepr);
}
}
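// Usage sketch (illustrative; ConcreteRandomKey stands in for any RandomKey subclass defined by
// the caller):
//
//   MutationPolicy mutation = new RandomKeyMutation();
//   Chromosome original = new ConcreteRandomKey(RandomKey.randomPermutation(10));
//   Chromosome mutated = mutation.mutate(original); // one key replaced by a fresh uniform value
//
// RandomKey.randomPermutation(int) is a static helper in this package; passing a chromosome that
// is not a RandomKey throws MathIllegalArgumentException, as documented above.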
| apache-2.0 |
yongminxia/elasticsearch | core/src/main/java/org/elasticsearch/index/query/functionscore/ScoreFunctionParserMapper.java | 3171 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query.functionscore;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionParser;
import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionParser;
import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser;
import org.elasticsearch.index.query.functionscore.lin.LinearDecayFunctionParser;
import org.elasticsearch.index.query.functionscore.random.RandomScoreFunctionParser;
import org.elasticsearch.index.query.functionscore.script.ScriptScoreFunctionParser;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
public class ScoreFunctionParserMapper {
protected Map<String, ScoreFunctionParser> functionParsers;
@Inject
public ScoreFunctionParserMapper(Set<ScoreFunctionParser> parsers) {
Map<String, ScoreFunctionParser> map = new HashMap<>();
// built-in parsers
addParser(new ScriptScoreFunctionParser(), map);
addParser(new GaussDecayFunctionParser(), map);
addParser(new LinearDecayFunctionParser(), map);
addParser(new ExponentialDecayFunctionParser(), map);
addParser(new RandomScoreFunctionParser(), map);
addParser(new FieldValueFactorFunctionParser(), map);
for (ScoreFunctionParser scoreFunctionParser : parsers) {
addParser(scoreFunctionParser, map);
}
this.functionParsers = Collections.unmodifiableMap(map);
}
public ScoreFunctionParser get(QueryParseContext parseContext, String parserName) {
ScoreFunctionParser functionParser = get(parserName);
if (functionParser == null) {
throw new QueryParsingException(parseContext, "No function with the name [" + parserName + "] is registered.");
}
return functionParser;
}
private ScoreFunctionParser get(String parserName) {
return functionParsers.get(parserName);
}
private void addParser(ScoreFunctionParser scoreFunctionParser, Map<String, ScoreFunctionParser> map) {
for (String name : scoreFunctionParser.getNames()) {
map.put(name, scoreFunctionParser);
}
}
}
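// Usage sketch (illustrative): the mapper is created by Guice from the bound ScoreFunctionParser
// set and consulted by the function_score query parser, roughly:
//
//   ScoreFunctionParser parser = mapper.get(parseContext, "random_score");
//   // throws QueryParsingException if no parser is registered under that name
//
// "random_score" is assumed here to be one of the names returned by
// RandomScoreFunctionParser.getNames(); lookups use whatever names each parser reports.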
| apache-2.0 |
Natrezim/perun | perun-registrar-lib/src/main/java/cz/metacentrum/perun/registrar/exceptions/RegistrarException.java | 531 | package cz.metacentrum.perun.registrar.exceptions;
import cz.metacentrum.perun.core.api.exceptions.PerunException;
/**
 * Custom exception thrown by the Registrar.
 * It is processed in the GUI, which displays the raw message text to the user.
*
* @author Pavel Zlamal <256627@mail.muni.cz>
*/
public class RegistrarException extends PerunException {
private static final long serialVersionUID = 1L;
public RegistrarException(String message) {
super(message);
}
public RegistrarException(String message, Throwable ex) {
super(message, ex);
}
}
| bsd-2-clause |
wouterv/orientdb | tests/src/test/java/com/orientechnologies/orient/test/database/auto/ComplexTypesTest.java | 10279 | /*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.test.database.auto;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
import org.testng.Assert;
import org.testng.annotations.Optional;
import org.testng.annotations.Parameters;
import org.testng.annotations.Test;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
@SuppressWarnings("unchecked")
@Test(groups = { "crud", "record-vobject" })
public class ComplexTypesTest extends DocumentDBBaseTest {
@Parameters(value = "url")
public ComplexTypesTest(@Optional String url) {
super(url);
}
@Test
public void testBigDecimal() {
ODocument newDoc = new ODocument();
newDoc.field("integer", new BigInteger("10"));
newDoc.field("decimal_integer", new BigDecimal(10));
newDoc.field("decimal_float", new BigDecimal("10.34"));
database.save(newDoc);
final ORID rid = newDoc.getIdentity();
database.close();
database = new ODatabaseDocumentTx(url).open("admin", "admin");
ODocument loadedDoc = database.load(rid);
Assert.assertEquals(((Number) loadedDoc.field("integer")).intValue(), 10);
Assert.assertEquals(loadedDoc.field("decimal_integer"), new BigDecimal(10));
Assert.assertEquals(loadedDoc.field("decimal_float"), new BigDecimal("10.34"));
}
@Test
public void testEmbeddedList() {
ODocument newDoc = new ODocument();
final ArrayList<ODocument> list = new ArrayList<ODocument>();
newDoc.field("embeddedList", list, OType.EMBEDDEDLIST);
list.add(new ODocument().field("name", "Luca"));
list.add(new ODocument("Account").field("name", "Marcus"));
database.save(newDoc);
final ORID rid = newDoc.getIdentity();
database.close();
database = new ODatabaseDocumentTx(url).open("admin", "admin");
ODocument loadedDoc = database.load(rid);
Assert.assertTrue(loadedDoc.containsField("embeddedList"));
Assert.assertTrue(loadedDoc.field("embeddedList") instanceof List<?>);
Assert.assertTrue(((List<ODocument>) loadedDoc.field("embeddedList")).get(0) instanceof ODocument);
ODocument d = ((List<ODocument>) loadedDoc.field("embeddedList")).get(0);
Assert.assertEquals(d.field("name"), "Luca");
d = ((List<ODocument>) loadedDoc.field("embeddedList")).get(1);
Assert.assertEquals(d.getClassName(), "Account");
Assert.assertEquals(d.field("name"), "Marcus");
}
@Test
public void testLinkList() {
ODocument newDoc = new ODocument();
final ArrayList<ODocument> list = new ArrayList<ODocument>();
newDoc.field("linkedList", list, OType.LINKLIST);
list.add(new ODocument().field("name", "Luca"));
list.add(new ODocument("Account").field("name", "Marcus"));
database.save(newDoc);
final ORID rid = newDoc.getIdentity();
database.close();
database = new ODatabaseDocumentTx(url).open("admin", "admin");
ODocument loadedDoc = database.load(rid);
Assert.assertTrue(loadedDoc.containsField("linkedList"));
Assert.assertTrue(loadedDoc.field("linkedList") instanceof List<?>);
Assert.assertTrue(((List<ODocument>) loadedDoc.field("linkedList")).get(0) instanceof ODocument);
ODocument d = ((List<ODocument>) loadedDoc.field("linkedList")).get(0);
Assert.assertTrue(d.getIdentity().isValid());
Assert.assertEquals(d.field("name"), "Luca");
d = ((List<ODocument>) loadedDoc.field("linkedList")).get(1);
Assert.assertEquals(d.getClassName(), "Account");
Assert.assertEquals(d.field("name"), "Marcus");
}
@Test
public void testEmbeddedSet() {
ODocument newDoc = new ODocument();
final Set<ODocument> set = new HashSet<ODocument>();
newDoc.field("embeddedSet", set, OType.EMBEDDEDSET);
set.add(new ODocument().field("name", "Luca"));
set.add(new ODocument("Account").field("name", "Marcus"));
database.save(newDoc);
final ORID rid = newDoc.getIdentity();
database.close();
database = new ODatabaseDocumentTx(url).open("admin", "admin");
ODocument loadedDoc = database.load(rid);
Assert.assertTrue(loadedDoc.containsField("embeddedSet"));
Assert.assertTrue(loadedDoc.field("embeddedSet", Set.class) instanceof Set<?>);
final Iterator<ODocument> it = ((Collection<ODocument>) loadedDoc.field("embeddedSet")).iterator();
int tot = 0;
while (it.hasNext()) {
ODocument d = it.next();
Assert.assertTrue(d instanceof ODocument);
if (d.field("name").equals("Marcus"))
Assert.assertEquals(d.getClassName(), "Account");
++tot;
}
Assert.assertEquals(tot, 2);
}
@Test
public void testLinkSet() {
ODocument newDoc = new ODocument();
final Set<ODocument> set = new HashSet<ODocument>();
newDoc.field("linkedSet", set, OType.LINKSET);
set.add(new ODocument().field("name", "Luca"));
set.add(new ODocument("Account").field("name", "Marcus"));
database.save(newDoc);
final ORID rid = newDoc.getIdentity();
database.close();
database = new ODatabaseDocumentTx(url).open("admin", "admin");
ODocument loadedDoc = database.load(rid);
Assert.assertTrue(loadedDoc.containsField("linkedSet"));
Assert.assertTrue(loadedDoc.field("linkedSet", Set.class) instanceof Set<?>);
final Iterator<ODocument> it = ((Collection<ODocument>) loadedDoc.field("linkedSet")).iterator();
int tot = 0;
while (it.hasNext()) {
ODocument d = it.next();
Assert.assertTrue(d instanceof ODocument);
if (d.field("name").equals("Marcus"))
Assert.assertEquals(d.getClassName(), "Account");
++tot;
}
Assert.assertEquals(tot, 2);
}
@Test
public void testEmbeddedMap() {
ODocument newDoc = new ODocument();
final Map<String, ODocument> map = new HashMap<String, ODocument>();
newDoc.field("embeddedMap", map, OType.EMBEDDEDMAP);
map.put("Luca", new ODocument().field("name", "Luca"));
map.put("Marcus", new ODocument().field("name", "Marcus"));
map.put("Cesare", new ODocument("Account").field("name", "Cesare"));
database.save(newDoc);
final ORID rid = newDoc.getIdentity();
database.close();
database = new ODatabaseDocumentTx(url).open("admin", "admin");
ODocument loadedDoc = database.load(rid);
Assert.assertTrue(loadedDoc.containsField("embeddedMap"));
Assert.assertTrue(loadedDoc.field("embeddedMap") instanceof Map<?, ?>);
Assert.assertTrue(((Map<String, ODocument>) loadedDoc.field("embeddedMap")).values().iterator().next() instanceof ODocument);
ODocument d = ((Map<String, ODocument>) loadedDoc.field("embeddedMap")).get("Luca");
Assert.assertEquals(d.field("name"), "Luca");
d = ((Map<String, ODocument>) loadedDoc.field("embeddedMap")).get("Marcus");
Assert.assertEquals(d.field("name"), "Marcus");
d = ((Map<String, ODocument>) loadedDoc.field("embeddedMap")).get("Cesare");
Assert.assertEquals(d.field("name"), "Cesare");
Assert.assertEquals(d.getClassName(), "Account");
}
@Test
public void testEmptyEmbeddedMap() {
ODocument newDoc = new ODocument();
final Map<String, ODocument> map = new HashMap<String, ODocument>();
newDoc.field("embeddedMap", map, OType.EMBEDDEDMAP);
database.save(newDoc);
final ORID rid = newDoc.getIdentity();
database.close();
database = new ODatabaseDocumentTx(url).open("admin", "admin");
ODocument loadedDoc = database.load(rid);
Assert.assertTrue(loadedDoc.containsField("embeddedMap"));
Assert.assertTrue(loadedDoc.field("embeddedMap") instanceof Map<?, ?>);
final Map<String, ODocument> loadedMap = loadedDoc.field("embeddedMap");
Assert.assertEquals(loadedMap.size(), 0);
}
@Test
public void testLinkMap() {
ODocument newDoc = new ODocument();
final Map<String, ODocument> map = new HashMap<String, ODocument>();
newDoc.field("linkedMap", map, OType.LINKMAP);
map.put("Luca", new ODocument().field("name", "Luca"));
map.put("Marcus", new ODocument().field("name", "Marcus"));
map.put("Cesare", new ODocument("Account").field("name", "Cesare"));
database.save(newDoc);
final ORID rid = newDoc.getIdentity();
database.close();
database = new ODatabaseDocumentTx(url).open("admin", "admin");
ODocument loadedDoc = database.load(rid);
Assert.assertNotNull(loadedDoc.field("linkedMap", OType.LINKMAP));
Assert.assertTrue(loadedDoc.field("linkedMap") instanceof Map<?, ?>);
Assert.assertTrue(((Map<String, ODocument>) loadedDoc.field("linkedMap")).values().iterator().next() instanceof ODocument);
ODocument d = ((Map<String, ODocument>) loadedDoc.field("linkedMap")).get("Luca");
Assert.assertEquals(d.field("name"), "Luca");
d = ((Map<String, ODocument>) loadedDoc.field("linkedMap")).get("Marcus");
Assert.assertEquals(d.field("name"), "Marcus");
d = ((Map<String, ODocument>) loadedDoc.field("linkedMap")).get("Cesare");
Assert.assertEquals(d.field("name"), "Cesare");
Assert.assertEquals(d.getClassName(), "Account");
}
}
| apache-2.0 |
franz1981/activemq-artemis | artemis-ra/src/main/java/org/apache/activemq/artemis/ra/ActiveMQRAQueueReceiver.java | 1953 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.ra;
import javax.jms.JMSException;
import javax.jms.Queue;
import javax.jms.QueueReceiver;
/**
* A wrapper for a queue receiver
*/
public class ActiveMQRAQueueReceiver extends ActiveMQRAMessageConsumer implements QueueReceiver {
/**
* Whether trace is enabled
*/
private static boolean trace = ActiveMQRALogger.LOGGER.isTraceEnabled();
/**
* Create a new wrapper
*
* @param consumer the queue receiver
* @param session the session
*/
public ActiveMQRAQueueReceiver(final QueueReceiver consumer, final ActiveMQRASession session) {
super(consumer, session);
if (ActiveMQRAQueueReceiver.trace) {
ActiveMQRALogger.LOGGER.trace("constructor(" + consumer + ", " + session + ")");
}
}
/**
* Get queue
*
* @return The queue
* @throws JMSException Thrown if an error occurs
*/
@Override
public Queue getQueue() throws JMSException {
if (ActiveMQRAQueueReceiver.trace) {
ActiveMQRALogger.LOGGER.trace("getQueue()");
}
checkState();
return ((QueueReceiver) consumer).getQueue();
}
}
| apache-2.0 |