options.label_identifiers)
#
# 3. run
#
analyzer.run_all(exp, out_map)
#
# 4. set dataprocessing and output meta information
#
out_map.sortByPosition()
dp = out_map.getDataProcessing()
p = pyopenms.DataProcessing()
p.setProcessingActions(set([pyopenms.ProcessingAction().DATA_PROCESSING,
                            pyopenms.ProcessingAction().PEAK_PICKING,
                            pyopenms.ProcessingAction().FILTERING,
                            pyopenms.ProcessingAction().QUANTITATION]))
p.setCompletionTime(date_time)
sw = p.getSoftware()
sw.setName("SILACAnalyzer")
if options.test:
    sw.setVersion("version_string")
    p.setSoftware(sw)
    p.setMetaValue("parameter: mode", "test_mode")
else:
    sw.setVersion("pyTOPP v1.10")
    p.setSoftware(sw)
dp.append(p)
out_map.setDataProcessing(dp)
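#
# 5. write output
#
# A minimal sketch of this final step, assuming out_map is a pyopenms.ConsensusMap
# and options.outfile holds the consensusXML output path (both assumptions, since
# the snippet above does not show them):
pyopenms.ConsensusXMLFile().store(options.outfile, out_map)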
# tail of the chromatogram-mapping function: the enclosing def and the loops over
# chromatograms and transitions are truncated in this snippet
        chrom.setPrecursor(precursor)
        chrom.setNativeID(transition.getNativeID())
    if not mapped_already:
        notmapped += 1
        print "Did not find a mapping for chromatogram", chrom.getNativeID()
        if not allow_unmapped:
            raise Exception("No mapping")
    else:
        output.addChromatogram(chrom)

if notmapped > 0:
    print "Could not find mapping for", notmapped, "chromatogram(s)"

# mark the output chromatograms with a FORMAT_CONVERSION data processing step
dp = pyopenms.DataProcessing()
pa = pyopenms.ProcessingAction().FORMAT_CONVERSION
dp.setProcessingActions(set([pa]))

chromatograms = output.getChromatograms()
for chrom in chromatograms:
    this_dp = chrom.getDataProcessing()
    this_dp.append(dp)
    chrom.setDataProcessing(this_dp)
output.setChromatograms(chromatograms)

return output
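# Usage sketch for the function ending above; the function name and the output
# path are placeholders, not taken from the snippet itself:
#
#   output = map_chromatograms(chromatogram_map, targeted, allow_unmapped)
#   pyopenms.MzMLFile().store("mapped_chromatograms.mzML", output)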
if options.is_swath:
    do_continue = pyopenms.OpenSwathHelper().checkSwathMapAndSelectTransitions(
        exp, targeted, transition_exp_used, options.min_upper_edge_dist)
else:
    transition_exp_used = targeted
    do_continue = True  # assumption: extraction always proceeds for non-SWATH input

if do_continue:
    # set up extractor and run
    tmp_out = pyopenms.MSExperiment()
    extractor = pyopenms.ChromatogramExtractor()
    extractor.extractChromatograms(exp, tmp_out, targeted, options.extraction_window,
                                   options.ppm, trafo, options.rt_extraction_window,
                                   options.extraction_function)
    # add all chromatograms to the output
    for chrom in tmp_out.getChromatograms():
        output.addChromatogram(chrom)

# mark the output chromatograms with a SMOOTHING data processing step
dp = pyopenms.DataProcessing()
pa = pyopenms.ProcessingAction().SMOOTHING
dp.setProcessingActions(set([pa]))

chromatograms = output.getChromatograms()
for chrom in chromatograms:
    this_dp = chrom.getDataProcessing()
    this_dp.append(dp)
    chrom.setDataProcessing(this_dp)
output.setChromatograms(chromatograms)

pyopenms.MzMLFile().store(options.outfile, output)
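# For context: a minimal, self-contained sketch of how the inputs used above
# (exp, targeted, trafo) are typically prepared; the file names below are
# placeholders, not taken from the snippet itself.
exp = pyopenms.MSExperiment()
pyopenms.MzMLFile().load("input.mzML", exp)           # spectra to extract from
targeted = pyopenms.TargetedExperiment()
pyopenms.TraMLFile().load("assays.TraML", targeted)   # transitions to extract
trafo = pyopenms.TransformationDescription()          # identity RT transformation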
# dispatch on the input type: load the map, annotate it with the peptide/protein
# identifications, record the data processing step, and write the result
if in_type == pms.Type.CONSENSUSXML:
    file_ = pms.ConsensusXMLFile()
    map_ = pms.ConsensusMap()
    file_.load(in_file, map_)
    mapper.annotate(map_, peptide_ids, protein_ids, use_subelements)
    addDataProcessing(map_, params, pms.ProcessingAction.IDENTIFICATION_MAPPING)
    file_.store(out_file, map_)
elif in_type == pms.Type.FEATUREXML:
    file_ = pms.FeatureXMLFile()
    map_ = pms.FeatureMap()
    file_.load(in_file, map_)
    mapper.annotate(map_, peptide_ids, protein_ids, use_centroid_rt,
                    use_centroid_mz)
    addDataProcessing(map_, params, pms.ProcessingAction.IDENTIFICATION_MAPPING)
    file_.store(out_file, map_)
elif in_type == pms.Type.MZQ:
    file_ = pms.MzQuantMLFile()
    msq = pms.MSQuantifications()
    file_.load(in_file, msq)
    maps = msq.getConsensusMaps()
    for map_ in maps:
        mapper.annotate(map_, peptide_ids, protein_ids, use_subelements)
        addDataProcessing(map_, params, pms.ProcessingAction.IDENTIFICATION_MAPPING)
    msq.setConsensusMaps(maps)
    file_.store(out_file, msq)
else:
    raise Exception("invalid input file format")
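# addDataProcessing is defined elsewhere in the script. A minimal sketch of what
# such a helper could look like, reusing the DataProcessing pattern from the
# SILACAnalyzer snippet above; the tool name is an assumption and the params
# argument is ignored here:
def addDataProcessing(map_, params, action):
    dp = pms.DataProcessing()
    dp.setProcessingActions(set([action]))
    sw = dp.getSoftware()
    sw.setName("IDMapper")  # assumed tool name
    dp.setSoftware(sw)
    # append the new entry to the map's existing data processing list
    dps = map_.getDataProcessing()
    dps.append(dp)
    map_.setDataProcessing(dps)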