Java source code examples: com.sun.xml.internal.fastinfoset.stax.StAXDocumentSerializer
Example 1
public void parse(InputStream xml, OutputStream finf, String workingDirectory) throws Exception {
    // Serialize to the Fast Infoset output stream through the StAX writer.
    StAXDocumentSerializer documentSerializer = new StAXDocumentSerializer();
    documentSerializer.setOutputStream(finf);

    // Bridge SAX events from the parser into StAX calls on the serializer.
    SAX2StAXWriter saxTostax = new SAX2StAXWriter(documentSerializer);

    SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
    saxParserFactory.setNamespaceAware(true);
    SAXParser saxParser = saxParserFactory.newSAXParser();

    XMLReader reader = saxParser.getXMLReader();
    // Register the bridge as both lexical handler and content handler so
    // comments, CDATA sections and DTD events are forwarded as well.
    reader.setProperty("http://xml.org/sax/properties/lexical-handler", saxTostax);
    reader.setContentHandler(saxTostax);
    if (workingDirectory != null) {
        reader.setEntityResolver(createRelativePathResolver(workingDirectory));
    }
    reader.parse(new InputSource(xml));

    xml.close();
    finf.close();
}
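This parse method converts an XML document to its Fast Infoset encoding by feeding SAX events through the SAX2StAXWriter bridge into the StAX serializer. A minimal calling sketch; the class name XmlToFastInfosetConverter and the file names are placeholders, not part of the original example:

// Hypothetical driver for the parse(...) method above.
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;

public class ConvertExample {
    public static void main(String[] args) throws Exception {
        // Assumed name of the class that declares parse(...).
        XmlToFastInfosetConverter converter = new XmlToFastInfosetConverter();
        try (InputStream xml = new FileInputStream("input.xml");
             OutputStream finf = new FileOutputStream("output.finf")) {
            // Passing null skips installing the relative-path entity resolver.
            converter.parse(xml, finf, null);
        }
    }
}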
Example 2
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out,
        boolean retainState, int indexedStringSizeLimit, int stringsMemoryLimit) {
    StAXDocumentSerializer serializer = new StAXDocumentSerializer(out);
    if (retainState) {
        /*
         * Create a serializer vocabulary external to the serializer. This
         * ensures the vocabulary is never cleared between serializations and
         * is retained (and grows) across them.
         */
        SerializerVocabulary vocabulary = new SerializerVocabulary();
        serializer.setVocabulary(vocabulary);
        serializer.setMinAttributeValueSize(0);
        serializer.setMaxAttributeValueSize(indexedStringSizeLimit);
        serializer.setMinCharacterContentChunkSize(0);
        serializer.setMaxCharacterContentChunkSize(indexedStringSizeLimit);
        serializer.setAttributeValueMapMemoryLimit(stringsMemoryLimit);
        serializer.setCharacterContentChunkMapMemoryLimit(stringsMemoryLimit);
    }
    return serializer;
}
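When retainState is true the helper installs an external SerializerVocabulary, so the index tables survive (and keep growing) across documents written with the same serializer; the size and memory-limit setters bound which strings get indexed. Below is a standalone sketch of that reuse pattern, mirroring the helper's body rather than calling the package-private method itself; the element and attribute names are invented, and the vocab import path is assumed to mirror the internal package layout:

// Sketch: one retained-state serializer reused for two documents, so names and
// values indexed while writing the first document also compress the second.
import java.io.ByteArrayOutputStream;
import com.sun.xml.internal.fastinfoset.stax.StAXDocumentSerializer;
import com.sun.xml.internal.fastinfoset.vocab.SerializerVocabulary;

public class RetainedStateSketch {
    private static void writeDoc(StAXDocumentSerializer writer) throws Exception {
        writer.writeStartDocument();
        writer.writeStartElement("order");
        writer.writeAttribute("currency", "EUR");
        writer.writeCharacters("42");
        writer.writeEndElement();
        writer.writeEndDocument();
        writer.flush();
    }

    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream first = new ByteArrayOutputStream();
        StAXDocumentSerializer writer = new StAXDocumentSerializer(first);
        // External vocabulary: retained (and growing) across serializations.
        writer.setVocabulary(new SerializerVocabulary());

        writeDoc(writer);

        // Point the same serializer at a second stream; entries already in the
        // vocabulary can be emitted as index references instead of literals.
        ByteArrayOutputStream second = new ByteArrayOutputStream();
        writer.setOutputStream(second);
        writeDoc(writer);

        System.out.println("first document:  " + first.size() + " bytes");
        System.out.println("second document: " + second.size() + " bytes");
    }
}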
Example 3
public FastInfosetStreamWriterOutput(StAXDocumentSerializer out,
        JAXBContextImpl context) {
    super(out, NoEscapeHandler.theInstance);

    this.fiout = out;
    this.localNames = context.getUTF8NameTable();

    final VocabularyApplicationData vocabAppData = fiout.getVocabularyApplicationData();
    AppData appData = null;
    if (vocabAppData == null || !(vocabAppData instanceof AppData)) {
        appData = new AppData();
        fiout.setVocabularyApplicationData(appData);
    } else {
        appData = (AppData) vocabAppData;
    }

    final TablesPerJAXBContext tablesPerContext = appData.contexts.get(context);
    if (tablesPerContext != null) {
        tables = tablesPerContext;
        /*
         * Obtain the current local name index. This will be used to calculate
         * the maximum index value when serializing for this context.
         */
        tables.clearOrResetTables(out.getLocalNameIndex());
    } else {
        tables = new TablesPerJAXBContext(context, out.getLocalNameIndex());
        appData.contexts.put(context, tables);
    }
}
Example 4
public FastInfosetStreamWriterOutput(StAXDocumentSerializer out,
        JAXBContextImpl context) {
    super(out);

    this.fiout = out;
    this.localNames = context.getUTF8NameTable();

    final VocabularyApplicationData vocabAppData = fiout.getVocabularyApplicationData();
    AppData appData = null;
    if (vocabAppData == null || !(vocabAppData instanceof AppData)) {
        appData = new AppData();
        fiout.setVocabularyApplicationData(appData);
    } else {
        appData = (AppData) vocabAppData;
    }

    final TablesPerJAXBContext tablesPerContext = appData.contexts.get(context);
    if (tablesPerContext != null) {
        tables = tablesPerContext;
        /*
         * Obtain the current local name index. This will be used to calculate
         * the maximum index value when serializing for this context.
         */
        tables.clearOrResetTables(out.getLocalNameIndex());
    } else {
        tables = new TablesPerJAXBContext(context, out.getLocalNameIndex());
        appData.contexts.put(context, tables);
    }
}
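Both constructors above belong to the JAXB RI's Fast Infoset output path: they cache per-JAXBContext index tables on the serializer through its VocabularyApplicationData. From application code this path is normally reached by handing a StAXDocumentSerializer to a JAXB Marshaller; a hedged sketch, with a made-up Invoice class and output file name:

// Sketch: marshalling a JAXB object straight to Fast Infoset by passing the
// marshaller a StAXDocumentSerializer (an XMLStreamWriter). With the JAXB RI
// this kind of writer is typically routed through FastInfosetStreamWriterOutput
// internally; the Invoice class and "invoice.finf" below are placeholders.
import java.io.FileOutputStream;
import java.io.OutputStream;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.bind.annotation.XmlRootElement;
import com.sun.xml.internal.fastinfoset.stax.StAXDocumentSerializer;

public class JaxbToFastInfoset {
    @XmlRootElement
    public static class Invoice {
        public int amount = 100;
    }

    public static void main(String[] args) throws Exception {
        JAXBContext context = JAXBContext.newInstance(Invoice.class);
        Marshaller marshaller = context.createMarshaller();
        try (OutputStream out = new FileOutputStream("invoice.finf")) {
            StAXDocumentSerializer writer = new StAXDocumentSerializer(out);
            marshaller.marshal(new Invoice(), writer);
            writer.flush();
        }
    }
}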
Example 5
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out, boolean retainState) {
    return createNewStreamWriter(out, retainState, DEFAULT_INDEXED_STRING_SIZE_LIMIT, DEFAULT_INDEXED_STRING_MEMORY_LIMIT);
}