class Codec:
    """A simple wrapper class around io.deephaven.util.codec.ObjectCodec."""

    def __init__(self, j_codec):
        self.j_codec = j_codec
def codec(codec: Union[str, j_objectcodec]) -> Codec:
    """Create a Codec instance for use with log_table and log_table_incremental.

    :param codec: the codec, given either as a string naming the fully qualified Java class or as an
        instance of a Java io.deephaven.util.codec.ObjectCodec
    :return: a new Codec
    """
    if isinstance(codec, jpy.JType) and j_objectcodec.jclass.isInstance(codec):
        return Codec(codec)
    jclass = jpy.get_type(codec)
    if jclass is None:
        raise DHError("Unable to instantiate codec " + codec)
    return Codec(jclass())
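# Illustrative sketch: build a Codec either from a fully qualified Java class name or from an
# existing Java ObjectCodec instance. The example function name is hypothetical, and it assumes the
# named class is available on the worker's classpath; when given a class name, the class must have a
# no-argument constructor (see jclass() above). The helper functions below wrap the common array codecs.
def _example_create_codec():
    # From a fully qualified class name:
    by_name = codec("io.deephaven.enterprise.codec.IntArrayCodec")
    # From an already-constructed Java ObjectCodec instance:
    j_instance = jpy.get_type("io.deephaven.enterprise.codec.IntArrayCodec")()
    by_instance = codec(j_instance)
    return by_name, by_instance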
def byte_array_codec():
    """Create a new byte array codec.

    :return: a new byte array codec
    """
    return codec("io.deephaven.enterprise.codec.ByteArrayCodec")


def char_array_codec():
    """Create a new char array codec.

    :return: a new char array codec
    """
    return codec("io.deephaven.enterprise.codec.CharArrayCodec")


def short_array_codec():
    """Create a new short array codec.

    :return: a new short array codec
    """
    return codec("io.deephaven.enterprise.codec.ShortArrayCodec")


def int_array_codec():
    """Create a new int array codec.

    :return: a new int array codec
    """
    return codec("io.deephaven.enterprise.codec.IntArrayCodec")


def long_array_codec():
    """Create a new long array codec.

    :return: a new long array codec
    """
    return codec("io.deephaven.enterprise.codec.LongArrayCodec")


def float_array_codec():
    """Create a new float array codec.

    :return: a new float array codec
    """
    return codec("io.deephaven.enterprise.codec.FloatArrayCodec")


def double_array_codec():
    """Create a new double array codec.

    :return: a new double array codec
    """
    return codec("io.deephaven.enterprise.codec.DoubleArrayCodec")


def string_array_codec():
    """Create a new string array codec.

    :return: a new string array codec
    """
    return codec("io.deephaven.enterprise.codec.StringArrayCodec")
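# Illustrative sketch: assemble the optional `codecs` argument for log_table / log_table_incremental
# by mapping column names to codecs. The column names "Prices" and "Tags" and the example function
# name are hypothetical; each named column's values must match the chosen codec's element type.
def _example_codecs_map():
    return {
        "Prices": double_array_codec(),
        "Tags": string_array_codec(),
    }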
def log_table(namespace: str, table_name: str, table: Table, columnPartition: str,
              internalPartition: str = None, applicationVersion: int = None, zone: str = None,
              useLas: bool = True, logDir: str = None, codecs: Dict[str, Codec] = None):
    """Write the table to System storage. The table is logged to Intraday storage and can be retrieved
    with db.live_table. Historical tables should be written using a merge process.

    :param namespace: the namespace of the table
    :param table_name: the name of the table
    :param table: the table to log
    :param columnPartition: the column partition to log to; if None, the current date is used
    :param internalPartition: the internal partition; if None, an internal partition is generated
    :param applicationVersion: the application version; if None, defaults to zero
    :param zone: the time zone ID (as interpreted by java.time.ZoneId.of)
    :param useLas: use the log aggregator service (defaults to True)
    :param logDir: the directory for writing binary log files (useLas must be False)
    :param codecs: an optional map of column name to Codec for encoding the values
    """
    j_db = deephaven_enterprise.database.db.j_db
    opts = j_stl.newOptionsBuilder()
    if columnPartition is None:
        opts.currentDateColumnPartition(True)
    else:
        opts.fixedColumnPartition(columnPartition)
    if internalPartition is not None:
        opts.internalPartition(internalPartition)
    if applicationVersion is not None:
        opts.applicationVersion(applicationVersion)
    if zone is not None:
        opts.zoneId(j_zoneid.of(zone))
    opts.useLas(useLas)
    if logDir is not None:
        opts.logDirectory(logDir)
    if codecs is not None:
        for key, value in codecs.items():
            opts.putColumnCodecs(key, value.j_codec)
    j_stl.logTable(j_db, namespace, table_name, table.j_table, opts.build())
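# Illustrative sketch: log a small static table to System Intraday storage under a fixed column
# partition. The namespace, table name, partition value, and example function name are hypothetical;
# this assumes it runs in a worker where deephaven.empty_table and this module are available.
def _example_log_table():
    from deephaven import empty_table
    quotes = empty_table(5).update(["Sym = `SPY`", "Price = 100.0 + i"])
    log_table("ExampleNamespace", "ExampleQuotes", quotes, columnPartition="2024-01-01")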
def log_table_incremental(namespace: str, table_name: str, table: Table, columnPartition: str,
                          internalPartition: str = None, applicationVersion: int = None, zone: str = None,
                          useLas: bool = True, logDir: str = None, codecs: Dict[str, Codec] = None):
    """Write the table to System storage. The table is logged to Intraday storage and can be retrieved
    with db.live_table. Historical tables should be written using a merge process.

    Rows in the table must not be modified; modifications are an error. If the table is not a blink
    table, then removals are also an error.

    :param namespace: the namespace of the table
    :param table_name: the name of the table
    :param table: the table to log
    :param columnPartition: the column partition to log to; if None, the current date is used
    :param internalPartition: the internal partition; if None, an internal partition is generated
    :param applicationVersion: the application version; if None, defaults to zero
    :param zone: the time zone ID (as interpreted by java.time.ZoneId.of)
    :param useLas: use the log aggregator service (defaults to True)
    :param logDir: the directory for writing binary log files (useLas must be False)
    :param codecs: an optional map of column name to Codec for encoding the values
    :return: a context manager that can be used in a with statement; alternatively, call close() when
        logging is complete. Users should hold this return value to ensure liveness for writing.
    """
    j_db = deephaven_enterprise.database.db.j_db
    opts = j_stl.newOptionsBuilder()
    if columnPartition is None:
        opts.currentDateColumnPartition(True)
    else:
        opts.fixedColumnPartition(columnPartition)
    if internalPartition is not None:
        opts.internalPartition(internalPartition)
    if applicationVersion is not None:
        opts.applicationVersion(applicationVersion)
    if zone is not None:
        opts.zoneId(j_zoneid.of(zone))
    opts.useLas(useLas)
    if logDir is not None:
        opts.logDirectory(logDir)
    if codecs is not None:
        for key, value in codecs.items():
            opts.putColumnCodecs(key, value.j_codec)
    closeable = j_stl.logTableIncremental(j_db, namespace, table_name, table.j_table, opts.build())
    return AutoCloseable(closeable)
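# Illustrative sketch: incrementally log a ticking, append-only table, hold the returned handle while
# rows should continue to be written, then close it to stop logging. The namespace, table name,
# partition value, and example function name are hypothetical; this assumes deephaven.time_table is
# available in the worker.
def _example_log_table_incremental():
    from deephaven import time_table
    events = time_table("PT1s").update(["X = i"])
    handle = log_table_incremental("ExampleNamespace", "ExampleEvents", events,
                                   columnPartition="2024-01-01")
    # ... later, once no more rows should be logged, release the handle:
    handle.close()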