本文整理汇总了Java中android.os.SystemClock.elapsedRealtimeNanos方法的典型用法代码示例。如果您正苦于以下问题:Java SystemClock.elapsedRealtimeNanos方法的具体用法?Java SystemClock.elapsedRealtimeNanos怎么用?Java SystemClock.elapsedRealtimeNanos使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类android.os.SystemClock
的用法示例。
在下文中一共展示了SystemClock.elapsedRealtimeNanos方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: getLogTime
import android.os.SystemClock; //导入方法依赖的package包/类
/**
* Returns the current time in either millis or nanos depending on the api level to be used with
* {@link #getElapsedMillis(long)}.
*/
/**
 * Returns the current time in either millis or nanos depending on the api level to be used with
 * {@link #getElapsedMillis(long)}.
 *
 * <p>Both branches now read a monotonic, boot-relative clock so elapsed-time differences are
 * immune to wall-clock adjustments (NTP sync, user changing the device time).
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
public static long getLogTime() {
    if (Build.VERSION_CODES.JELLY_BEAN_MR1 <= Build.VERSION.SDK_INT) {
        return SystemClock.elapsedRealtimeNanos();
    } else {
        // Fix: System.currentTimeMillis() is wall-clock time and can jump backwards or
        // forwards, corrupting elapsed-time math; SystemClock.elapsedRealtime() is the
        // millisecond form of the same monotonic clock used in the nanos branch.
        return SystemClock.elapsedRealtime();
    }
}
示例2: getLogTime
import android.os.SystemClock; //导入方法依赖的package包/类
/**
* Returns the current time in either millis or nanos depending on the api level to be used with
* {@link #getElapsedMillis(long)}.
*/
/**
 * Returns the current time in either millis or nanos depending on the api level to be used with
 * {@link #getElapsedMillis(long)}.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
public static long getLogTime() {
    // On older devices fall back to the millisecond uptime clock.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
        return SystemClock.uptimeMillis();
    }
    // API 17+: nanosecond-resolution monotonic clock.
    return SystemClock.elapsedRealtimeNanos();
}
示例3: audioEncodeStep
import android.os.SystemClock; //导入方法依赖的package包/类
/**
 * Runs one step of the audio encode pipeline: feeds one PCM buffer from the recorder into the
 * encoder, then drains every encoded output buffer currently available into the store.
 *
 * @param isEnd true to mark this input buffer as the end of the stream
 * @return true once the encoder has emitted its end-of-stream buffer (recording is finished)
 */
private synchronized boolean audioEncodeStep(boolean isEnd){
    if(isStarted){
        AvLog.d("audioEncodeStep");
        int inputIndex=mAudioEncoder.dequeueInputBuffer(TIME_OUT);
        if(inputIndex>=0){
            ByteBuffer buffer= CodecUtil.getInputBuffer(mAudioEncoder,inputIndex);
            buffer.clear();
            // Presentation timestamp in microseconds, relative to recording start.
            long time= (SystemClock.elapsedRealtimeNanos()-startTime)/1000;
            int length=mRecord.read(buffer,mRecordBufferSize);
            if(length>=0){
                mAudioEncoder.queueInputBuffer(inputIndex,0,length,time,
                        isEnd?MediaCodec.BUFFER_FLAG_END_OF_STREAM:0);
            }
        }
        MediaCodec.BufferInfo info=new MediaCodec.BufferInfo();
        while (true){
            int outputIndex=mAudioEncoder.dequeueOutputBuffer(info,TIME_OUT);
            if(outputIndex>=0){
                if(mStore!=null){
                    mStore.addData(mAudioTrack,new HardMediaData(CodecUtil.getOutputBuffer(mAudioEncoder,outputIndex),info));
                }
                mAudioEncoder.releaseOutputBuffer(outputIndex,false);
                // Fix: BUFFER_FLAG_END_OF_STREAM is a bit flag and may arrive combined with
                // other flags, so it must be tested bitwise rather than with ==.
                if((info.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
                    AvLog.d("CameraRecorder get audio encode end of stream");
                    stop();
                    return true;
                }
            }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
                // No more output available right now; yield back to the caller's loop.
                break;
            }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                AvLog.d("get audio output format changed ->"+mAudioEncoder.getOutputFormat().toString());
                // NOTE(review): mStore is null-checked above but not here — confirm the store
                // is always set before the encoder reports its first format change.
                mAudioTrack=mStore.addTrack(mAudioEncoder.getOutputFormat());
            }
        }
    }
    return false;
}
示例4: decodeStream
import android.os.SystemClock; //导入方法依赖的package包/类
/**
* Decode an input stream into a bitmap.
*
* This method save input stream to temporary file on cache directory, because HEIF data
* structure requires multi-pass parsing.
*
* @param is The input stream that holds the raw data to be decoded into a bitmap.
* @return The decoded bitmap, or null if the image could not be decoded.
*/
/**
 * Decode an input stream into a bitmap.
 *
 * This method saves the input stream to a temporary file in the cache directory, because the
 * HEIF data structure requires multi-pass parsing.
 *
 * @param is The input stream that holds the raw data to be decoded into a bitmap.
 * @return The decoded bitmap, or null if the image could not be decoded.
 */
public static Bitmap decodeStream(InputStream is) {
    assertPrecondition();
    try {
        // write stream to temporary file
        long beginTime = SystemClock.elapsedRealtimeNanos();
        File heifFile = File.createTempFile("heifreader", "heif", mCacheDir);
        FileOutputStream fos = new FileOutputStream(heifFile);
        try {
            byte[] buf = new byte[4096];
            int totalLength = 0;
            int len;
            while ((len = is.read(buf)) > 0) {
                fos.write(buf, 0, len);
                totalLength += len;
                if (LIMIT_FILESIZE < totalLength) {
                    Log.e(TAG, "data size exceeds limit(" + LIMIT_FILESIZE + ")");
                    // Fix: don't leave the partially written temp file behind in the
                    // cache directory when we bail out.
                    heifFile.delete();
                    return null;
                }
            }
        } finally {
            fos.close();
        }
        long endTime = SystemClock.elapsedRealtimeNanos();
        Log.i(TAG, "HEIC caching elapsed=" + (endTime - beginTime) / 1000000.f + "[msec]");
        return decodeFile(heifFile.getAbsolutePath());
    } catch (IOException ex) {
        Log.e(TAG, "decodeStream failure", ex);
        return null;
    }
}
示例5: getAge
import android.os.SystemClock; //导入方法依赖的package包/类
/**
* @param loc location object
* @return age of event in ns
*/
/**
 * Computes how old a location fix is.
 *
 * @param loc location object
 * @return age of event in ns, or null when no location is given
 */
public static Long getAge(Location loc) {
    if (loc == null) {
        return null;
    }
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
        // Pre-API-18 fallback: wall-clock delta, converted from ms to ns.
        return (System.currentTimeMillis() - loc.getTime()) * 1000000L;
    }
    // API 18+: both timestamps come from the monotonic boot-relative clock.
    return SystemClock.elapsedRealtimeNanos() - loc.getElapsedRealtimeNanos();
}
示例6: start
import android.os.SystemClock; //导入方法依赖的package包/类
/**
 * Starts audio capture and encoding: opens an AudioRecord on the microphone, configures and
 * starts the encoder, then launches a worker thread that pumps audioEncodeStep() until the
 * stop flag is raised, after which it drains the end-of-stream buffer and releases resources.
 */
public void start(){
    if(!isStarted){
        stopFlag=false;
        mRecordBufferSize = AudioRecord.getMinBufferSize(mRecordSampleRate,
                mRecordChannelConfig, mRecordAudioFormat)*2;
        mRecord=new AudioRecord(MediaRecorder.AudioSource.MIC,mRecordSampleRate,mRecordChannelConfig,
                mRecordAudioFormat,mRecordBufferSize);
        mRecord.startRecording();
        try {
            MediaFormat format=convertAudioConfigToFormat(mConfig.mAudio);
            mAudioEncoder=MediaCodec.createEncoderByType(format.getString(MediaFormat.KEY_MIME));
            mAudioEncoder.configure(format,null,null,MediaCodec.CONFIGURE_FLAG_ENCODE);
            mAudioEncoder.start();
        } catch (IOException e) {
            // NOTE(review): if encoder creation fails, mAudioEncoder stays null and the
            // worker thread below will NPE in audioEncodeStep — consider aborting here.
            e.printStackTrace();
        }
        // Fix: initialize the recording state BEFORE starting the worker thread. The original
        // assigned startTime/isStarted after thread.start(), so audioEncodeStep() could run
        // with an uninitialized startTime and produce bogus presentation timestamps.
        startTime=SystemClock.elapsedRealtimeNanos();
        isStarted=true;
        Thread thread=new Thread(new Runnable() {
            @Override
            public void run() {
                while (!stopFlag&&!audioEncodeStep(false)){};
                audioEncodeStep(true);
                Log.e("wuwang","audio stop");
                if(isStarted){
                    mRecord.stop();
                    mRecord.release();
                    mRecord=null;
                }
                if(mAudioEncoder!=null){
                    mAudioEncoder.stop();
                    mAudioEncoder.release();
                    mAudioEncoder=null;
                }
                isStarted=false;
            }
        });
        thread.start();
    }
}
示例7: renderHevcImage
import android.os.SystemClock; //导入方法依赖的package包/类
/**
 * Decodes a single HEVC bitstream (one still frame) and renders it onto the given surface.
 *
 * <p>Feeds the whole bitstream as one input buffer, queues an empty end-of-stream buffer,
 * then blocks until the decoder emits a frame, which is released directly to the surface.
 * The decoder is always stopped and released, even on failure.
 *
 * @param bitstream HEVC elementary-stream data; consumed from position to limit
 * @param info      image parameters used to configure the decoder
 * @param surface   render target for the decoded frame
 * @throws IllegalStateException if the decoder refuses an input buffer
 */
private static void renderHevcImage(ByteBuffer bitstream, ImageInfo info, Surface surface) {
long beginTime = SystemClock.elapsedRealtimeNanos();
// configure HEVC decoder
MediaCodec decoder = configureDecoder(info, bitstream.limit(), surface);
MediaFormat outputFormat = decoder.getOutputFormat();
Log.d(TAG, "HEVC output-format=" + outputFormat);
decoder.start();
try {
// set bitstream to decoder
// Timeout -1 blocks until a buffer is available.
int inputBufferId = decoder.dequeueInputBuffer(-1);
if (inputBufferId < 0) {
throw new IllegalStateException("dequeueInputBuffer return " + inputBufferId);
}
ByteBuffer inBuffer = decoder.getInputBuffer(inputBufferId);
inBuffer.put(bitstream);
decoder.queueInputBuffer(inputBufferId, 0, bitstream.limit(), 0, 0);
// notify end of stream
// A second, empty buffer carries only the EOS flag so the decoder flushes the frame.
inputBufferId = decoder.dequeueInputBuffer(-1);
if (inputBufferId < 0) {
throw new IllegalStateException("dequeueInputBuffer return " + inputBufferId);
}
decoder.queueInputBuffer(inputBufferId, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
// get decoded image
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
while (true) {
// Blocking dequeue; loops past format-change notifications until a real frame arrives.
int outputBufferId = decoder.dequeueOutputBuffer(bufferInfo, -1);
if (outputBufferId >= 0) {
// render=true sends the frame straight to the configured surface.
decoder.releaseOutputBuffer(outputBufferId, true);
break;
} else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
outputFormat = decoder.getOutputFormat();
Log.d(TAG, "HEVC output-format=" + outputFormat);
} else {
Log.d(TAG, "HEVC dequeueOutputBuffer return " + outputBufferId);
}
}
decoder.flush();
} finally {
decoder.stop();
decoder.release();
}
long endTime = SystemClock.elapsedRealtimeNanos();
Log.i(TAG, "HEVC decoding elapsed=" + (endTime - beginTime) / 1000000.f + "[msec]");
}
示例8: getStopwatchSessionId
import android.os.SystemClock; //导入方法依赖的package包/类
/**
 * Builds a stopwatch session id that is unique per request invocation,
 * e.g. {@code "GET /users (123456789)"}.
 */
@VisibleForTesting
protected String getStopwatchSessionId(@NonNull HttpRequest request) {
    final long nowNanos = SystemClock.elapsedRealtimeNanos();
    final StringBuilder id = new StringBuilder(request.method());
    id.append(" ").append(request.url().getPath());
    id.append(" (").append(nowNanos).append(")");
    return id.toString();
}
示例9: getStopwatchSessionId
import android.os.SystemClock; //导入方法依赖的package包/类
/** Returns a per-call stopwatch session id: the method name plus a monotonic nano timestamp. */
private String getStopwatchSessionId(@NonNull String methodName) {
    final long stampNanos = SystemClock.elapsedRealtimeNanos();
    return new StringBuilder(methodName)
            .append(" (")
            .append(stampNanos)
            .append(")")
            .toString();
}
示例10: getStopwatchSessionId
import android.os.SystemClock; //导入方法依赖的package包/类
/** Derives a unique stopwatch session id by tagging the method name with the current nano clock. */
@SuppressWarnings("SameParameterValue")
private String getStopwatchSessionId(@NonNull String methodName) {
    long timestampNanos = SystemClock.elapsedRealtimeNanos();
    return methodName + " (" + timestampNanos + ")";
}