This article collects typical usage examples of the Java method android.os.Handler.getLooper. If you have been wondering what Handler.getLooper does, how to use it, or where to find concrete examples, the curated code samples below may help; you can also read further about the containing class, android.os.Handler.
The following 10 code examples of Handler.getLooper are shown, ordered by popularity.
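Before the examples, a minimal sketch of the idiom most of them rely on may help. Handler.getLooper() returns the Looper the handler dispatches messages on, which lets a caller check whether it is already on that thread, or bind new work to it. The helper name runOnHandlerThread below is hypothetical, chosen for illustration only:

import android.os.Handler;
import android.os.Looper;

// Hypothetical helper: run a task inline if we are already on the
// handler's thread, otherwise post it to that thread.
static void runOnHandlerThread(Handler handler, Runnable task) {
  if (handler.getLooper() == Looper.myLooper()) {
    task.run();
  } else {
    handler.post(task);
  }
}

Example 3 below (dispatch) uses exactly this inline-or-post pattern; examples 2, 4, and 10 use getLooper() to bind a new Handler to an existing thread.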
Example 1: buildRenderers
import android.os.Handler; // import the class this method depends on
@Override
public void buildRenderers(DemoPlayer player) {
  Allocator allocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);
  Handler mainHandler = player.getMainHandler();
  // Build the video and audio renderers.
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, null);
  DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, allocator,
      BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE, mainHandler, player, 0);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      mainHandler, player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
      MediaCodecSelector.DEFAULT, null, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  TrackRenderer textRenderer = new TextTrackRenderer(sampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
Example 2: Mpr121InputDriver
import android.os.Handler; // import the class this method depends on
/**
 * Create a new Mpr121InputDriver to forward capacitive touch events
 * to the Android input framework.
 *
 * @param i2cName I2C port name where the controller is attached. Cannot be null.
 * @param handler optional {@link Handler} for software polling and callback events.
 * @param keyCodes {@link KeyEvent} codes to be emitted for each input channel.
 *                 Length must match the input channel count of the touch controller.
 */
public Mpr121InputDriver(String i2cName, Handler handler, int[] keyCodes) throws IOException {
  // Verify inputs
  if (keyCodes == null) {
    throw new IllegalArgumentException("Must provide a valid set of key codes.");
  }
  this.keycodes = keyCodes;
  this.peripheralDevice = new Mpr121(i2cName);
  // Poll on the supplied handler's thread; fall back to the calling
  // thread's looper when no handler is given.
  this.inputHandler = new Handler(handler == null ? Looper.myLooper() : handler.getLooper());
  this.inputStatus = new boolean[Mpr121.NB_ELECTRODES];
  inputHandler.post(pollingCallback);
}
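A hedged usage sketch for the constructor above: the null-handler fallback relies on Looper.myLooper(), so a null handler only works if the calling thread owns a Looper. Passing an explicit background handler sidesteps that pitfall. The port name "I2C1", the thread name, and the key-code mapping are placeholders, not values from the source.

import android.os.Handler;
import android.os.HandlerThread;
import android.view.KeyEvent;
import java.io.IOException;
import java.util.Arrays;

// Hypothetical caller: poll the touch controller on a dedicated
// background thread instead of relying on the calling thread's looper.
Mpr121InputDriver createDriver() throws IOException {
  HandlerThread sensorThread = new HandlerThread("mpr121-poll"); // placeholder name
  sensorThread.start();
  Handler sensorHandler = new Handler(sensorThread.getLooper());
  int[] keyCodes = new int[Mpr121.NB_ELECTRODES];  // one KeyEvent code per electrode
  Arrays.fill(keyCodes, KeyEvent.KEYCODE_UNKNOWN); // placeholder mapping
  return new Mpr121InputDriver("I2C1", sensorHandler, keyCodes); // "I2C1" is a placeholder port
}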
Example 3: dispatch
import android.os.Handler; // import the class this method depends on
void dispatch(Handler handler, IpcListener listener, Parcelable event, Set<IpcListener> set) {
  this.listener = listener;
  this.event = event;
  this.set = set;
  if (handler.getLooper() == Looper.myLooper()) {
    // Already on the handler's thread; deliver the event synchronously.
    run();
  } else {
    // Hop to the handler's thread.
    handler.post(this);
  }
}
Example 4: Sht1xSensor
import android.os.Handler; // import the class this method depends on
/**
 * Create a new SHT1x sensor driver attached to the given GPIOs.
 *
 * @param gpioData Pin connected to DATA on the sensor.
 * @param gpioSck Pin connected to SCK on the sensor.
 * @param vdd Supply voltage (Vdd) used to power the sensor.
 * @param handler optional {@link Handler} for event callbacks; the calling
 *                thread's looper is used when null.
 * @throws IOException Sensor error
 */
public Sht1xSensor(String gpioData, String gpioSck, float vdd, Handler handler) throws IOException {
  if ((vdd < SHT1X_VDD_MIN) || (vdd > SHT1X_VDD_MAX)) {
    final String msg = String.format("Vdd must be between %.1f and %.1f",
        SHT1X_VDD_MIN, SHT1X_VDD_MAX);
    throw new IllegalArgumentException(msg);
  }
  mD1 = calculateD1(vdd);
  // Get the default handler if handler is not specified.
  mHandler = new Handler(handler == null ? Looper.myLooper() : handler.getLooper());
  // Timer for scheduling measurements.
  mTimer = new Timer();
  mPeripheralManager = new PeripheralManagerService();
  try {
    mGpioData = mPeripheralManager.openGpio(gpioData);
    mGpioData.setActiveType(Gpio.ACTIVE_HIGH);
    mGpioSck = mPeripheralManager.openGpio(gpioSck);
    mGpioSck.setActiveType(Gpio.ACTIVE_HIGH);
    // Reset the connection, in case the sensor is in a strange state.
    connectionReset();
  } catch (IOException | RuntimeException e) {
    try {
      close();
    } catch (IOException | RuntimeException ignored) {
    }
    throw e;
  }
}
Example 5: GestureHandler
import android.os.Handler; // import the class this method depends on
GestureHandler(Handler handler) {
  super(handler.getLooper());
}
Example 6: setHandler
import android.os.Handler; // import the class this method depends on
private void setHandler(Handler handler) {
  this.mHandler = new MessageHandler(handler.getLooper());
}
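Examples 5 and 6 both bind a Handler subclass to an existing handler's looper, so their callbacks run on that handler's thread. The body of MessageHandler is not shown in the source; the following is a hypothetical sketch of what such a subclass typically looks like:

import android.os.Handler;
import android.os.Looper;
import android.os.Message;

// Hypothetical sketch of a Handler subclass bound to an existing looper;
// handleMessage() is invoked on that looper's thread.
private static class MessageHandler extends Handler {
  MessageHandler(Looper looper) {
    super(looper);
  }

  @Override
  public void handleMessage(Message msg) {
    // Process the message on the looper's thread.
  }
}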
Example 7: buildRenderers
import android.os.Handler; // import the class this method depends on
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
Example 8: buildRenderers
import android.os.Handler; // import the class this method depends on
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager<FrameworkMediaCrypto> drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
Example 9: onSingleManifest
import android.os.Handler; // import the class this method depends on
@Override
public void onSingleManifest(SmoothStreamingManifest manifest) {
  if (canceled) {
    return;
  }
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  // Check drm support if necessary.
  DrmSessionManager<FrameworkMediaCrypto> drmSessionManager = null;
  if (manifest.protectionElement != null) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newFrameworkInstance(
          manifest.protectionElement.uuid, player.getPlaybackLooper(), drmCallback, null,
          player.getMainHandler(), player);
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newVideoInstance(context, true, false),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newAudioInstance(),
      audioDataSource, null, LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newTextInstance(),
      textDataSource, null, LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
Example 10: HpmSensorDriver
import android.os.Handler; // import the class this method depends on
/**
 * Create a new HPM sensor driver connected to the given UART.
 * The driver emits {@link Sensor} events with PM2.5 and PM10 particle
 * counts when registered.
 *
 * @param uartDevice Name of the UART device the sensor is connected to.
 * @param handler optional {@link Handler} for event callbacks; the calling
 *                thread's looper is used when null.
 * @throws IOException Sensor error
 * @see #registerParticleSensor()
 */
public HpmSensorDriver(String uartDevice, Handler handler) throws IOException {
  // Fall back to the calling thread's looper when no handler is given.
  mHandler = new Handler(handler == null ? Looper.myLooper() : handler.getLooper());
  mDevice = new HpmSensor(uartDevice, mHandler);
}