虚拟机分析
时间:2023-06-25 13:39:01 | 来源:网站运营
时间:2023-06-25 13:39:01 来源:网站运营
虚拟机分析:虚拟机(Virtual Machine)指通过软件模拟的具有完整硬件系统功能的、运行在一个完全隔离环境中的完整计算机系统。在实体计算机中能够完成的工作在虚拟机中都能够实现。在计算机中创建虚拟机时,需要将实体机的部分硬盘和内存容量作为虚拟机的硬盘和内存容量。每个虚拟机都有独立的CMOS、硬盘和操作系统,可以像使用实体机一样对虚拟机进行操作。
正式版本的 VMProtect 虚拟机有比较严重的混淆(本质上是添加了一种冗余指令),直接使用 IDA 分析非常困难,许多基本块会截断,动态调试也不方便,里面大量的CALL/JMP,跳来跳去。ESI 还有指令的立即数等还有加密,整体复杂度有很大的提升。
对于这种情况如何处理呢?本文以 VMProtect 2.13.8 为例,展示如何在混淆比较严重的情况下找到虚拟机关键结构、快速的分析 Handler,提取出虚拟指令。
首先用 IDA 打开样本,跳到 0x401000 处,看下前几条指令:
其实这些代码并没有什么有用的操作,先向栈中压入一些无用的数据,然后又通过
lea esp, [esp+44h]
把栈顶降回来,相当于把压入的数据弹出来,结果就是什么都没有做。同时使用了大量的 jmp 和 call 将代码切割成若干小块,使 IDA 无法正常分析。
dispatcher 结点的代码如下:
这篇文章介绍的方法,仅是大概方向和思路,想要完全自动化、智能化分析还有大量工作要做,或者说有很长的路要走,如果你也对虚拟机的分析感兴趣,欢迎私下探讨。
下面我放上源码,需要学习的可以带走哈。
package org.easydarwin.easyscreenlive.screen_live.utils;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Environment;
/**
 * H.264 (AVC) encoder backed by Android's {@link MediaCodec}.
 *
 * <p>Frames (NV12 / YUV420SemiPlanar byte arrays) are consumed by a background
 * thread started via {@link #StartEncoderThread()}; the raw H.264 elementary
 * stream is appended to {@code /sdcard/test1.h264}. Codec-config data
 * (SPS/PPS) is cached in {@link #configbyte} and prepended to every key frame
 * so the file is seekable/playable from any key frame.
 *
 * <p>Not thread-safe beyond the single encoder thread plus one controller
 * thread calling {@link #StopThread()}.
 */
public class AvcEncoder
{
    private final static String TAG = "MeidaCodec";

    /** Timeout for {@code dequeueOutputBuffer}, in microseconds. */
    private int TIMEOUT_USEC = 12000;

    private MediaCodec mediaCodec;
    int m_width;
    int m_height;
    int m_framerate;
    byte[] m_info = null;   // currently unused; kept for compatibility
    /** Cached codec-config buffer (SPS/PPS), captured from the first config output. */
    public byte[] configbyte;

    /**
     * Creates and starts an AVC encoder for the given frame geometry.
     *
     * @param width     frame width in pixels
     * @param height    frame height in pixels
     * @param framerate source frame rate, used for presentation timestamps
     * @param bitrate   requested bitrate — NOTE(review): currently ignored;
     *                  the format uses {@code width*height*5} instead. Kept
     *                  as-is to preserve behavior; confirm intent with callers.
     * @throws IllegalStateException if no "video/avc" encoder can be created
     */
    @SuppressLint("NewApi")
    public AvcEncoder(int width, int height, int framerate, int bitrate) {
        m_width = width;
        m_height = height;
        m_framerate = framerate;

        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 5);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

        try {
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
        } catch (IOException e) {
            // FIX: the original swallowed this exception and then hit an NPE
            // on configure(); fail fast with the real cause instead.
            throw new IllegalStateException("cannot create video/avc encoder", e);
        }
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
        createfile();
    }

    private static String path = Environment.getExternalStorageDirectory().getAbsolutePath() + "/test1.h264";
    private BufferedOutputStream outputStream;
    FileOutputStream outStream;   // unused; kept for compatibility

    /** (Re)creates the output file, replacing any previous recording. */
    private void createfile() {
        File file = new File(path);
        if (file.exists()) {
            file.delete();
        }
        try {
            outputStream = new BufferedOutputStream(new FileOutputStream(file));
        } catch (Exception e) {
            // Best-effort: encoding continues but writes will NPE-guard on
            // outputStream being null in StopThread().
            e.printStackTrace();
        }
    }

    /** Stops and releases the codec; safe to call when already stopped. */
    @SuppressLint("NewApi")
    private void StopEncoder() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    ByteBuffer[] inputBuffers;    // unused shadows of the locals in run(); kept for compatibility
    ByteBuffer[] outputBuffers;

    public boolean isRuning = false;

    /** Signals the encoder thread to exit, then tears down the codec and flushes the file. */
    public void StopThread() {
        isRuning = false;
        try {
            StopEncoder();
            // FIX: guard against createfile() having failed (outputStream null).
            if (outputStream != null) {
                outputStream.flush();
                outputStream.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    int count = 0;   // unused; kept for compatibility

    /**
     * Starts the background thread that feeds frames to the codec and drains
     * encoded output to the file until {@link #isRuning} is cleared.
     */
    public void StartEncoderThread() {
        Thread EncoderThread = new Thread(new Runnable() {
            @SuppressLint("NewApi")
            @Override
            public void run() {
                isRuning = true;
                byte[] input = null;
                long pts = 0;
                long generateIndex = 0;
                while (isRuning) {
                    // Frame source intentionally left disabled in the original:
                    // if (MainActivity.YUVQueue.size() >0){
                    //     input = MainActivity.YUVQueue.poll();
                    //     byte[] yuv420sp = new byte[m_width*m_height*3/2];
                    //     NV21ToNV12(input,yuv420sp,m_width,m_height);
                    //     input = yuv420sp;
                    // }
                    if (input != null) {
                        try {
                            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
                            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
                            // NOTE(review): -1 blocks indefinitely; StopThread()
                            // while blocked here relies on stop() unblocking us.
                            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
                            if (inputBufferIndex >= 0) {
                                pts = computePresentationTime(generateIndex);
                                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                                inputBuffer.clear();
                                inputBuffer.put(input);
                                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, pts, 0);
                                generateIndex += 1;
                            }

                            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                            while (outputBufferIndex >= 0) {
                                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                                byte[] outData = new byte[bufferInfo.size];
                                outputBuffer.get(outData);
                                // FIX: flags is a bit field — test bits instead of
                                // equality (2 == BUFFER_FLAG_CODEC_CONFIG,
                                // 1 == BUFFER_FLAG_KEY_FRAME), so frames carrying
                                // combined flags are still classified correctly.
                                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                                    // Cache SPS/PPS for prepending to key frames.
                                    configbyte = outData;
                                } else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
                                    byte[] keyframe = new byte[bufferInfo.size + configbyte.length];
                                    System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
                                    System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
                                    outputStream.write(keyframe, 0, keyframe.length);
                                } else {
                                    outputStream.write(outData, 0, outData.length);
                                }
                                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
                            }
                        } catch (Throwable t) {
                            t.printStackTrace();
                        }
                    } else {
                        // No frame available — idle briefly.
                        try {
                            Thread.sleep(500);
                        } catch (InterruptedException e) {
                            // Restore the interrupt so the loop can be stopped cooperatively.
                            Thread.currentThread().interrupt();
                        }
                    }
                }
            }
        });
        EncoderThread.start();
    }

    /**
     * Converts NV21 (Y plane + interleaved V/U) to NV12 (Y plane + interleaved
     * U/V) by copying the Y plane and swapping each chroma byte pair.
     *
     * <p>FIX: the original indexed chroma with {@code j-1} at {@code j==0},
     * clobbering the last Y byte of the destination and reading a Y byte from
     * the source as chroma; it also copied the Y plane twice.
     */
    private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height) {
        if (nv21 == null || nv12 == null) return;
        int framesize = width * height;
        System.arraycopy(nv21, 0, nv12, 0, framesize);
        for (int j = 0; j < framesize / 2; j += 2) {
            nv12[framesize + j]     = nv21[framesize + j + 1]; // U
            nv12[framesize + j + 1] = nv21[framesize + j];     // V
        }
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
     */
    private long computePresentationTime(long frameIndex) {
        return 132 + frameIndex * 1000000 / m_framerate;
    }
}