I'm using the Google sample https://developers.google.com/ar/develop/java/augmented-images/ and I want to modify it so that it can display a bitmap over an AugmentedImage.
Currently I'm able to display a bitmap on android.opengl.GLSurfaceView,
but I have no idea how to position it so that it matches the visible augmented image.
This is the way I'm displaying the bitmap:
/**
 * Renders a 2D textured quad whose texture is a bitmap generated from a TextView
 * ("DEMO TEXT DEMO"), intended to be drawn over an ARCore AugmentedImage.
 *
 * All methods that touch GL state must be called on the GL thread.
 *
 * NOTE(review): the class name keeps the original spelling ("Renerer", sic) so
 * existing callers continue to compile; consider renaming to BitmapRenderer in a
 * coordinated refactor.
 */
class BitmapRenerer {
    private static final String TAG = BitmapRenerer.class.getSimpleName();

    /** Activity context, used for shader-asset loading and View construction. */
    private final Context context;

    // Texture state.
    private final FloatBuffer cubeTextureCoordinates;
    private int textureDataHandle;

    private final int shaderProgram;
    private final FloatBuffer vertexBuffer;
    private final ShortBuffer drawListBuffer;

    /** Number of coordinates per vertex in spriteCoords (x, y). */
    private static final int COORDS_PER_VERTEX = 2;
    private static final int BYTES_PER_FLOAT = 4;
    private static final int BYTES_PER_SHORT = 2;

    /** Quad corners, in order: top-left, bottom-left, bottom-right, top-right. */
    private static float[] spriteCoords = {
            -0.15f, 0.15f,   // top left
            -0.15f, -0.15f,  // bottom left
            0.15f, -0.15f,   // bottom right
            0.15f, 0.15f};   // top right

    /** Two triangles covering the quad (indices into spriteCoords corners). */
    private final short[] drawOrder = {0, 1, 2, 0, 2, 3};

    /** RGBA color uniform: white, fully opaque. */
    private final float[] color = {1f, 1f, 1f, 1.0f};

    /**
     * Builds the vertex/texcoord/index buffers, compiles and links the shader
     * program, and uploads the text bitmap as a GL texture.
     *
     * @param activityContext context used to load shader assets and render the TextView
     * @throws IOException if a shader asset cannot be read
     */
    BitmapRenerer(final Context activityContext) throws IOException {
        context = activityContext;

        // Vertex buffer: # of coordinate values * 4 bytes per float, native byte order.
        ByteBuffer bb = ByteBuffer.allocateDirect(spriteCoords.length * BYTES_PER_FLOAT);
        bb.order(ByteOrder.nativeOrder());
        vertexBuffer = bb.asFloatBuffer();
        vertexBuffer.put(spriteCoords);
        vertexBuffer.position(0);

        // Texture coordinates per corner, in the SAME order as spriteCoords
        // (top-left, bottom-left, bottom-right, top-right). GLUtils.texImage2D
        // uploads bitmaps with (0,0) at the bitmap's top-left, so v grows downward.
        // (The original comments labeled these corners incorrectly.)
        final float[] cubeTextureCoordinateData = {
                0.0f, 0.0f, // top left
                0.0f, 1.0f, // bottom left
                1.0f, 1.0f, // bottom right
                1.0f, 0.0f  // top right
        };
        cubeTextureCoordinates = ByteBuffer
                .allocateDirect(cubeTextureCoordinateData.length * BYTES_PER_FLOAT)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        cubeTextureCoordinates.put(cubeTextureCoordinateData).position(0);

        // Index buffer. FIX: the original sized this off spriteCoords.length (16 bytes);
        // the correct size is drawOrder.length * 2 bytes per short (12 bytes).
        ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * BYTES_PER_SHORT);
        dlb.order(ByteOrder.nativeOrder());
        drawListBuffer = dlb.asShortBuffer();
        drawListBuffer.put(drawOrder);
        drawListBuffer.position(0);

        int vertexShader = ShaderUtil.loadGLShader(TAG, activityContext, GLES20.GL_VERTEX_SHADER, "shaders/bitmap/vertex_shader.vert");
        // NOTE(review): the fragment shader is loaded from a file named
        // "vertex_shader.frag" — presumably the asset really is named this way,
        // but verify the path; a typo here would fail at shader-load time.
        int fragmentShader = ShaderUtil.loadGLShader(TAG, activityContext, GLES20.GL_FRAGMENT_SHADER, "shaders/bitmap/vertex_shader.frag");

        shaderProgram = GLES20.glCreateProgram();
        GLES20.glAttachShader(shaderProgram, vertexShader);
        GLES20.glAttachShader(shaderProgram, fragmentShader);
        // Bind the texcoord attribute to a fixed slot BEFORE linking.
        GLES20.glBindAttribLocation(shaderProgram, 0, "a_TexCoordinate");
        GLES20.glLinkProgram(shaderProgram);

        // Render the text bitmap and upload it as the quad's texture.
        textureDataHandle = loadTexture(context);
    }

    /**
     * Draws the quad using the supplied model-view-projection matrix.
     *
     * @param mvpMatrix 4x4 column-major MVP matrix passed to the "uMVPMatrix" uniform
     */
    public void draw(float[] mvpMatrix) {
        GLES20.glUseProgram(shaderProgram);

        // Position attribute.
        int positionHandle = GLES20.glGetAttribLocation(shaderProgram, "vPosition");
        GLES20.glEnableVertexAttribArray(positionHandle);
        int vertexStride = COORDS_PER_VERTEX * BYTES_PER_FLOAT; // bytes per vertex
        GLES20.glVertexAttribPointer(positionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);

        // Color uniform.
        int colorHandle = GLES20.glGetUniformLocation(shaderProgram, "vColor");
        GLES20.glUniform4fv(colorHandle, 1, color, 0);

        // FIX: u_Texture is a uniform sampler, but the original queried it with
        // glGetAttribLocation, which returns -1 for uniforms — the sampler binding
        // below silently did nothing. Use glGetUniformLocation.
        int textureUniformHandle = GLES20.glGetUniformLocation(shaderProgram, "u_Texture");
        int textureCoordinateHandle = GLES20.glGetAttribLocation(shaderProgram, "a_TexCoordinate");

        // Bind the texture to unit 0 and point the sampler at it.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureDataHandle);
        GLES20.glUniform1i(textureUniformHandle, 0);

        // Texture-coordinate attribute.
        cubeTextureCoordinates.position(0);
        GLES20.glVertexAttribPointer(textureCoordinateHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, 0, cubeTextureCoordinates);
        GLES20.glEnableVertexAttribArray(textureCoordinateHandle);

        // Projection/view transformation.
        int mvpMatrixHandle = GLES20.glGetUniformLocation(shaderProgram, "uMVPMatrix");
        GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, mvpMatrix, 0);

        // Clamp so edge texels don't wrap/bleed when the quad is scaled.
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

        // Standard alpha blending for the transparent bitmap/text.
        // FIX: the original called glBlendFunc(GL_ONE, ...) and then immediately
        // overwrote it with this call; only the effective pair is kept.
        GLES20.glEnable(GLES20.GL_BLEND);
        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);

        GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);

        // FIX: disable BOTH vertex attribute arrays; the original left
        // a_TexCoordinate enabled, leaking state into subsequent draw calls.
        GLES20.glDisableVertexAttribArray(positionHandle);
        GLES20.glDisableVertexAttribArray(textureCoordinateHandle);
    }

    /**
     * Generates the text bitmap and uploads it into a new GL texture object.
     *
     * @param context unused here beyond parity with the original signature;
     *                the bitmap is rendered via the stored activity context
     * @return the GL texture name
     * @throws RuntimeException if glGenTextures fails to allocate a texture
     */
    public int loadTexture(final Context context) {
        final int[] textureHandle = new int[1];
        GLES20.glGenTextures(1, textureHandle, 0);
        // Fail fast instead of uploading into texture 0.
        if (textureHandle[0] == 0) {
            throw new RuntimeException("Error loading texture.");
        }

        Bitmap bitmap = drawTextToBitmap();
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
        // FIX: the original set MIN/MAG filters twice (LINEAR then NEAREST);
        // only the last pair takes effect, so a single NEAREST pair is kept.
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
        // Load the bitmap into the bound texture.
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
        // Pixel data now lives in GL; free the Java-side copy.
        bitmap.recycle();
        return textureHandle[0];
    }

    /**
     * Replaces the quad's corner coordinates and refills the vertex buffer in place.
     * Corner order matches the texture coordinates: a = top-left, b = bottom-left,
     * c = bottom-right, d = top-right.
     *
     * FIX: the original allocated two fresh direct ByteBuffers here on every call
     * and never used them; the existing buffers are reused instead. The index data
     * never changes, so the draw-list buffer is left untouched.
     */
    void setCoords(float a1, float a2, float b1, float b2, float c1, float c2, float d1, float d2) {
        spriteCoords[0] = a1;
        spriteCoords[1] = a2;
        spriteCoords[2] = b1;
        spriteCoords[3] = b2;
        spriteCoords[4] = c1;
        spriteCoords[5] = c2;
        spriteCoords[6] = d1;
        spriteCoords[7] = d2;
        vertexBuffer.position(0);
        vertexBuffer.put(spriteCoords);
        vertexBuffer.position(0);
    }

    /**
     * Renders "DEMO TEXT DEMO" into a 256x256 transparent bitmap by laying out a
     * TextView inside a gray LinearLayout and drawing the layout's cache.
     *
     * NOTE(review): setDrawingCacheEnabled/getDrawingCache are deprecated on modern
     * Android; consider drawing the view directly with View#draw(Canvas) instead.
     */
    private Bitmap drawTextToBitmap() {
        Bitmap bitmap = Bitmap.createBitmap(256, 256, Bitmap.Config.ARGB_4444);
        Canvas canvas = new Canvas(bitmap);
        bitmap.eraseColor(android.graphics.Color.TRANSPARENT);
        canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);

        // Paint used only as the drawBitmap paint below.
        TextPaint textPaint = new TextPaint(TextPaint.ANTI_ALIAS_FLAG);
        textPaint.setStyle(Paint.Style.FILL);
        textPaint.setAntiAlias(true);
        textPaint.setColor(Color.RED);
        textPaint.setTextSize(30);

        TextView tv = new TextView(context);
        tv.setTextColor(Color.RED);
        tv.setTextSize(10f); // FIX: the original set the size twice (10 then 10f)
        String text = "DEMO TEXT DEMO";
        tv.setText(text);
        tv.setEllipsize(TextUtils.TruncateAt.END);
        tv.setMaxLines(4);
        tv.setGravity(Gravity.BOTTOM);
        tv.setPadding(8, 8, 8, 50);
        tv.setDrawingCacheEnabled(true);
        tv.measure(
                View.MeasureSpec.makeMeasureSpec(canvas.getWidth(), View.MeasureSpec.EXACTLY),
                View.MeasureSpec.makeMeasureSpec(canvas.getHeight(), View.MeasureSpec.EXACTLY));
        tv.layout(0, 0, tv.getMeasuredWidth(), tv.getMeasuredHeight());

        // FIX: the original guarded this on bitmap.isRecycled() and left `parent`
        // null in the else branch, NPE-ing at getDrawingCache(). A bitmap created
        // three statements earlier cannot be recycled, so the branch was dead and
        // the guard is removed.
        LinearLayout parent = new LinearLayout(context);
        parent.setBackgroundColor(Color.GRAY);
        parent.setDrawingCacheEnabled(true);
        parent.measure(
                View.MeasureSpec.makeMeasureSpec(canvas.getWidth(), View.MeasureSpec.EXACTLY),
                View.MeasureSpec.makeMeasureSpec(canvas.getHeight(), View.MeasureSpec.EXACTLY));
        parent.layout(0, 0, parent.getMeasuredWidth(), parent.getMeasuredHeight());
        parent.setLayoutParams(new LinearLayout.LayoutParams(
                LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.MATCH_PARENT));
        parent.setOrientation(LinearLayout.VERTICAL);
        parent.addView(tv);

        canvas.drawBitmap(parent.getDrawingCache(), 0, 0, textPaint);
        tv.setDrawingCacheEnabled(false);
        parent.setDrawingCacheEnabled(false);
        return bitmap;
    }
}
I know that I have to pass the proper coords into this class, but I have no idea how to obtain them. I know that augmentedImage.getCenterPose()
might be useful here — together with getExtentX()/getExtentZ() it describes the image's position and size in world space — but I have no idea how to map that data into something usable by the void setCoords(float a1, float a2, float b1, float b2, float c1, float c2, float d1, float d2)
method.
from Align bitmap in Android OpenGL Es - ArCore
No comments:
Post a Comment