Thursday, February 1, 2024

Room Reflect Shooter MR released

https://www.meta.com/ja-jp/experiences/7082792821834790/



Wednesday, January 24, 2024

Room Reflect Shooter MR privacy policy

1. Data Collection Explanation:
Our app does not collect any personal data from users. We respect your privacy and ensure that no personal information is gathered.

2. Data Usage Explanation:
Since our app does not collect any personal data, we do not use any user data for any purpose.

3. Data Deletion Policy:
As no personal data is collected or stored by our app, there is no need for a data deletion request.

4. Data Protection Compliance:
Our organization and app comply with data protection regulations. We do not collect or store any user data, thereby eliminating the risk of security vulnerabilities related to data privacy.

Room Reflect Shooter MR

Overview:

This is an MR shooting game with three stages. In the entrance scene, you select a stage and click the start button to begin. Enemies appear on the walls of your real room, and you shoot them down with fireballs fired by pulling the controller's trigger. Fireballs reflect off the real walls. After a set time, the stage either ends in a clear or a boss appears; defeating the boss clears the stage. When you clear a stage, a score based on the types of enemies you defeated is recorded, and the top 10 scores are kept in a ranking.

Detailed Instructions and Cautions:

-If walls matching your room's shape do not appear in the entrance scene, the room model setup is incomplete; close the app and run a room scan from the headset settings.

If you are not sure how to do this, you can also create the room model by running the First Encounter app for Quest 3.

-In the entrance, you can reset the position of the menu panel using the start button on the left controller.

-During the game, pressing the start button on the left controller toggles the hand menu display.

-In the entrance or in the hand menu, you can toggle the music and controller vibration on and off.

-You can have up to five fireballs in flight at once (a rough Unity sketch of these fireball rules follows this list).

-A fireball disappears after hitting an enemy, hitting you, or after five seconds.

-Any fireball, yours or an enemy's, damages you if it hits your head; taking a certain amount of damage ends the game in a game over.
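As a rough illustration of the fireball rules above, a Unity-style sketch might look like the following. This is not the game's actual code; the prefab setup (a Rigidbody plus a collider with a bouncy physics material so fireballs reflect off the scanned walls) and all names here are assumptions.

using System.Collections.Generic;
using UnityEngine;

// Illustrative sketch only: enforces the "up to five fireballs at once" cap and
// the five-second lifetime. Reflection off the real walls is assumed to come from
// the fireball prefab's Rigidbody bouncing against the room-mesh colliders.
public class FireballLauncherSketch : MonoBehaviour
{
    public GameObject fireballPrefab; // assumed: Rigidbody + collider with a bouncy physics material
    public Transform muzzle;          // assumed: the controller tip
    public float speed = 5f;
    public float lifetime = 5f;       // fireballs disappear after five seconds
    public int maxActive = 5;         // at most five fireballs in flight

    private readonly List<GameObject> active = new List<GameObject>();

    // Call this from the trigger input.
    public void Fire()
    {
        active.RemoveAll(f => f == null);      // forget fireballs that already despawned
        if (active.Count >= maxActive) return; // cap reached: ignore this shot

        GameObject fb = Instantiate(fireballPrefab, muzzle.position, muzzle.rotation);
        fb.GetComponent<Rigidbody>().velocity = muzzle.forward * speed;
        Destroy(fb, lifetime);                 // automatic despawn after five seconds
        active.Add(fb);
    }
}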



Thursday, December 14, 2023

Using Gemini from Unity

↓ The code below is what ChatGPT wrote after I had it read the documentation, posted as-is.


using UnityEngine;

using UnityEngine.Networking;

using System.Collections;


public class GoogleGeminiAPI : MonoBehaviour

{

    private string apiKey = "A"; // Set your API key here

    private string apiUrl = "https://generativelanguage.googleapis.com/v1beta3/models/text-bison-001:generateText";


    void Start()

    {

        StartCoroutine(SendRequestToGemini("Write a story about a magic backpack"));

    }


    IEnumerator SendRequestToGemini(string promptText)

    {

        string requestData = "{\"prompt\": {\"text\": \"" + promptText + "\"}}";

        byte[] bodyRaw = System.Text.Encoding.UTF8.GetBytes(requestData);


        UnityWebRequest request = new UnityWebRequest(apiUrl + "?key=" + apiKey, "POST");

        request.uploadHandler = new UploadHandlerRaw(bodyRaw);

        request.downloadHandler = new DownloadHandlerBuffer();

        request.SetRequestHeader("Content-Type", "application/json");


        yield return request.SendWebRequest();


        if (request.result != UnityWebRequest.Result.Success)

        {

            Debug.LogError("Error: " + request.error);

        }

        else

        {

            Debug.Log("Response: " + request.downloadHandler.text);

            // Process the response here

        }

    }

}
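One caveat: the URL that ChatGPT produced above actually points at the older PaLM text-bison-001 model, not at a Gemini model. Calling Gemini itself goes through the generateContent endpoint with a contents/parts request body instead. The coroutine below is an untested sketch of that change, meant as a drop-in replacement inside the same class; the model name and endpoint are my assumption from the v1beta docs at the time of writing.

    // Sketch only: same flow as above, but targeting the Gemini generateContent endpoint.
    IEnumerator SendRequestToGeminiPro(string promptText)
    {
        string url = "https://generativelanguage.googleapis.com/v1beta/models/gemini-pro:generateContent?key=" + apiKey;
        string requestData = "{\"contents\": [{\"parts\": [{\"text\": \"" + promptText + "\"}]}]}";
        byte[] bodyRaw = System.Text.Encoding.UTF8.GetBytes(requestData);

        UnityWebRequest request = new UnityWebRequest(url, "POST");
        request.uploadHandler = new UploadHandlerRaw(bodyRaw);
        request.downloadHandler = new DownloadHandlerBuffer();
        request.SetRequestHeader("Content-Type", "application/json");

        yield return request.SendWebRequest();

        if (request.result != UnityWebRequest.Result.Success)
        {
            Debug.LogError("Error: " + request.error);
        }
        else
        {
            // The generated text is at candidates[0].content.parts[0].text in the JSON response.
            Debug.Log("Response: " + request.downloadHandler.text);
        }
    }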


Monday, November 20, 2023

A shader that stops objects behind it from being rendered (for VR/MR)

 Shader "Custom/OcclusionShader" {

    Properties{

        _Color("Main Color", Color) = (1,1,1,1)

    }

        SubShader{

            Tags { "RenderType" = "Opaque" }

            LOD 100


            Pass {

                Stencil {

                    Ref 1

                    Comp always

                    Pass replace

                }


                CGPROGRAM

                #pragma vertex vert

                #pragma fragment frag

                #include "UnityCG.cginc"


                struct appdata {

                    float4 vertex : POSITION;

                    float3 normal : NORMAL;

                    UNITY_VERTEX_INPUT_INSTANCE_ID 

                };


                struct v2f {

                    float4 pos : SV_POSITION;

                    UNITY_VERTEX_OUTPUT_STEREO 

                };


                fixed4 _Color;


                v2f vert(appdata v) {

                    v2f o;

                    UNITY_SETUP_INSTANCE_ID(v); 

                    UNITY_INITIALIZE_OUTPUT(v2f, o); 

                    UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o); 


                    o.pos = UnityObjectToClipPos(v.vertex);

                    return o;

                }


                fixed4 frag(v2f i) : SV_Target {

                    return fixed4(0,0,0,0); // Render invisible color

                }

                ENDCG

            }


            Pass {

                Stencil {

                    Ref 1

                    Comp notequal

                    Pass keep

                }


                CGPROGRAM

                #pragma vertex vert

                #pragma fragment frag

                #include "UnityCG.cginc"


                struct appdata {

                    float4 vertex : POSITION;

                    float3 normal : NORMAL;

                    UNITY_VERTEX_INPUT_INSTANCE_ID 

                };


                struct v2f {

                    float4 pos : SV_POSITION;

                    UNITY_VERTEX_OUTPUT_STEREO 

                };


                fixed4 _Color;


                v2f vert(appdata v) {

                    v2f o;

                    UNITY_SETUP_INSTANCE_ID(v); 

                    UNITY_INITIALIZE_OUTPUT(v2f, o); 

                    UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o); 

                    o.pos = UnityObjectToClipPos(v.vertex);

                    return o;

                }


                fixed4 frag(v2f i) : SV_Target {

                    // Render with object color

                    return _Color;

                }

                ENDCG

            }

    }

        FallBack "Diffuse"

}
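To use the shader, a material based on it just needs to go on whatever geometry should hide the objects behind it (for example the scanned wall mesh in MR). A minimal, purely illustrative way to assign it at runtime is shown below; the component and variable names are placeholders.

using UnityEngine;

// Illustrative sketch: assign a material using the occlusion shader to an occluder mesh.
// (Shader.Find only works if the shader is included in the build.)
public class OccluderSetupSketch : MonoBehaviour
{
    void Start()
    {
        var occlusionMaterial = new Material(Shader.Find("Custom/OcclusionShader"));
        GetComponent<MeshRenderer>().material = occlusionMaterial;
    }
}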


Friday, November 17, 2023

Conversing with GPT-4V using camera images and text

using UnityEngine;

using System.Collections;

using UnityEngine.Networking;

using System;

using TMPro;

using Newtonsoft.Json;

using System.Collections.Generic; 


public class ImageRequester : MonoBehaviour

{

    private string apiURL = "https://api.openai.com/v1/chat/completions";

    private string apiKey = "APIKey"; // Set your OpenAI API key here

    public Camera mainCamera;

    public RenderTexture renderTexture;

    public TMP_Text tmpText;

    public bool TextVisible = false;


    private List<Message> conversationHistory = new List<Message>();




    [System.Serializable]

    public class ChatRequest

    {

        public string model = "gpt-4-vision-preview";

        public Message[] messages;

        public int max_tokens = 300;

    }


    [System.Serializable]

    public class Message

    {

        public string role;

        public Content[] content;

    }


    [Serializable]

    public class Content

    {

        public string type;

        public string text;

        public ImageURL image_url;

    }

       

    [Serializable]

    public class ImageURL

    {

        public string url;

    }


    [Serializable]

    public class ResponseData

    {

        public Choice[] choices;


        [Serializable]

        public class Choice

        {

            public Message message;

        }


        [Serializable]

        public class Message

        {

            public string content;

        }

    }



    private string ParseResponse(string response)

    {

        try

        {

            ResponseData data = JsonConvert.DeserializeObject<ResponseData>(response);

            if (data != null && data.choices != null && data.choices.Length > 0 && data.choices[0].message != null)

            {

                return data.choices[0].message.content;

            }

        }

        catch (Exception e)

        {

            Debug.LogError("Error parsing response: " + e.Message);

        }

        return string.Empty;

    }



    void Start()

    {

        conversationHistory.Add(new Message

        {

            role = "system",

            content = new[]

            {

                         new Content { type = "text", text = "あなたは親切なアシスタントです。" }

            }

        });

    }



    public void RequestButtonClicked(string prompt)

    {

        if (tmpText.IsActive())

        {

            tmpText.gameObject.SetActive(false);


        }

        else

        {

            tmpText.gameObject.SetActive(true);

            tmpText.text = "";

            ImageRequestToGPT(prompt);


        }

    }


    public void ImageRequestToGPT(string prompt)

    {


        Debug.Log("Button Clicked. Requesting image description...");


        mainCamera.targetTexture = renderTexture;

        mainCamera.Render();

        

        RenderTexture.active = renderTexture;

        Texture2D image = new Texture2D(renderTexture.width, renderTexture.height);

        image.ReadPixels(new Rect(0, 0, renderTexture.width, renderTexture.height), 0, 0);

        image.Apply();


        tmpText.text = "Start requesting image description...";

   

        byte[] imageBytes = image.EncodeToPNG();

        string base64Image = Convert.ToBase64String(imageBytes);


        mainCamera.targetTexture = null;

        RenderTexture.active = null; 


        StartCoroutine(CallOpenAIVisionAPI(prompt,base64Image)); 

        Destroy(image);

    }



    IEnumerator CallOpenAIVisionAPI(string prompt, string base64Image)

    {

        var request = new UnityWebRequest(apiURL, "POST");

        Message newMessage = new Message

        {

            role = "user",

            content = new[]

                     {

                         new Content { type = "text", text = prompt },

                         new Content { type = "image_url", image_url = new ImageURL{url=$"data:image/png;base64,{base64Image}" } }

                     }

        };


        conversationHistory.Add(newMessage);


        Message[] messagesToSend = conversationHistory.ToArray();

        ChatRequest chatRequest = new ChatRequest { messages = messagesToSend };




        var settings = new JsonSerializerSettings

        {

            NullValueHandling = NullValueHandling.Ignore

        };


        string jsonPayload = JsonConvert.SerializeObject(chatRequest, settings);


        Debug.Log("jsonPayload" + jsonPayload);



        request.uploadHandler = new UploadHandlerRaw(System.Text.Encoding.UTF8.GetBytes(jsonPayload));

        request.downloadHandler = new DownloadHandlerBuffer();

        request.SetRequestHeader("Content-Type", "application/json");

        request.SetRequestHeader("Authorization", $"Bearer {apiKey}");



        yield return request.SendWebRequest();



        if (request.result != UnityWebRequest.Result.Success)

        {

            Debug.LogError($"Error: {request.error}");

        }

        else

        {


            string description = ParseResponse(request.downloadHandler.text);

            Debug.Log("Description: " + description);


            tmpText.text = description;


        }

    }

}
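For completeness, one way to drive this from a UI Button is sketched below; the prompt string and component names are placeholders, not part of the script above.

using UnityEngine;
using UnityEngine.UI;

// Illustrative sketch: call RequestButtonClicked with a fixed prompt when a UI Button is pressed.
public class ImageRequestButtonSketch : MonoBehaviour
{
    public ImageRequester requester; // the component defined above
    public Button button;
    public string prompt = "What do you see in this image?";

    void Start()
    {
        button.onClick.AddListener(() => requester.RequestButtonClicked(prompt));
    }
}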